Even more mozilla rel-257 patches

Ian Neal committed 4 years ago (commit aade0cfd85)
26 changed files with 24719 additions and 38671 deletions
  1. + 0 - 44      comm-esr60/mozilla-esr60/patches/1362858-backout-60-TBonly.patch
  2. + 0 - 47      comm-esr60/mozilla-esr60/patches/1387428-Backout-56.patch
  3. + 0 - 73      comm-esr60/mozilla-esr60/patches/1502090-65a1.patch
  4. + 0 - 33      comm-esr60/mozilla-esr60/patches/1542829-68a1.patch
  5. + 0 - 34      comm-esr60/mozilla-esr60/patches/1583041-xlib_logging_hack.patch
  6. + 0 - 38264   comm-esr60/mozilla-esr60/patches/1602261-retsulcksat.patch
  7. + 0 - 18      comm-esr60/mozilla-esr60/patches/series
  8. + 49 - 0      rel-257/mozilla-esr60/patches/1233768-61a1.patch
  9. + 70 - 68     rel-257/mozilla-esr60/patches/1372458-63a1.patch
  10. + 179 - 0    rel-257/mozilla-esr60/patches/1418629-68a1.patch
  11. + 16 - 25    rel-257/mozilla-esr60/patches/1437128-61a1.patch
  12. + 4 - 3      rel-257/mozilla-esr60/patches/1445969-61a1.patch
  13. + 7 - 7      rel-257/mozilla-esr60/patches/1472672-63a1.patch
  14. + 45 - 0     rel-257/mozilla-esr60/patches/1473833-63a1.patch
  15. + 2 - 3      rel-257/mozilla-esr60/patches/1488401-64a1.patch
  16. + 1 - 2      rel-257/mozilla-esr60/patches/1500637-65a1.patch
  17. + 23 - 2     rel-257/mozilla-esr60/patches/1510276-67a1.patch
  18. + 1 - 2      rel-257/mozilla-esr60/patches/1516605-66a1.patch
  19. + 73 - 0     rel-257/mozilla-esr60/patches/1520909-65a1.patch
  20. + 17 - 17    rel-257/mozilla-esr60/patches/1523665-67a1.patch
  21. + 383 - 0    rel-257/mozilla-esr60/patches/1565919-70a1.patch
  22. + 7 - 7      rel-257/mozilla-esr60/patches/1566465-70a1.patch
  23. + 1 - 1      rel-257/mozilla-esr60/patches/1578303_enable_loginmanagercontextmenu-71a1.patch
  24. + 23820 - 0  rel-257/mozilla-esr60/patches/1602261-retsulcksat.patch
  25. + 3 - 21     rel-257/mozilla-esr60/patches/1602262-safebrowsing-pref.patch
  26. + 18 - 0     rel-257/mozilla-esr60/patches/NOBUG-0c5f5c2e2a86-64a1.patch

+ 0 - 44
comm-esr60/mozilla-esr60/patches/1362858-backout-60-TBonly.patch

@@ -1,44 +0,0 @@
-# HG changeset patch
-# User Jorg K <jorgk@jorgk.com>
-# Date 1537205939 -7200
-# Node ID 78c54e4a83e8807cdeaa96e22648d381eb51c8da
-# Parent  5d2d4d0948e51d5a13f1ba5dc32f5acafffe7d34
-Backed out part of changeset eaf99ba3813a (bug 1362858, part 1, for causing bug 1418629) to build Thunderbird 60.2. a=jorgk DONTBUILD
-
-diff --git a/extensions/spellcheck/src/mozInlineSpellWordUtil.cpp b/extensions/spellcheck/src/mozInlineSpellWordUtil.cpp
---- a/extensions/spellcheck/src/mozInlineSpellWordUtil.cpp
-+++ b/extensions/spellcheck/src/mozInlineSpellWordUtil.cpp
-@@ -643,30 +643,21 @@ static bool
- TextNodeContainsDOMWordSeparator(nsINode* aNode,
-                                  int32_t aBeforeOffset,
-                                  int32_t* aSeparatorOffset)
- {
-   // aNode is actually an nsIContent, since it's eTEXT
-   nsIContent* content = static_cast<nsIContent*>(aNode);
-   const nsTextFragment* textFragment = content->GetText();
-   NS_ASSERTION(textFragment, "Where is our text?");
--  nsString text;
--  int32_t end = std::min(aBeforeOffset, int32_t(textFragment->GetLength()));
--  bool ok = textFragment->AppendTo(text, 0, end, mozilla::fallible);
--  if(!ok)
--    return false;
--
--  WordSplitState state(nullptr, text, 0, end);
--  for (int32_t i = end - 1; i >= 0; --i) {
--    if (IsDOMWordSeparator(textFragment->CharAt(i)) ||
--        state.ClassifyCharacter(i, true) == CHAR_CLASS_SEPARATOR) {
-+  for (int32_t i = std::min(aBeforeOffset, int32_t(textFragment->GetLength())) - 1; i >= 0; --i) {
-+    if (IsDOMWordSeparator(textFragment->CharAt(i))) {
-       // Be greedy, find as many separators as we can
-       for (int32_t j = i - 1; j >= 0; --j) {
--        if (IsDOMWordSeparator(textFragment->CharAt(j)) ||
--            state.ClassifyCharacter(j, true) == CHAR_CLASS_SEPARATOR) {
-+        if (IsDOMWordSeparator(textFragment->CharAt(j))) {
-           i = j;
-         } else {
-           break;
-         }
-       }
-       *aSeparatorOffset = i;
-       return true;
-     }

+ 0 - 47
comm-esr60/mozilla-esr60/patches/1387428-Backout-56.patch

@@ -1,47 +0,0 @@
-# HG changeset patch
-# User Ian Neal <iann_cvs@blueyonder.co.uk>
-# Date 1567972140 -7200
-# Parent  4dccd6b83d79caec543f5b9eedeb16c8e948a75a
-Bug 1387428 - Backout: Disable TLS 1.3 by default for Release 56. r=frg a=frg
-
-diff --git a/security/manager/ssl/nsNSSComponent.cpp b/security/manager/ssl/nsNSSComponent.cpp
---- a/security/manager/ssl/nsNSSComponent.cpp
-+++ b/security/manager/ssl/nsNSSComponent.cpp
-@@ -1682,17 +1682,17 @@ void nsNSSComponent::setValidationOption
- // Enable the TLS versions given in the prefs, defaulting to TLS 1.0 (min) and
- // TLS 1.2 (max) when the prefs aren't set or set to invalid values.
- nsresult
- nsNSSComponent::setEnabledTLSVersions()
- {
-   // keep these values in sync with security-prefs.js
-   // 1 means TLS 1.0, 2 means TLS 1.1, etc.
-   static const uint32_t PSM_DEFAULT_MIN_TLS_VERSION = 1;
--  static const uint32_t PSM_DEFAULT_MAX_TLS_VERSION = 3;
-+  static const uint32_t PSM_DEFAULT_MAX_TLS_VERSION = 4;
- 
-   uint32_t minFromPrefs = Preferences::GetUint("security.tls.version.min",
-                                                PSM_DEFAULT_MIN_TLS_VERSION);
-   uint32_t maxFromPrefs = Preferences::GetUint("security.tls.version.max",
-                                                PSM_DEFAULT_MAX_TLS_VERSION);
- 
-   SSLVersionRange defaults = {
-     SSL_LIBRARY_VERSION_3_0 + PSM_DEFAULT_MIN_TLS_VERSION,
-diff --git a/security/manager/ssl/security-prefs.js b/security/manager/ssl/security-prefs.js
---- a/security/manager/ssl/security-prefs.js
-+++ b/security/manager/ssl/security-prefs.js
-@@ -1,14 +1,14 @@
- /* This Source Code Form is subject to the terms of the Mozilla Public
-  * License, v. 2.0. If a copy of the MPL was not distributed with this
-  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
- 
- pref("security.tls.version.min", 1);
--pref("security.tls.version.max", 3);
-+pref("security.tls.version.max", 4);
- pref("security.tls.version.fallback-limit", 3);
- pref("security.tls.insecure_fallback_hosts", "");
- pref("security.tls.enable_0rtt_data", false);
- 
- pref("security.ssl.treat_unsafe_negotiation_as_broken", false);
- pref("security.ssl.require_safe_negotiation",  false);
- pref("security.ssl.enable_ocsp_stapling", true);
- pref("security.ssl.enable_false_start", true);

+ 0 - 73
comm-esr60/mozilla-esr60/patches/1502090-65a1.patch

@@ -1,73 +0,0 @@
-# HG changeset patch
-# User Ted Campbell <tcampbell@mozilla.com>
-# Date 1540497002 0
-# Node ID 1c4bf766a99a657e2f88183afbef240e9e8e38ac
-# Parent  428742bd8237eef0d27e71c30fd37bc0a2b17e80
-Bug 1502090 - Fix bailout tracking with fun.call. r=nbp
-
-NOTE: Multi-arg array.push is still disabled in Ion.
-
-Differential Revision: https://phabricator.services.mozilla.com/D9803
-
-diff --git a/js/src/jit-test/tests/ion/bug1502090.js b/js/src/jit-test/tests/ion/bug1502090.js
-new file mode 100644
---- /dev/null
-+++ b/js/src/jit-test/tests/ion/bug1502090.js
-@@ -0,0 +1,13 @@
-+function f(o) {
-+   var a = [o];
-+   a.length = a[0];
-+   var useless = function() {}
-+   var sz = Array.prototype.push.call(a, 42, 43);
-+   (function(){
-+       sz;
-+   })(new Boolean(false));
-+}
-+for (var i = 0; i < 2; i++) {
-+   f(1);
-+}
-+f(2);
-diff --git a/js/src/jit/IonBuilder.cpp b/js/src/jit/IonBuilder.cpp
---- a/js/src/jit/IonBuilder.cpp
-+++ b/js/src/jit/IonBuilder.cpp
-@@ -5053,32 +5053,38 @@ IonBuilder::jsop_funcall(uint32_t argc)
-         return makeCall(native, callInfo);
-     }
-     current->peek(calleeDepth)->setImplicitlyUsedUnchecked();
- 
-     // Extract call target.
-     TemporaryTypeSet* funTypes = current->peek(funcDepth)->resultTypeSet();
-     JSFunction* target = getSingleCallTarget(funTypes);
- 
-+    CallInfo callInfo(alloc(), pc, /* constructing = */ false,
-+                      /* ignoresReturnValue = */ BytecodeIsPopped(pc));
-+
-+    // Save prior call stack in case we need to resolve during bailout
-+    // recovery of inner inlined function. This includes the JSFunction and the
-+    // 'call' native function.
-+    MOZ_TRY(callInfo.savePriorCallStack(this, current, argc + 2));
-+
-     // Shimmy the slots down to remove the native 'call' function.
-     current->shimmySlots(funcDepth - 1);
- 
-     bool zeroArguments = (argc == 0);
- 
-     // If no |this| argument was provided, explicitly pass Undefined.
-     // Pushing is safe here, since one stack slot has been removed.
-     if (zeroArguments) {
-         pushConstant(UndefinedValue());
-     } else {
-         // |this| becomes implicit in the call.
-         argc -= 1;
-     }
- 
--    CallInfo callInfo(alloc(), pc, /* constructing = */ false,
--                      /* ignoresReturnValue = */ BytecodeIsPopped(pc));
-     if (!callInfo.init(current, argc))
-         return abort(AbortReason::Alloc);
- 
-     // Try to inline the call.
-     if (!zeroArguments) {
-         InliningDecision decision = makeInliningDecision(target, callInfo);
-         switch (decision) {
-           case InliningDecision_Error:

+ 0 - 33
comm-esr60/mozilla-esr60/patches/1542829-68a1.patch

@@ -1,33 +0,0 @@
-# HG changeset patch
-# User Ryan VanderMeulen <ryanvm@gmail.com>
-# Date 1554750031 0
-#      Mon Apr 08 19:00:31 2019 +0000
-# Node ID ffaff6fb1fe3b38604abea4d980401f319aad43e
-# Parent  8fcb2ad64899854db6b7a4b27b8c7f518e95e528
-Bug 1542829 - Backport an upstream libpng patch. r=aosmond
-
-Differential Revision: https://phabricator.services.mozilla.com/D26575
-
-diff --git a/media/libpng/png.c b/media/libpng/png.c
---- a/media/libpng/png.c
-+++ b/media/libpng/png.c
-@@ -4593,18 +4593,17 @@ png_image_free(png_imagep image)
- {
-    /* Safely call the real function, but only if doing so is safe at this point
-     * (if not inside an error handling context).  Otherwise assume
-     * png_safe_execute will call this API after the return.
-     */
-    if (image != NULL && image->opaque != NULL &&
-       image->opaque->error_buf == NULL)
-    {
--      /* Ignore errors here: */
--      (void)png_safe_execute(image, png_image_free_function, image);
-+      png_image_free_function(image);
-       image->opaque = NULL;
-    }
- }
- 
- int /* PRIVATE */
- png_image_error(png_imagep image, png_const_charp error_message)
- {
-    /* Utility to log an error. */

+ 0 - 34
comm-esr60/mozilla-esr60/patches/1583041-xlib_logging_hack.patch

@@ -1,34 +0,0 @@
-# HG changeset patch
-# User Ian Neal <iann_cvs@blueyonder.co.uk>
-# Date 1569255060 -7200
-# Parent  d33dd9a2e0ba8ef499170c0cde43cd808e977ccd
-Bug 1583041 - Xlib headers overriding member name. r=frg a=frg
-Fixes issue with conflict of Status variable between logging.h and Xlib.h
-See https://github.com/protocolbuffers/protobuf/issues/4186
-
-diff --git a/gfx/layers/LayerScope.cpp b/gfx/layers/LayerScope.cpp
---- a/gfx/layers/LayerScope.cpp
-+++ b/gfx/layers/LayerScope.cpp
-@@ -47,16 +47,22 @@
- #include "nsIEventTarget.h"
- #include "nsProxyRelease.h"
- #include <list>
- 
- // Undo the damage done by mozzconf.h
- #undef compress
- #include "mozilla/Compression.h"
- 
-+#ifdef Status
-+/* Xlib headers insist on this for some reason... Nuke it because
-+   it'll override our member name */
-+#undef Status
-+#endif
-+
- // Protocol buffer (generated automatically)
- #include "protobuf/LayerScopePacket.pb.h"
- 
- namespace mozilla {
- namespace layers {
- 
- using namespace mozilla::Compression;
- using namespace mozilla::gfx;

+ 0 - 38264
comm-esr60/mozilla-esr60/patches/1602261-retsulcksat.patch

@@ -1,38264 +0,0 @@
-# HG changeset patch
-# User Frank-Rainer Grahl <frgrahl@gmx.net>
-# Date 1572648780 -3600
-# Parent  e1f52ec3c96bd235b015425e34c8ee8a8e5f3bbd
-Bug 1602261 - Say goodbye to taskcluster. Just in the way for porting. r=IanN a=IanN
-
-diff --git a/.taskcluster.yml b/.taskcluster.yml
-deleted file mode 100644
---- a/.taskcluster.yml
-+++ /dev/null
-@@ -1,134 +0,0 @@
--# This file is rendered via JSON-e by
--# - mozilla-taskcluster - https://docs.taskcluster.net/reference/integrations/mozilla-taskcluster/docs/taskcluster-yml
--# - cron tasks - taskcluster/taskgraph/cron/decision.py
--version: 1
--tasks:
--  $let:
--    # sometimes the push user is just `ffxbld` or the like, but we want an email-like field..
--    ownerEmail: {$if: '"@" in push.owner', then: '${push.owner}', else: '${push.owner}@noreply.mozilla.org'}
--    # ensure there's no trailing `/` on the repo URL
--    repoUrl: {$if: 'repository.url[-1] == "/"', then: {$eval: 'repository.url[:-1]'}, else: {$eval: 'repository.url'}}
--  in:
--  - taskId: '${as_slugid("decision")}'
--    taskGroupId: '${as_slugid("decision")}' # same as taskId; this is how automation identifies a decision task
--    schedulerId: 'gecko-level-${repository.level}'
--
--    created: {$fromNow: ''}
--    deadline: {$fromNow: '1 day'}
--    expires: {$fromNow: '1 year 1 second'} # 1 second so artifacts expire first, despite rounding errors
--    metadata:
--      $merge:
--        - owner: "${ownerEmail}"
--          source: "${repoUrl}/raw-file/${push.revision}/.taskcluster.yml"
--        - $if: 'tasks_for == "hg-push"'
--          then:
--            name: "Gecko Decision Task"
--            description: 'The task that creates all of the other tasks in the task graph'
--          else:
--            name: "Decision Task for cron job ${cron.job_name}"
--            description: 'Created by a [cron task](https://tools.taskcluster.net/tasks/${cron.task_id})'
--
--    provisionerId: "aws-provisioner-v1"
--    workerType: "gecko-decision"
--
--    tags:
--      $if: 'tasks_for == "hg-push"'
--      then: {createdForUser: "${ownerEmail}"}
--
--    routes:
--      $if: 'tasks_for == "hg-push"'
--      then:
--        - "index.gecko.v2.${repository.project}.latest.firefox.decision"
--        - "index.gecko.v2.${repository.project}.pushlog-id.${push.pushlog_id}.decision"
--        - "tc-treeherder.v2.${repository.project}.${push.revision}.${push.pushlog_id}"
--        - "tc-treeherder-stage.v2.${repository.project}.${push.revision}.${push.pushlog_id}"
--        - "notify.email.${ownerEmail}.on-failed"
--        - "notify.email.${ownerEmail}.on-exception"
--      else:
--        - "index.gecko.v2.${repository.project}.latest.firefox.decision-${cron.job_name}"
--        - "tc-treeherder.v2.${repository.project}.${push.revision}.${push.pushlog_id}"
--        - "tc-treeherder-stage.v2.${repository.project}.${push.revision}.${push.pushlog_id}"
--
--    scopes:
--      $if: 'tasks_for == "hg-push"'
--      then:
--        - 'assume:repo:${repoUrl[8:]}:*'
--        - 'queue:route:notify.email.${ownerEmail}.*'
--      else:
--        - 'assume:repo:${repoUrl[8:]}:cron:${cron.job_name}'
--
--    dependencies: []
--    requires: all-completed
--
--    priority: lowest
--    retries: 5
--
--    payload:
--      env:
--        # checkout-gecko uses these to check out the source; the inputs
--        # to `mach taskgraph decision` are all on the command line.
--        GECKO_BASE_REPOSITORY: 'https://hg.mozilla.org/mozilla-unified'
--        GECKO_HEAD_REPOSITORY: '${repoUrl}'
--        GECKO_HEAD_REF: '${push.revision}'
--        GECKO_HEAD_REV: '${push.revision}'
--        GECKO_COMMIT_MSG: '${push.comment}'
--        HG_STORE_PATH: /home/worker/checkouts/hg-store
--
--      cache:
--        level-${repository.level}-checkouts: /home/worker/checkouts
--
--      features:
--        taskclusterProxy: true
--        chainOfTrust: true
--
--      # Note: This task is built server side without the context or tooling that
--      # exist in tree so we must hard code the hash
--      # XXX Changing this will break Chain of Trust without an associated puppet and
--      # scriptworker patch!
--      image: 'taskcluster/decision:0.1.8@sha256:195d8439c8e90d59311d877bd2a8964849b2e43bfc6c234092618518d8b2891b'
--
--      maxRunTime: 1800
--
--      # TODO use mozilla-unified for the base repository once the tc-vcs
--      # tar.gz archives are created or tc-vcs isn't being used.
--      command:
--        - /home/worker/bin/run-task
--        - '--vcs-checkout=/home/worker/checkouts/gecko'
--        - '--'
--        - bash
--        - -cx
--        - $let:
--            extraArgs: {$if: 'tasks_for == "hg-push"', then: '', else: '${cron.quoted_args}'}
--          # NOTE: the explicit reference to mozilla-central below is required because android-stuff
--          # still uses tc-vcs, which does not support mozilla-unified
--          # https://bugzilla.mozilla.org/show_bug.cgi?id=1383973
--          in: >
--            cd /home/worker/checkouts/gecko &&
--            ln -s /home/worker/artifacts artifacts &&
--            ./mach --log-no-times taskgraph decision
--            --pushlog-id='${push.pushlog_id}'
--            --pushdate='${push.pushdate}'
--            --project='${repository.project}'
--            --message="$GECKO_COMMIT_MSG"
--            --owner='${ownerEmail}'
--            --level='${repository.level}'
--            --base-repository='https://hg.mozilla.org/mozilla-central'
--            --head-repository="$GECKO_HEAD_REPOSITORY"
--            --head-ref="$GECKO_HEAD_REF"
--            --head-rev="$GECKO_HEAD_REV"
--            ${extraArgs}
--
--      artifacts:
--        'public':
--          type: 'directory'
--          path: '/home/worker/artifacts'
--          expires: {$fromNow: '1 year'}
--
--    extra:
--      treeherder:
--        $if: 'tasks_for == "hg-push"'
--        then:
--          symbol: D
--        else:
--          groupSymbol: cron
--          symbol: "${cron.job_symbol}"
-diff --git a/build/mach_bootstrap.py b/build/mach_bootstrap.py
---- a/build/mach_bootstrap.py
-+++ b/build/mach_bootstrap.py
-@@ -43,33 +43,31 @@ MACH_MODULES = [
-     'python/mach/mach/commands/commandinfo.py',
-     'python/mach/mach/commands/settings.py',
-     'python/mozboot/mozboot/mach_commands.py',
-     'python/mozbuild/mozbuild/mach_commands.py',
-     'python/mozbuild/mozbuild/backend/mach_commands.py',
-     'python/mozbuild/mozbuild/compilation/codecomplete.py',
-     'python/mozbuild/mozbuild/frontend/mach_commands.py',
-     'services/common/tests/mach_commands.py',
--    'taskcluster/mach_commands.py',
-     'testing/awsy/mach_commands.py',
-     'testing/firefox-ui/mach_commands.py',
-     'testing/geckodriver/mach_commands.py',
-     'testing/mach_commands.py',
-     'testing/marionette/mach_commands.py',
-     'testing/mochitest/mach_commands.py',
-     'testing/mozharness/mach_commands.py',
-     'testing/talos/mach_commands.py',
-     'testing/web-platform/mach_commands.py',
-     'testing/xpcshell/mach_commands.py',
-     'tools/compare-locales/mach_commands.py',
-     'tools/docs/mach_commands.py',
-     'tools/lint/mach_commands.py',
-     'tools/mach_commands.py',
-     'tools/power/mach_commands.py',
--    'tools/tryselect/mach_commands.py',
-     'mobile/android/mach_commands.py',
- ]
- 
- 
- CATEGORIES = {
-     'build': {
-         'short': 'Build Commands',
-         'long': 'Interact with the build system',
-@@ -80,21 +78,16 @@ CATEGORIES = {
-         'long': 'Common actions performed after completing a build.',
-         'priority': 70,
-     },
-     'testing': {
-         'short': 'Testing',
-         'long': 'Run tests.',
-         'priority': 60,
-     },
--    'ci': {
--        'short': 'CI',
--        'long': 'Taskcluster commands',
--        'priority': 59
--    },
-     'devenv': {
-         'short': 'Development Environment',
-         'long': 'Set up and configure your development environment.',
-         'priority': 50,
-     },
-     'build-dev': {
-         'short': 'Low-level Build System Interaction',
-         'long': 'Interact with specific parts of the build system.',
-diff --git a/build/virtualenv_packages.txt b/build/virtualenv_packages.txt
---- a/build/virtualenv_packages.txt
-+++ b/build/virtualenv_packages.txt
-@@ -27,17 +27,16 @@ mozilla.pth:third_party/python/json-e
- mozilla.pth:build
- objdir:build
- mozilla.pth:build/pymake
- mozilla.pth:config
- mozilla.pth:dom/bindings
- mozilla.pth:dom/bindings/parser
- mozilla.pth:layout/tools/reftest
- mozilla.pth:other-licenses/ply/
--mozilla.pth:taskcluster
- mozilla.pth:testing
- mozilla.pth:testing/firefox-ui/harness
- mozilla.pth:testing/marionette/client
- mozilla.pth:testing/marionette/harness
- mozilla.pth:testing/marionette/harness/marionette_harness/runner/mixins/browsermob-proxy-py
- mozilla.pth:testing/marionette/puppeteer/firefox
- packages.txt:testing/mozbase/packages.txt
- mozilla.pth:tools
-diff --git a/moz.build b/moz.build
---- a/moz.build
-+++ b/moz.build
-@@ -56,17 +56,16 @@ CONFIGURE_SUBST_FILES += [
- ]
- 
- if CONFIG['ENABLE_CLANG_PLUGIN']:
-     DIRS += ['build/clang-plugin']
- 
- DIRS += [
-     'config',
-     'python',
--    'taskcluster',
-     'testing/mozbase',
-     'third_party/python',
- ]
- 
- if CONFIG['MOZ_WIDGET_TOOLKIT'] or not CONFIG['MOZ_BUILD_APP']:
-     # These python manifests are included here so they get picked up without an objdir
-     PYTHON_UNITTEST_MANIFESTS += [
-         'testing/marionette/harness/marionette_harness/tests/harness_unit/python.ini',
-diff --git a/taskcluster/ci/android-stuff/kind.yml b/taskcluster/ci/android-stuff/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/android-stuff/kind.yml
-+++ /dev/null
-@@ -1,317 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--# The name of this kind should suggest it's not meant to be permanent.  This is
--# a temporary place to generate these tasks in Bug 1286075 until they are
--# rewritten in a better way.
--
--loader: taskgraph.loader.transform:loader
--
--kind-dependencies:
--   - toolchain
--
--transforms:
--   - taskgraph.transforms.toolchain:transforms
--   - taskgraph.transforms.try_job:transforms
--   - taskgraph.transforms.android_stuff:transforms
--   - taskgraph.transforms.task:transforms
--
--jobs:
--    android-api-16-gradle-dependencies:
--        description: "Android armv7 API 16+ gradle dependencies"
--        index:
--            product: mobile
--            job-name: android-api-16-gradle-dependencies-opt
--        treeherder:
--            platform: android-4-0-armv7-api16/opt
--            kind: other
--            tier: 2
--            symbol: tc(Deps)
--        worker-type: aws-provisioner-v1/gecko-{level}-b-android
--        worker:
--            implementation: docker-worker
--            os: linux
--            docker-image: {in-tree: android-gradle-build}
--            env:
--                GRADLE_USER_HOME: "/home/worker/workspace/build/src/dotgradle-online"
--                MH_BUILD_POOL: "taskcluster"
--                MH_CUSTOM_BUILD_VARIANT_CFG: "api-16-gradle-dependencies"
--                MOZHARNESS_ACTIONS: "get-secrets build multi-l10n update"
--                MOZHARNESS_CONFIG: >
--                    builds/releng_base_android_64_builds.py
--                    disable_signing.py
--                    platform_supports_post_upload_to_latest.py
--                MOZHARNESS_SCRIPT: "mozharness/scripts/fx_desktop_build.py"
--                TOOLTOOL_CACHE: "/home/worker/tooltool-cache"
--                TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android-gradle-dependencies/releng.manifest"
--            artifacts:
--              - name: public/build
--                path: /home/worker/artifacts/
--                type: directory
--              - name: private/android-sdk
--                path: /home/worker/private/android-sdk
--                type: directory
--              - name: private/java_home
--                path: /home/worker/private/java_home
--                type: directory
--            caches:
--              - name: tooltool-cache
--                mount-point: /home/worker/tooltool-cache
--                type: persistent
--            relengapi-proxy: true
--            command:
--              - "/bin/bash"
--              - "-c"
--              - "/home/worker/bin/before.sh && /home/worker/bin/build.sh && /home/worker/bin/after.sh && true\n"
--            max-run-time: 36000
--        scopes:
--          - docker-worker:relengapi-proxy:tooltool.download.internal
--          - docker-worker:relengapi-proxy:tooltool.download.public
--        optimizations:
--          - - skip-unless-changed
--            - - "mobile/android/config/**"
--              - "testing/mozharness/configs/builds/releng_sub_android_configs/*gradle_dependencies.py"
--              - "**/*.gradle"
--              - "taskcluster/docker/android-gradle-build/**"
--        toolchains:
--          - linux64-sccache
--
--    android-test:
--        description: "Android armv7 unit tests"
--        treeherder:
--            platform: android-4-0-armv7-api16/opt
--            kind: test
--            tier: 2
--            symbol: tc(test)
--        worker-type: aws-provisioner-v1/gecko-{level}-b-android
--        worker:
--            implementation: docker-worker
--            os: linux
--            docker-image: {in-tree: desktop-build}
--            env:
--                GRADLE_USER_HOME: "/home/worker/workspace/build/src/dotgradle"
--                MH_BUILD_POOL: "taskcluster"
--                MH_CUSTOM_BUILD_VARIANT_CFG: "android-test"
--                MOZHARNESS_ACTIONS: "get-secrets build multi-l10n update"
--                MOZHARNESS_CONFIG: >
--                    builds/releng_base_android_64_builds.py
--                    disable_signing.py
--                    platform_supports_post_upload_to_latest.py
--                MOZHARNESS_SCRIPT: "mozharness/scripts/fx_desktop_build.py"
--                TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android-frontend/releng.manifest"
--            artifacts:
--              - name: public/android/unittest
--                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/reports/tests
--                type: directory
--              - name: public/build
--                path: /home/worker/artifacts/
--                type: directory
--            caches:
--              - name: tooltool-cache
--                mount-point: /home/worker/tooltool-cache
--                type: persistent
--            relengapi-proxy: true
--            command:
--              # NOTE: this could probably be a job description with run.using = 'mozharness'
--              - "/bin/bash"
--              - "bin/build.sh"
--            max-run-time: 36000
--        scopes:
--          - docker-worker:relengapi-proxy:tooltool.download.internal
--          - docker-worker:relengapi-proxy:tooltool.download.public
--        optimizations:
--          - - skip-unless-changed
--            - - "mobile/android/base/**"
--              - "mobile/android/tests/background/junit4/**"
--        toolchains:
--          - linux64-sccache
--
--    android-lint:
--        description: "Android lint"
--        treeherder:
--            platform: android-4-0-armv7-api16/opt
--            kind: test
--            tier: 2
--            symbol: tc(lint)
--        worker-type: aws-provisioner-v1/gecko-{level}-b-android
--        worker:
--            implementation: docker-worker
--            os: linux
--            docker-image: {in-tree: desktop-build}
--            env:
--                GRADLE_USER_HOME: "/home/worker/workspace/build/src/dotgradle"
--                MH_BUILD_POOL: "taskcluster"
--                MH_CUSTOM_BUILD_VARIANT_CFG: "android-lint"
--                MOZHARNESS_ACTIONS: "get-secrets build multi-l10n update"
--                MOZHARNESS_CONFIG: >
--                    builds/releng_base_android_64_builds.py
--                    disable_signing.py
--                    platform_supports_post_upload_to_latest.py
--                MOZHARNESS_SCRIPT: "mozharness/scripts/fx_desktop_build.py"
--                TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android-frontend/releng.manifest"
--            artifacts:
--              - name: public/android/lint/lint-results-officialAustralisDebug.html
--                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialAustralisDebug.html
--                type: file
--              - name: public/android/lint/lint-results-officialAustralisDebug.xml
--                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialAustralisDebug.xml
--                type: file
--              - name: public/android/lint/lint-results-officialAustralisDebug_files
--                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialAustralisDebug_files
--                type: directory
--              - name: public/android/lint/lint-results-officialPhotonDebug.html
--                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialPhotonDebug.html
--                type: file
--              - name: public/android/lint/lint-results-officialPhotonDebug.xml
--                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialPhotonDebug.xml
--                type: file
--              - name: public/android/lint/lint-results-officialPhotonDebug_files
--                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/lint-results-officialPhotonDebug_files
--                type: directory
--              - name: public/build
--                path: /home/worker/artifacts/
--                type: directory
--            caches:
--              - name: tooltool-cache
--                mount-point: /home/worker/tooltool-cache
--                type: persistent
--            relengapi-proxy: true
--            command:
--              # NOTE: this could probably be a job description with run.using = 'mozharness'
--              - "/bin/bash"
--              - "bin/build.sh"
--            max-run-time: 36000
--        scopes:
--          - docker-worker:relengapi-proxy:tooltool.download.internal
--          - docker-worker:relengapi-proxy:tooltool.download.public
--        optimizations:
--          - - skip-unless-changed
--            - - "mobile/android/**/*.java"
--              - "mobile/android/**/*.jpeg"
--              - "mobile/android/**/*.jpg"
--              - "mobile/android/**/*.png"
--              - "mobile/android/**/*.svg"
--              - "mobile/android/**/*.xml" # Manifest & android resources
--              - "mobile/android/**/*.gradle"
--              - "mobile/android/**/Makefile.in"
--              - "mobile/android/**/moz.build"
--        toolchains:
--          - linux64-sccache
--
--    android-checkstyle:
--        description: "Android checkstyle"
--        treeherder:
--            platform: android-4-0-armv7-api16/opt
--            kind: test
--            tier: 2
--            symbol: tc(checkstyle)
--        worker-type: aws-provisioner-v1/gecko-{level}-b-android
--        worker:
--            implementation: docker-worker
--            os: linux
--            docker-image: {in-tree: desktop-build}
--            env:
--                GRADLE_USER_HOME: "/home/worker/workspace/build/src/dotgradle"
--                MH_BUILD_POOL: "taskcluster"
--                MH_CUSTOM_BUILD_VARIANT_CFG: "android-checkstyle"
--                MOZHARNESS_ACTIONS: "get-secrets build multi-l10n update"
--                MOZHARNESS_CONFIG: >
--                    builds/releng_base_android_64_builds.py
--                    disable_signing.py
--                    platform_supports_post_upload_to_latest.py
--                MOZHARNESS_SCRIPT: "mozharness/scripts/fx_desktop_build.py"
--                TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android-frontend/releng.manifest"
--            artifacts:
--              - name: public/android/checkstyle/checkstyle.html
--                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/reports/checkstyle/checkstyle.html
--                type: file
--              - name: public/android/checkstyle/checkstyle.xml
--                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/reports/checkstyle/checkstyle.xml
--                type: file
--              - name: public/build
--                path: /home/worker/artifacts/
--                type: directory
--            caches:
--              - name: tooltool-cache
--                mount-point: /home/worker/tooltool-cache
--                type: persistent
--            relengapi-proxy: true
--            command:
--              # NOTE: this could probably be a job description with run.using = 'mozharness'
--              - "/bin/bash"
--              - "bin/build.sh"
--            max-run-time: 36000
--        scopes:
--          - docker-worker:relengapi-proxy:tooltool.download.internal
--          - docker-worker:relengapi-proxy:tooltool.download.public
--        optimizations:
--          - - skip-unless-changed
--            - - "mobile/android/**/checkstyle.xml"
--              - "mobile/android/**/*.java"
--              - "mobile/android/**/*.gradle"
--              - "mobile/android/**/Makefile.in"
--              - "mobile/android/**/moz.build"
--        toolchains:
--          - linux64-sccache
--
--    android-findbugs:
--        description: "Android findbugs"
--        treeherder:
--            platform: android-4-0-armv7-api16/opt
--            kind: test
--            tier: 2
--            symbol: tc(findbugs)
--        worker-type: aws-provisioner-v1/gecko-{level}-b-android
--        worker:
--            implementation: docker-worker
--            os: linux
--            docker-image: {in-tree: desktop-build}
--            env:
--                GRADLE_USER_HOME: "/home/worker/workspace/build/src/dotgradle"
--                MH_BUILD_POOL: "taskcluster"
--                MH_CUSTOM_BUILD_VARIANT_CFG: "android-findbugs"
--                MOZHARNESS_ACTIONS: "get-secrets build multi-l10n update"
--                MOZHARNESS_CONFIG: >
--                    builds/releng_base_android_64_builds.py
--                    disable_signing.py
--                    platform_supports_post_upload_to_latest.py
--                MOZHARNESS_SCRIPT: "mozharness/scripts/fx_desktop_build.py"
--                TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android-frontend/releng.manifest"
--            artifacts:
--              - name: public/android/findbugs/findbugs-officialAustralisDebug-output.html
--                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/findbugs/findbugs-officialAustralisDebug-output.html
--                type: file
--              - name: public/android/findbugs/findbugs-officialAustralisDebug-output.xml
--                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/findbugs/findbugs-officialAustralisDebug-output.xml
--                type: file
--              - name: public/android/findbugs/findbugs-officialPhotonDebug-output.html
--                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/findbugs/findbugs-officialPhotonDebug-output.html
--                type: file
--              - name: public/android/findbugs/findbugs-officialPhotonDebug-output.xml
--                path: /home/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/app/outputs/findbugs/findbugs-officialPhotonDebug-output.xml
--                type: file
--              - name: public/build
--                path: /home/worker/artifacts/
--                type: directory
--            caches:
--              - name: tooltool-cache
--                mount-point: /home/worker/tooltool-cache
--                type: persistent
--            relengapi-proxy: true
--            command:
--              # NOTE: this could probably be a job description with run.using = 'mozharness'
--              - "/bin/bash"
--              - "bin/build.sh"
--            max-run-time: 36000
--        scopes:
--          - docker-worker:relengapi-proxy:tooltool.download.internal
--          - docker-worker:relengapi-proxy:tooltool.download.public
--        optimizations:
--          - - skip-unless-changed
--            - - "mobile/android/**/*.java"
--              - "mobile/android/**/*.gradle"
--              - "mobile/android/**/Makefile.in"
--              - "mobile/android/**/moz.build"
--        toolchains:
--          - linux64-sccache
-diff --git a/taskcluster/ci/artifact-build/kind.yml b/taskcluster/ci/artifact-build/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/artifact-build/kind.yml
-+++ /dev/null
-@@ -1,47 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.transform:loader
--
--kind-dependencies:
--   - toolchain
--
--transforms:
--   - taskgraph.transforms.build_attrs:transforms
--   - taskgraph.transforms.toolchain:transforms
--   - taskgraph.transforms.job:transforms
--   - taskgraph.transforms.task:transforms
--
--jobs:
--    linux64-artifact/opt:
--        description: "Linux64 Opt Artifact Build"
--        index:
--            product: firefox
--            job-name: linux64-artifact-opt
--        treeherder:
--            platform: linux64/opt
--            kind: build
--            symbol: AB
--            tier: 2
--        worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--        worker:
--            docker-image: {in-tree: desktop-build}
--            max-run-time: 36000
--            env:
--                TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/releng.manifest"
--        run:
--            using: mozharness
--            actions: [get-secrets build]
--            config:
--                - builds/releng_sub_linux_configs/64_artifact.py
--                - balrog/production.py
--            script: "mozharness/scripts/fx_desktop_build.py"
--            secrets: true
--            tooltool-downloads: public
--            need-xvfb: true
--            keep-artifacts: false
--        toolchains:
--          - linux64-clang
--          - linux64-gcc
--          - linux64-sccache
-diff --git a/taskcluster/ci/balrog/kind.yml b/taskcluster/ci/balrog/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/balrog/kind.yml
-+++ /dev/null
-@@ -1,18 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.single_dep:loader
--
--transforms:
--   - taskgraph.transforms.balrog:transforms
--   - taskgraph.transforms.task:transforms
--
--kind-dependencies:
--  - beetmover
--  - beetmover-l10n
--  - beetmover-repackage
--
--only-for-attributes:
--  - nightly
--  - signed
-diff --git a/taskcluster/ci/beetmover-checksums/kind.yml b/taskcluster/ci/beetmover-checksums/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/beetmover-checksums/kind.yml
-+++ /dev/null
-@@ -1,15 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.single_dep:loader
--
--transforms:
--   - taskgraph.transforms.beetmover_checksums:transforms
--   - taskgraph.transforms.task:transforms
--
--kind-dependencies:
--  - checksums-signing
--
--only-for-attributes:
--  - nightly
-diff --git a/taskcluster/ci/beetmover-l10n/kind.yml b/taskcluster/ci/beetmover-l10n/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/beetmover-l10n/kind.yml
-+++ /dev/null
-@@ -1,23 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.single_dep:loader
--
--transforms:
--   - taskgraph.transforms.beetmover_l10n:transforms
--   - taskgraph.transforms.beetmover:transforms
--   - taskgraph.transforms.task:transforms
--
--kind-dependencies:
--  - nightly-l10n-signing
--
--only-for-attributes:
--  - nightly
--
--not-for-build-platforms:
--  - linux-nightly/opt
--  - linux64-nightly/opt
--  - macosx64-nightly/opt
--  - win32-nightly/opt
--  - win64-nightly/opt
-diff --git a/taskcluster/ci/beetmover-repackage/kind.yml b/taskcluster/ci/beetmover-repackage/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/beetmover-repackage/kind.yml
-+++ /dev/null
-@@ -1,20 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.single_dep:loader
--
--transforms:
--   - taskgraph.transforms.beetmover_repackage_l10n:transforms
--   - taskgraph.transforms.beetmover_repackage:transforms
--   - taskgraph.transforms.task:transforms
--
--kind-dependencies:
--  - repackage-signing
--
--only-for-build-platforms:
--  - linux-nightly/opt
--  - linux64-nightly/opt
--  - macosx64-nightly/opt
--  - win32-nightly/opt
--  - win64-nightly/opt
-diff --git a/taskcluster/ci/beetmover/kind.yml b/taskcluster/ci/beetmover/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/beetmover/kind.yml
-+++ /dev/null
-@@ -1,22 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.single_dep:loader
--
--transforms:
--   - taskgraph.transforms.beetmover:transforms
--   - taskgraph.transforms.task:transforms
--
--kind-dependencies:
--  - build-signing
--
--only-for-attributes:
--  - nightly
--
--not-for-build-platforms:
--  - linux-nightly/opt
--  - linux64-nightly/opt
--  - macosx64-nightly/opt
--  - win32-nightly/opt
--  - win64-nightly/opt
-diff --git a/taskcluster/ci/build-signing/kind.yml b/taskcluster/ci/build-signing/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/build-signing/kind.yml
-+++ /dev/null
-@@ -1,13 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.build_signing:loader
--
--transforms:
--   - taskgraph.transforms.build_signing:transforms
--   - taskgraph.transforms.signing:transforms
--   - taskgraph.transforms.task:transforms
--
--kind-dependencies:
--  - build
-diff --git a/taskcluster/ci/build/android.yml b/taskcluster/ci/build/android.yml
-deleted file mode 100644
---- a/taskcluster/ci/build/android.yml
-+++ /dev/null
-@@ -1,372 +0,0 @@
--android-api-16/debug:
--    description: "Android 4.0 api-16+ Debug"
--    index:
--        product: mobile
--        job-name: android-api-16-debug
--    treeherder:
--        platform: android-4-0-armv7-api16/debug
--        symbol: tc(B)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-android
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build multi-l10n update]
--        config:
--            - builds/releng_base_android_64_builds.py
--            - disable_signing.py
--            - platform_supports_post_upload_to_latest.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: api-16-debug
--        tooltool-downloads: internal
--    toolchains:
--        - linux64-gcc
--        - linux64-sccache
--
--android-x86/opt:
--    description: "Android 4.2 x86 Opt"
--    index:
--        product: mobile
--        job-name: android-x86-opt
--    treeherder:
--        platform: android-4-2-x86/opt
--        symbol: tc(B)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-android
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android-x86/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build multi-l10n update]
--        config:
--            - builds/releng_base_android_64_builds.py
--            - disable_signing.py
--            - platform_supports_post_upload_to_latest.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: x86
--        tooltool-downloads: internal
--    toolchains:
--        - linux64-gcc
--        - linux64-sccache
--
--android-x86-nightly/opt:
--    description: "Android 4.2 x86 Nightly"
--    attributes:
--        nightly: true
--    index:
--        product: mobile
--        job-name: android-x86-opt
--        type: nightly
--    treeherder:
--        platform: android-4-2-x86/opt
--        symbol: tc(N)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-android
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android-x86/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build multi-l10n update]
--        config:
--            - builds/releng_base_android_64_builds.py
--            - disable_signing.py
--            - platform_supports_post_upload_to_latest.py
--            - taskcluster_nightly.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: x86
--        tooltool-downloads: internal
--    toolchains:
--        - linux64-gcc
--        - linux64-sccache
--
--android-api-16/opt:
--    description: "Android 4.0 api-16+ Opt"
--    index:
--        product: mobile
--        job-name: android-api-16-opt
--    treeherder:
--        platform: android-4-0-armv7-api16/opt
--        symbol: tc(B)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-android
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build multi-l10n update]
--        config:
--            - builds/releng_base_android_64_builds.py
--            - disable_signing.py
--            - platform_supports_post_upload_to_latest.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: api-16
--        tooltool-downloads: internal
--    toolchains:
--        - linux64-gcc
--        - linux64-sccache
--
--android-api-16-nightly/opt:
--    description: "Android 4.0 api-16+ Nightly"
--    attributes:
--        nightly: true
--    index:
--        product: mobile
--        job-name: android-api-16-opt
--        type: nightly-with-multi-l10n
--    treeherder:
--        platform: android-4-0-armv7-api16/opt
--        symbol: tc(N)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-android
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build multi-l10n update]
--        config:
--            - builds/releng_base_android_64_builds.py
--            - disable_signing.py
--            - platform_supports_post_upload_to_latest.py
--            - taskcluster_nightly.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: api-16
--        tooltool-downloads: internal
--    toolchains:
--        - linux64-gcc
--        - linux64-sccache
--
--android-x86-old-id/opt:
--    description: "Android 4.2 x86 Opt OldId"
--    index:
--        product: mobile
--        job-name: android-x86-old-id-opt
--    treeherder:
--        platform: android-4-2-x86-old-id/opt
--        symbol: tc(B)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-android
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android-x86/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build multi-l10n update]
--        config:
--            - builds/releng_base_android_64_builds.py
--            - disable_signing.py
--            - platform_supports_post_upload_to_latest.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: x86-old-id
--        tooltool-downloads: internal
--    run-on-projects: [ 'mozilla-central' ]
--    toolchains:
--        - linux64-gcc
--        - linux64-sccache
--
--android-x86-old-id-nightly/opt:
--    description: "Android 4.2 x86 OldId Nightly"
--    attributes:
--        nightly: true
--    index:
--        product: mobile
--        job-name: android-x86-old-id-opt
--        type: nightly
--    treeherder:
--        platform: android-4-2-x86-old-id/opt
--        symbol: tc(N)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-android
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android-x86/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build multi-l10n update]
--        config:
--            - builds/releng_base_android_64_builds.py
--            - disable_signing.py
--            - platform_supports_post_upload_to_latest.py
--            - taskcluster_nightly.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: x86-old-id
--        tooltool-downloads: internal
--    run-on-projects: [ 'mozilla-central' ]
--    toolchains:
--        - linux64-gcc
--        - linux64-sccache
--
--android-api-16-old-id/opt:
--    description: "Android 4.0 api-16+ Opt OldId"
--    index:
--        product: mobile
--        job-name: android-api-16-old-id-opt
--    treeherder:
--        platform: android-4-0-armv7-api16-old-id/opt
--        symbol: tc(B)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-android
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build multi-l10n update]
--        config:
--            - builds/releng_base_android_64_builds.py
--            - disable_signing.py
--            - platform_supports_post_upload_to_latest.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: api-16-old-id
--        tooltool-downloads: internal
--    run-on-projects: [ 'mozilla-central' ]
--    toolchains:
--        - linux64-gcc
--        - linux64-sccache
--
--android-api-16-old-id-nightly/opt:
--    description: "Android 4.0 api-16+ OldId Nightly"
--    attributes:
--        nightly: true
--    index:
--        product: mobile
--        job-name: android-api-16-old-id-opt
--        type: nightly-with-multi-l10n
--    treeherder:
--        platform: android-4-0-armv7-api16-old-id/opt
--        symbol: tc(N)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-android
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build multi-l10n update]
--        config:
--            - builds/releng_base_android_64_builds.py
--            - disable_signing.py
--            - platform_supports_post_upload_to_latest.py
--            - taskcluster_nightly.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: api-16-old-id
--        tooltool-downloads: internal
--    run-on-projects: [ 'mozilla-central' ]
--    toolchains:
--        - linux64-gcc
--        - linux64-sccache
--
--android-api-16-gradle/opt:
--    description: "Android 4.0 api-16+ (Gradle) Opt"
--    index:
--        product: mobile
--        job-name: android-api-16-gradle-opt
--    treeherder:
--        platform: android-api-16-gradle/opt
--        symbol: tc(Bg)
--        tier: 2
--    worker-type: aws-provisioner-v1/gecko-{level}-b-android
--    worker:
--        max-run-time: 7200
--        env:
--            # Bug 1292762 - Set GRADLE_USER_HOME to avoid sdk-manager-plugin intermittent
--            GRADLE_USER_HOME: /builds/worker/workspace/build/src/dotgradle
--            TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android/releng.manifest"
--        artifacts:
--          - name: public/android/maven
--            path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/geckoview/maven/
--            type: directory
--          - name: public/build/geckoview_example.apk
--            path: /builds/worker/workspace/build/src/obj-firefox/gradle/build/mobile/android/geckoview_example/outputs/apk/geckoview_example-withGeckoBinaries.apk
--            type: file
--          - name: public/build
--            path: /builds/worker/artifacts/
--            type: directory
--    run:
--        using: mozharness
--        actions: [get-secrets build multi-l10n update]
--        config:
--            - builds/releng_base_android_64_builds.py
--            - disable_signing.py
--            - platform_supports_post_upload_to_latest.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: api-16-gradle
--        tooltool-downloads: internal
--    toolchains:
--        - linux64-gcc
--        - linux64-sccache
--
--android-aarch64/opt:
--    description: "Android 5.0 AArch64 Opt"
--    index:
--        product: mobile
--        job-name: android-aarch64-opt
--    treeherder:
--        platform: android-5-0-aarch64/opt
--        symbol: tc(B)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-android
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build multi-l10n update]
--        config:
--            - builds/releng_base_android_64_builds.py
--            - disable_signing.py
--            - platform_supports_post_upload_to_latest.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: aarch64
--        tooltool-downloads: internal
--    toolchains:
--        - linux64-gcc
--        - linux64-sccache
--
--android-aarch64-nightly/opt:
--    description: "Android 5.0 AArch64 Nightly"
--    attributes:
--        nightly: true
--    index:
--        product: mobile
--        job-name: android-aarch64-opt
--        type: nightly
--    treeherder:
--        platform: android-5-0-aarch64/opt
--        symbol: tc(N)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-android
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "mobile/android/config/tooltool-manifests/android/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build multi-l10n update]
--        config:
--            - builds/releng_base_android_64_builds.py
--            - disable_signing.py
--            - platform_supports_post_upload_to_latest.py
--            - taskcluster_nightly.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: aarch64
--        tooltool-downloads: internal
--    toolchains:
--        - linux64-gcc
--        - linux64-sccache
-diff --git a/taskcluster/ci/build/kind.yml b/taskcluster/ci/build/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/build/kind.yml
-+++ /dev/null
-@@ -1,24 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.transform:loader
--
--kind-dependencies:
--   - toolchain
--
--transforms:
--   - taskgraph.transforms.build:transforms
--   - taskgraph.transforms.build_attrs:transforms
--   - taskgraph.transforms.build_lints:transforms
--   - taskgraph.transforms.toolchain:transforms
--   - taskgraph.transforms.job:transforms
--   - taskgraph.transforms.task:transforms
--
--jobs-from:
--    - android.yml
--    - linux.yml
--    - macosx.yml
--    - windows.yml
--
--parse-commit: taskgraph.try_option_syntax:parse_message
-diff --git a/taskcluster/ci/build/linux.yml b/taskcluster/ci/build/linux.yml
-deleted file mode 100644
---- a/taskcluster/ci/build/linux.yml
-+++ /dev/null
-@@ -1,612 +0,0 @@
--linux64/opt:
--    description: "Linux64 Opt"
--    index:
--        product: firefox
--        job-name: linux64-opt
--    treeherder:
--        platform: linux64/opt
--        symbol: tc(B)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build check-test update]
--        config:
--            - builds/releng_base_linux_64_builds.py
--            - balrog/production.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        tooltool-downloads: public
--        need-xvfb: true
--    toolchains:
--        - linux64-clang
--        - linux64-gcc
--        - linux64-sccache
--
--linux64-dmd/opt:
--    description: "Linux64 DMD Opt"
--    index:
--        product: firefox
--        job-name: linux64-dmd-opt
--    treeherder:
--        platform: linux64-dmd/opt
--        symbol: tc(Bdmd)
--        tier: 2
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build check-test update]
--        config:
--            - builds/releng_base_linux_64_builds.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        custom-build-variant-cfg: dmd
--        secrets: true
--        tooltool-downloads: public
--        need-xvfb: true
--    run-on-projects: [ ]
--    toolchains:
--        - linux64-clang
--        - linux64-gcc
--        - linux64-sccache
--
--linux64/pgo:
--    description: "Linux64 PGO"
--    index:
--        product: firefox
--        job-name: linux64-pgo
--    treeherder:
--        platform: linux64/pgo
--        symbol: tc(B)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/releng.manifest"
--    coalesce-name: linux64-pgo
--    run:
--        using: mozharness
--        actions: [get-secrets build check-test update]
--        options: [enable-pgo]
--        config:
--            - builds/releng_base_linux_64_builds.py
--            - balrog/production.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        tooltool-downloads: public
--        need-xvfb: true
--    toolchains:
--        - linux64-clang
--        - linux64-gcc
--        - linux64-sccache
--
--linux64/debug:
--    description: "Linux64 Debug"
--    index:
--        product: firefox
--        job-name: linux64-debug
--    treeherder:
--        platform: linux64/debug
--        symbol: tc(B)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build check-test update]
--        config:
--            - builds/releng_base_linux_64_builds.py
--            - balrog/production.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: debug
--        tooltool-downloads: public
--        need-xvfb: true
--    toolchains:
--        - linux64-clang
--        - linux64-gcc
--        - linux64-sccache
--
--linux64-devedition-nightly/opt:
--    description: "Linux64 devedition Nightly"
--    attributes:
--        nightly: true
--    index:
--        product: devedition
--        job-name: linux64-opt
--        type: nightly
--    treeherder:
--        platform: linux64-devedition/opt
--        symbol: tc(N)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build check-test update]
--        config:
--            - builds/releng_base_linux_64_builds.py
--            - disable_signing.py
--            - balrog/production.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        tooltool-downloads: public
--        need-xvfb: true
--        custom-build-variant-cfg: devedition
--    run-on-projects: [ 'mozilla-beta', ]
--    toolchains:
--        - linux64-clang
--        - linux64-gcc
--        - linux64-sccache
--
--linux64-base-toolchains/opt:
--    description: "Linux64 base toolchains Opt"
--    index:
--        product: firefox
--        job-name: linux64-base-toolchains-opt
--    treeherder:
--        platform: linux64/opt
--        symbol: tc(Bb)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        max-run-time: 36000
--        env:
--            PERFHERDER_EXTRA_OPTIONS: base-toolchains
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/base-toolchains.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build check-test update]
--        config:
--            - builds/releng_base_linux_64_builds.py
--            - balrog/production.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        tooltool-downloads: public
--        need-xvfb: true
--    toolchains:
--        - linux64-clang-3.9
--        - linux64-gcc-4.9
--        - linux64-sccache
--
--linux64-base-toolchains/debug:
--    description: "Linux64 base toolchains Debug"
--    index:
--        product: firefox
--        job-name: linux64-base-toolchains-debug
--    treeherder:
--        platform: linux64/debug
--        symbol: tc(Bb)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        max-run-time: 36000
--        env:
--            PERFHERDER_EXTRA_OPTIONS: base-toolchains
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/base-toolchains.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build check-test update]
--        config:
--            - builds/releng_base_linux_64_builds.py
--            - balrog/production.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: debug
--        tooltool-downloads: public
--        need-xvfb: true
--    toolchains:
--        - linux64-clang-3.9
--        - linux64-gcc-4.9
--        - linux64-sccache
--
--linux/opt:
--    description: "Linux32 Opt"
--    index:
--        product: firefox
--        job-name: linux-opt
--    treeherder:
--        platform: linux32/opt
--        symbol: tc(B)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux32/releng.manifest"
--    coalesce-name: opt_linux32
--    run:
--        using: mozharness
--        actions: [get-secrets build check-test update]
--        config:
--            - builds/releng_base_linux_32_builds.py
--            - balrog/production.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        tooltool-downloads: public
--        need-xvfb: true
--    toolchains:
--        - linux64-clang
--        - linux64-gcc
--        - linux64-sccache
--
--linux/debug:
--    description: "Linux32 Debug"
--    index:
--        product: firefox
--        job-name: linux-debug
--    treeherder:
--        platform: linux32/debug
--        symbol: tc(B)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux32/releng.manifest"
--    coalesce-name: dbg_linux32
--    run:
--        using: mozharness
--        actions: [get-secrets build check-test update]
--        config:
--            - builds/releng_base_linux_32_builds.py
--            - balrog/production.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: debug
--        tooltool-downloads: public
--        need-xvfb: true
--    toolchains:
--        - linux64-clang
--        - linux64-gcc
--        - linux64-sccache
--
--linux/pgo:
--    description: "Linux32 PGO"
--    index:
--        product: firefox
--        job-name: linux-pgo
--    treeherder:
--        platform: linux32/pgo
--        symbol: tc(B)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux32/releng.manifest"
--    coalesce-name: linux32-pgo
--    run:
--        using: mozharness
--        actions: [get-secrets build check-test update]
--        options: [enable-pgo]
--        config:
--            - builds/releng_base_linux_32_builds.py
--            - balrog/production.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        tooltool-downloads: public
--        need-xvfb: true
--    toolchains:
--        - linux64-clang
--        - linux64-gcc
--        - linux64-sccache
--
--linux-devedition-nightly/opt:
--    description: "Linux32 devedition Nightly"
--    attributes:
--        nightly: true
--    index:
--        product: devedition
--        job-name: linux-opt
--        type: nightly
--    treeherder:
--        platform: linux32-devedition/opt
--        symbol: tc(N)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux32/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build check-test update]
--        config:
--            - builds/releng_base_linux_32_builds.py
--            - disable_signing.py
--            - balrog/production.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        tooltool-downloads: public
--        need-xvfb: true
--        custom-build-variant-cfg: devedition
--    run-on-projects: [ 'mozilla-beta', ]
--    toolchains:
--        - linux64-clang
--        - linux64-gcc
--        - linux64-sccache
--
--linux-nightly/opt:
--    description: "Linux32 Nightly"
--    attributes:
--        nightly: true
--    index:
--        product: firefox
--        job-name: linux-opt
--        type: nightly
--    treeherder:
--        platform: linux32/opt
--        symbol: tc(N)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux32/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build check-test update]
--        config:
--            - builds/releng_base_linux_32_builds.py
--            - disable_signing.py
--            - taskcluster_nightly.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        tooltool-downloads: public
--        need-xvfb: true
--    toolchains:
--        - linux64-clang
--        - linux64-gcc
--        - linux64-sccache
--
--
--linux64-asan/opt:
--    description: "Linux64 Opt ASAN"
--    index:
--        product: firefox
--        job-name: linux64-asan-opt
--    treeherder:
--        platform: linux64/asan
--        symbol: tc(Bo)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        env:
--            PERFHERDER_EXTRA_OPTIONS: "opt asan"
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/asan.manifest"
--        max-run-time: 36000
--    run:
--        using: mozharness
--        actions: [get-secrets build check-test update]
--        config:
--            - builds/releng_base_linux_64_builds.py
--            - balrog/production.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: asan-tc
--        tooltool-downloads: public
--        need-xvfb: true
--    toolchains:
--        - linux64-clang
--        - linux64-gcc
--        - linux64-sccache
--
--
--linux64-asan-fuzzing/opt:
--    description: "Linux64 Fuzzing Opt ASAN"
--    index:
--        product: firefox
--        job-name: linux64-fuzzing-asan-opt
--    treeherder:
--        platform: linux64/asan
--        symbol: tc(Bof)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        env:
--            PERFHERDER_EXTRA_OPTIONS: asan-fuzzing
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/fuzzing.manifest"
--        max-run-time: 36000
--    run:
--        using: mozharness
--        actions: [get-secrets build check-test update]
--        config:
--            - builds/releng_base_linux_64_builds.py
--            - balrog/production.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: fuzzing-asan-tc
--        tooltool-downloads: public
--        need-xvfb: true
--    toolchains:
--        - linux64-gcc
--        - linux64-sccache
--
--linux64-asan/debug:
--    description: "Linux64 Debug ASAN"
--    index:
--        product: firefox
--        job-name: linux64-asan-debug
--    treeherder:
--        platform: linux64/asan
--        symbol: tc(Bd)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        env:
--            PERFHERDER_EXTRA_OPTIONS: "debug asan"
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/asan.manifest"
--        max-run-time: 36000
--    run:
--        using: mozharness
--        actions: [get-secrets build check-test update]
--        config:
--            - builds/releng_base_linux_64_builds.py
--            - balrog/production.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: asan-tc-and-debug
--        tooltool-downloads: public
--        need-xvfb: true
--    toolchains:
--        - linux64-clang
--        - linux64-gcc
--        - linux64-sccache
--
--linux64-nightly/opt:
--    description: "Linux64 Nightly"
--    attributes:
--        nightly: true
--    index:
--        product: firefox
--        job-name: linux64-opt
--        type: nightly
--    treeherder:
--        platform: linux64/opt
--        symbol: tc(N)
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build check-test update]
--        config:
--            - builds/releng_base_linux_64_builds.py
--            - disable_signing.py
--            - taskcluster_nightly.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        tooltool-downloads: public
--        need-xvfb: true
--    toolchains:
--        - linux64-clang
--        - linux64-gcc
--        - linux64-sccache
--
--linux64-noopt/debug:
--    description: "Linux64 No-optimize Debug"
--    index:
--        product: firefox
--        job-name: linux64-noopt-debug
--    treeherder:
--        platform: linux64-noopt/debug
--        symbol: tc(B)
--        tier: 2
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        max-run-time: 3600
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build check-test update]
--        config:
--            - builds/releng_base_linux_64_builds.py
--            - balrog/production.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: noopt-debug
--        tooltool-downloads: public
--        keep-artifacts: false
--        need-xvfb: true
--    run-on-projects: [ 'trunk', 'try' ]
--    toolchains:
--        - linux64-clang
--        - linux64-gcc
--        - linux64-sccache
--
--linux64-jsdcov/opt:
--    description: "Linux64-JSDCov Opt"
--    index:
--        product: firefox
--        job-name: linux64-jsdcov-opt
--    treeherder:
--        platform: linux64-jsdcov/opt
--        symbol: tc(B)
--        tier: 2
--    run-on-projects: [ ]
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build check-test update]
--        config:
--            - builds/releng_base_linux_64_builds.py
--            - balrog/production.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        tooltool-downloads: public
--        need-xvfb: true
--    toolchains:
--        - linux64-clang
--        - linux64-gcc
--        - linux64-sccache
--
--linux64-ccov/opt:
--    description: "Linux64-CCov Opt"
--    index:
--        product: firefox
--        job-name: linux64-ccov-opt
--    needs-sccache: false
--    treeherder:
--        platform: linux64-ccov/opt
--        symbol: tc(B)
--        tier: 2
--    run-on-projects: [ ]
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build check-test update]
--        config:
--            - builds/releng_base_linux_64_builds.py
--            - balrog/production.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: code-coverage
--        tooltool-downloads: public
--        need-xvfb: true
--    toolchains:
--        - linux64-clang
--        - linux64-gcc
--        - linux64-sccache
--
--linux64-add-on-devel/opt:
--    description: "Linux64 add-on-devel"
--    index:
--        product: firefox
--        job-name: linux64-add-on-devel
--    treeherder:
--        platform: linux64-add-on-devel/opt
--        symbol: tc(B)
--        tier: 2
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build check-test update]
--        config:
--            - builds/releng_base_linux_64_builds.py
--            - balrog/production.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: add-on-devel
--        tooltool-downloads: public
--        need-xvfb: true
--    run-on-projects: [ 'mozilla-release', 'mozilla-esr45' ]
--    toolchains:
--        - linux64-clang
--        - linux64-gcc
--        - linux64-sccache
-diff --git a/taskcluster/ci/build/macosx.yml b/taskcluster/ci/build/macosx.yml
-deleted file mode 100644
---- a/taskcluster/ci/build/macosx.yml
-+++ /dev/null
-@@ -1,225 +0,0 @@
--macosx64/debug:
--    description: "MacOS X x64 Cross-compile"
--    index:
--        product: firefox
--        job-name: macosx64-debug
--    treeherder:
--        platform: osx-cross/debug
--        symbol: tc(B)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-macosx64
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/macosx64/cross-releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build update]
--        config:
--            - builds/releng_base_mac_64_cross_builds.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: cross-debug
--        tooltool-downloads: internal
--    toolchains:
--        - linux64-cctools-port
--        - linux64-clang
--        - linux64-hfsplus
--        - linux64-libdmg
--        - linux64-sccache
--
--macosx64/opt:
--    description: "MacOS X x64 Cross-compile"
--    index:
--        product: firefox
--        job-name: macosx64-opt
--    treeherder:
--        platform: osx-cross/opt
--        symbol: tc(B)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-macosx64
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/macosx64/cross-releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build update]
--        config:
--            - builds/releng_base_mac_64_cross_builds.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        tooltool-downloads: internal
--    toolchains:
--        - linux64-cctools-port
--        - linux64-clang
--        - linux64-hfsplus
--        - linux64-libdmg
--        - linux64-sccache
--
--macosx64-dmd/opt:
--    description: "MacOS X x64 DMD Cross-compile"
--    index:
--        product: firefox
--        job-name: macosx64-dmd-opt
--    treeherder:
--        platform: osx-10-10-dmd/opt
--        symbol: tc(Bdmd)
--        tier: 2
--    worker-type: aws-provisioner-v1/gecko-{level}-b-macosx64
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/macosx64/cross-releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build update]
--        config:
--            - builds/releng_base_mac_64_cross_builds.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        custom-build-variant-cfg: dmd
--        secrets: true
--        tooltool-downloads: internal
--    run-on-projects: [ ]
--    toolchains:
--        - linux64-cctools-port
--        - linux64-clang
--        - linux64-hfsplus
--        - linux64-libdmg
--        - linux64-sccache
--
--macosx64-devedition-nightly/opt:
--    description: "MacOS X Dev Edition x64 Nightly"
--    attributes:
--        nightly: true
--    index:
--        product: devedition
--        job-name: macosx64-opt
--        type: nightly
--    treeherder:
--        platform: osx-cross-devedition/opt
--        symbol: tc(N)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-macosx64
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/macosx64/cross-releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build update]
--        config:
--            - builds/releng_base_mac_64_cross_builds.py
--            - disable_signing.py
--            - taskcluster_nightly.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        tooltool-downloads: internal
--        custom-build-variant-cfg: devedition
--    run-on-projects: ['mozilla-beta',]
--    toolchains:
--        - linux64-cctools-port
--        - linux64-clang
--        - linux64-hfsplus
--        - linux64-libdmg
--        - linux64-sccache
--
--macosx64-noopt/debug:
--    description: "MacOS X x64 No-optimize Debug"
--    index:
--        product: firefox
--        job-name: macosx64-noopt-debug
--    treeherder:
--        platform: osx-cross-noopt/debug
--        symbol: tc(B)
--        tier: 2
--    worker-type: aws-provisioner-v1/gecko-{level}-b-macosx64
--    worker:
--        docker-image: {in-tree: desktop-build}
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/macosx64/cross-releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build update]
--        config:
--            - builds/releng_base_mac_64_cross_builds.py
--            - balrog/production.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        custom-build-variant-cfg: cross-noopt-debug
--        tooltool-downloads: internal
--        keep-artifacts: false
--    run-on-projects: [ 'trunk', 'try' ]
--    toolchains:
--        - linux64-cctools-port
--        - linux64-clang
--        - linux64-hfsplus
--        - linux64-libdmg
--        - linux64-sccache
--
--macosx64-add-on-devel/opt:
--    description: "MacOS X x64 add-on-devel"
--    index:
--        product: firefox
--        job-name: macosx64-add-on-devel
--    treeherder:
--        platform: osx-cross-add-on-devel/opt
--        symbol: tc(B)
--        tier: 2
--    worker-type: aws-provisioner-v1/gecko-{level}-b-macosx64
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/macosx64/cross-releng.manifest"
--    run:
--       using: mozharness
--       actions: [get-secrets build update]
--       config:
--            - builds/releng_base_mac_64_cross_builds.py
--            - balrog/production.py
--       script: "mozharness/scripts/fx_desktop_build.py"
--       secrets: true
--       custom-build-variant-cfg: add-on-devel
--       tooltool-downloads: internal
--    run-on-projects: [ 'mozilla-release', 'mozilla-esr45' ]
--    toolchains:
--        - linux64-cctools-port
--        - linux64-clang
--        - linux64-hfsplus
--        - linux64-libdmg
--        - linux64-sccache
--
--macosx64-nightly/opt:
--    description: "MacOS X x64 Cross-compile Nightly"
--    attributes:
--        nightly: true
--    index:
--        product: firefox
--        job-name: macosx64-opt
--        type: nightly
--    treeherder:
--        platform: osx-cross/opt
--        symbol: tc(N)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-macosx64
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/macosx64/cross-releng.manifest"
--    run:
--        using: mozharness
--        actions: [get-secrets build update]
--        config:
--            - builds/releng_base_mac_64_cross_builds.py
--            - disable_signing.py
--            - taskcluster_nightly.py
--        script: "mozharness/scripts/fx_desktop_build.py"
--        secrets: true
--        tooltool-downloads: internal
--    toolchains:
--        - linux64-cctools-port
--        - linux64-clang
--        - linux64-hfsplus
--        - linux64-libdmg
--        - linux64-sccache
-diff --git a/taskcluster/ci/build/windows.yml b/taskcluster/ci/build/windows.yml
-deleted file mode 100644
---- a/taskcluster/ci/build/windows.yml
-+++ /dev/null
-@@ -1,453 +0,0 @@
--win32/debug:
--    description: "Win32 Debug"
--    index:
--        product: firefox
--        job-name: win32-debug
--    treeherder:
--        platform: windows2012-32/debug
--        symbol: tc(B)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win32/releng.manifest"
--    run:
--        using: mozharness
--        script: mozharness/scripts/fx_desktop_build.py
--        config:
--            - builds/taskcluster_firefox_windows_32_debug.py
--    toolchains:
--        - win32-clang-cl
--        - win64-sccache
--
--win32/opt:
--    description: "Win32 Opt"
--    index:
--        product: firefox
--        job-name: win32-opt
--    treeherder:
--        platform: windows2012-32/opt
--        symbol: tc(B)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win32/releng.manifest"
--    run:
--        using: mozharness
--        script: mozharness/scripts/fx_desktop_build.py
--        config:
--            - builds/taskcluster_firefox_windows_32_opt.py
--    toolchains:
--        - win32-clang-cl
--        - win64-sccache
--
--win32-dmd/opt:
--    description: "Win32 DMD Opt"
--    index:
--        product: firefox
--        job-name: win32-dmd-opt
--    treeherder:
--        platform: windows2012-32-dmd/opt
--        symbol: tc(Bdmd)
--        tier: 2
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win32/releng.manifest"
--    run:
--        using: mozharness
--        script: mozharness/scripts/fx_desktop_build.py
--        config:
--            - builds/taskcluster_firefox_windows_32_opt.py
--        custom-build-variant-cfg: dmd
--    run-on-projects: [ ]
--    toolchains:
--        - win32-clang-cl
--        - win64-sccache
--
--win32/pgo:
--    description: "Win32 Opt PGO"
--    index:
--        product: firefox
--        job-name: win32-pgo
--    treeherder:
--        platform: windows2012-32/pgo
--        symbol: tc(B)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 9000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win32/releng.manifest"
--    run:
--        using: mozharness
--        options: [enable-pgo]
--        script: mozharness/scripts/fx_desktop_build.py
--        config:
--            - builds/taskcluster_firefox_windows_32_opt.py
--    toolchains:
--        - win32-clang-cl
--        - win64-sccache
--
--win64/debug:
--    description: "Win64 Debug"
--    index:
--        product: firefox
--        job-name: win64-debug
--    treeherder:
--        platform: windows2012-64/debug
--        symbol: tc(B)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win64/releng.manifest"
--    run:
--        using: mozharness
--        script: mozharness/scripts/fx_desktop_build.py
--        config:
--            - builds/taskcluster_firefox_windows_64_debug.py
--    toolchains:
--        - win64-clang-cl
--        - win64-sccache
--
--win64/opt:
--    description: "Win64 Opt"
--    index:
--        product: firefox
--        job-name: win64-opt
--    treeherder:
--        platform: windows2012-64/opt
--        symbol: tc(B)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win64/releng.manifest"
--    run:
--        using: mozharness
--        script: mozharness/scripts/fx_desktop_build.py
--        config:
--            - builds/taskcluster_firefox_windows_64_opt.py
--    toolchains:
--        - win64-clang-cl
--        - win64-sccache
--
--win64-dmd/opt:
--    description: "Win64 DMD Opt"
--    index:
--        product: firefox
--        job-name: win64-dmd-opt
--    treeherder:
--        platform: windows2012-64-dmd/opt
--        symbol: tc(Bdmd)
--        tier: 2
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win64/releng.manifest"
--    run:
--        using: mozharness
--        script: mozharness/scripts/fx_desktop_build.py
--        config:
--            - builds/taskcluster_firefox_windows_64_opt.py
--        custom-build-variant-cfg: dmd
--    run-on-projects: [ ]
--    toolchains:
--        - win64-clang-cl
--        - win64-sccache
--
--win32-nightly/opt:
--    description: "Win32 Nightly"
--    index:
--        product: firefox
--        job-name: win32-opt
--        type: nightly
--    attributes:
--        nightly: true
--    treeherder:
--        platform: windows2012-32/opt
--        symbol: tc(N)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win32/releng.manifest"
--    run:
--        using: mozharness
--        actions: [clone-tools, build, check-test, update]
--        script: mozharness/scripts/fx_desktop_build.py
--        config:
--            - builds/taskcluster_firefox_windows_32_opt.py
--            - disable_signing.py
--            - taskcluster_nightly.py
--    toolchains:
--        - win32-clang-cl
--        - win64-sccache
--
--win64-nightly/opt:
--    description: "Win64 Nightly"
--    index:
--        product: firefox
--        job-name: win64-opt
--        type: nightly
--    attributes:
--        nightly: true
--    treeherder:
--        platform: windows2012-64/opt
--        symbol: tc(N)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win64/releng.manifest"
--    run:
--        using: mozharness
--        actions: [clone-tools, build, check-test, update]
--        script: mozharness/scripts/fx_desktop_build.py
--        config:
--            - builds/taskcluster_firefox_windows_64_opt.py
--            - disable_signing.py
--            - taskcluster_nightly.py
--    toolchains:
--        - win64-clang-cl
--        - win64-sccache
--
--win64/pgo:
--    description: "Win64 Opt PGO"
--    index:
--        product: firefox
--        job-name: win64-pgo
--    treeherder:
--        platform: windows2012-64/pgo
--        symbol: tc(B)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 10800
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win64/releng.manifest"
--    run:
--        using: mozharness
--        options: [enable-pgo]
--        script: mozharness/scripts/fx_desktop_build.py
--        config:
--            - builds/taskcluster_firefox_windows_64_opt.py
--    toolchains:
--        - win64-clang-cl
--        - win64-sccache
--
--win32-add-on-devel/opt:
--    description: "Windows32 add-on-devel"
--    index:
--        product: firefox
--        job-name: win32-add-on-devel
--    treeherder:
--        platform: windows2012-32-add-on-devel/opt
--        symbol: tc(B)
--        tier: 2
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 10800
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win32/releng.manifest"
--    run:
--        using: mozharness
--        script: "mozharness/scripts/fx_desktop_build.py"
--        config:
--            - builds/taskcluster_firefox_windows_32_addondevel.py
--            - balrog/production.py
--    run-on-projects: [ 'mozilla-release', 'mozilla-esr45' ]
--    toolchains:
--        - win32-clang-cl
--        - win64-sccache
--
--win64-add-on-devel/opt:
--    description: "Windows64 add-on-devel"
--    index:
--        product: firefox
--        job-name: win64-add-on-devel
--    treeherder:
--        platform: windows2012-64-add-on-devel/opt
--        symbol: tc(B)
--        tier: 2
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 10800
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win64/releng.manifest"
--    run:
--        using: mozharness
--        script: "mozharness/scripts/fx_desktop_build.py"
--        config:
--            - builds/taskcluster_firefox_windows_64_addondevel.py
--            - balrog/production.py
--    run-on-projects: [ 'mozilla-release', 'mozilla-esr45' ]
--    toolchains:
--        - win64-clang-cl
--        - win64-sccache
--
--win64-noopt/debug:
--    description: "Win64 No-optimize Debug"
--    index:
--        product: firefox
--        job-name: win64-noopt-debug
--    treeherder:
--        platform: windows2012-64-noopt/debug
--        symbol: tc(B)
--        tier: 2
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win64/releng.manifest"
--    run:
--        using: mozharness
--        script: mozharness/scripts/fx_desktop_build.py
--        config:
--            - builds/taskcluster_firefox_win64_noopt_debug.py
--    run-on-projects: [ 'trunk', 'try' ]
--    toolchains:
--        - win64-clang-cl
--        - win64-sccache
--
--win32-noopt/debug:
--    description: "Win32 No-optimize Debug"
--    index:
--        product: firefox
--        job-name: win32-noopt-debug
--    treeherder:
--        platform: windows2012-32-noopt/debug
--        symbol: tc(B)
--        tier: 2
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win32/releng.manifest"
--    run:
--        using: mozharness
--        script: mozharness/scripts/fx_desktop_build.py
--        config:
--            - builds/taskcluster_firefox_win32_noopt_debug.py
--    run-on-projects: [ 'trunk', 'try' ]
--    toolchains:
--        - win32-clang-cl
--        - win64-sccache
--
--win64-asan/debug:
--    description: "Win64 Debug ASAN"
--    index:
--        product: firefox
--        job-name: win64-asan-debug
--    treeherder:
--        platform: windows2012-64/asan
--        symbol: tc(Bd)
--        tier: 3
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win64/clang.manifest"
--    run:
--        using: mozharness
--        script: mozharness/scripts/fx_desktop_build.py
--        config:
--            - builds/taskcluster_firefox_win64_asan_debug.py
--    run-on-projects: []
--    toolchains:
--        - win64-clang-cl
--        - win64-sccache
--
--win64-asan/opt:
--    description: "Win64 Opt ASAN"
--    index:
--        product: firefox
--        job-name: win64-asan-opt
--    treeherder:
--        platform: windows2012-64/asan
--        symbol: tc(Bo)
--        tier: 3
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win64/clang.manifest"
--    run:
--        using: mozharness
--        script: mozharness/scripts/fx_desktop_build.py
--        config:
--            - builds/taskcluster_firefox_win64_asan_opt.py
--    run-on-projects: []
--    toolchains:
--        - win64-clang-cl
--        - win64-sccache
--
--win32-devedition-nightly/opt:
--    description: "Win32 Dev Edition Nightly"
--    index:
--        product: devedition
--        job-name: win32-opt
--        type: nightly
--    attributes:
--        nightly: true
--    treeherder:
--        platform: windows2012-32-devedition/opt
--        symbol: tc(N)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win32/releng.manifest"
--    run:
--        using: mozharness
--        script: mozharness/scripts/fx_desktop_build.py
--        config:
--            - builds/taskcluster_firefox_windows_32_opt.py
--            - disable_signing.py
--            - taskcluster_nightly.py
--        custom-build-variant-cfg: devedition
--    run-on-projects: [ 'mozilla-beta', ]
--    toolchains:
--        - win32-clang-cl
--        - win64-sccache
--
--win64-devedition-nightly/opt:
--    description: "Win64 Dev Edition Nightly"
--    index:
--        product: devedition
--        job-name: win64-opt
--        type: nightly
--    attributes:
--        nightly: true
--    treeherder:
--        platform: windows2012-64-devedition/opt
--        symbol: tc(N)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 7200
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win64/releng.manifest"
--    run:
--        using: mozharness
--        script: mozharness/scripts/fx_desktop_build.py
--        config:
--            - builds/taskcluster_firefox_windows_64_opt.py
--            - disable_signing.py
--            - taskcluster_nightly.py
--        custom-build-variant-cfg: devedition
--    run-on-projects: [ 'mozilla-beta', ]
--    toolchains:
--        - win64-clang-cl
--        - win64-sccache
-diff --git a/taskcluster/ci/checksums-signing/kind.yml b/taskcluster/ci/checksums-signing/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/checksums-signing/kind.yml
-+++ /dev/null
-@@ -1,17 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.single_dep:loader
--
--transforms:
--   - taskgraph.transforms.checksums_signing:transforms
--   - taskgraph.transforms.task:transforms
--
--kind-dependencies:
--  - beetmover
--  - beetmover-l10n
--  - beetmover-repackage
--
--only-for-attributes:
--  - nightly
-diff --git a/taskcluster/ci/docker-image/kind.yml b/taskcluster/ci/docker-image/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/docker-image/kind.yml
-+++ /dev/null
-@@ -1,28 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.transform:loader
--
--transforms:
--  - taskgraph.transforms.docker_image:transforms
--  - taskgraph.transforms.task:transforms
--
--# make a task for each docker-image we might want.  For the moment, since we
--# write artifacts for each, these are whitelisted, but ideally that will change
--# (to use subdirectory clones of the proper directory), at which point we can
--# generate tasks for every docker image in the directory, secure in the
--# knowledge that unnecessary images will be omitted from the target task graph
--jobs:
--  desktop-test:
--    symbol: I(dt)
--  desktop1604-test:
--    symbol: I(dt16t)
--  desktop-build:
--    symbol: I(db)
--  lint:
--    symbol: I(lnt)
--  android-gradle-build:
--    symbol: I(agb)
--  index-task:
--    symbol: I(idx)
-diff --git a/taskcluster/ci/hazard/kind.yml b/taskcluster/ci/hazard/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/hazard/kind.yml
-+++ /dev/null
-@@ -1,71 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.transform:loader
--
--kind-dependencies:
--   - toolchain
--
--transforms:
--   - taskgraph.transforms.build_attrs:transforms
--   - taskgraph.transforms.toolchain:transforms
--   - taskgraph.transforms.job:transforms
--   - taskgraph.transforms.task:transforms
--
--job-defaults:
--    treeherder:
--        kind: build
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        max-run-time: 36000
--        docker-image: {in-tree: desktop-build}
--
--jobs:
--    linux64-shell-haz/debug:
--        description: "JS Shell Hazard Analysis Linux"
--        index:
--            product: firefox
--            job-name: shell-haz-debug
--        treeherder:
--            platform: linux64/debug
--            symbol: SM-tc(H)
--        worker:
--            env:
--                TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/hazard.manifest"
--        run:
--            using: hazard
--            command: >
--                cd /builds/worker/checkouts/gecko/taskcluster/scripts/builder
--                && ./build-haz-linux.sh --project shell $HOME/workspace
--        when:
--            files-changed:
--                - js/public/**
--                - js/src/**
--        toolchains:
--            - linux64-clang
--            - linux64-gcc
--            - linux64-sccache
--
--    linux64-haz/debug:
--        description: "Browser Hazard Analysis Linux"
--        index:
--            product: firefox
--            job-name: browser-haz-debug
--        treeherder:
--            platform: linux64/debug
--            symbol: tc(H)
--        worker:
--            env:
--                TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/hazard.manifest"
--        run:
--            using: hazard
--            mozconfig: "browser/config/mozconfigs/linux64/hazards"
--            command: >
--                cd /builds/worker/checkouts/gecko/taskcluster/scripts/builder
--                && ./build-haz-linux.sh --project browser $HOME/workspace
--        toolchains:
--            - linux64-clang
--            - linux64-gcc
--            - linux64-sccache
-diff --git a/taskcluster/ci/l10n/kind.yml b/taskcluster/ci/l10n/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/l10n/kind.yml
-+++ /dev/null
-@@ -1,113 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.single_dep:loader
--
--
--transforms:
--   - taskgraph.transforms.l10n:transforms
--   - taskgraph.transforms.job:transforms
--   - taskgraph.transforms.task:transforms
--
--kind-dependencies:
--   - build
--
--only-for-build-platforms:
--   - linux64/opt
--   - linux/opt
--   - android-api-16/opt
--   #- macosx64/opt
--
--job-template:
--   description:
--      by-build-platform:
--         default: Localization
--         android-api-16-l10n: Single Locale Repack
--   locales-file:
--      by-build-platform:
--         default: browser/locales/all-locales
--         android-api-16-l10n: mobile/locales/l10n-changesets.json
--   ignore-locales:
--      by-build-platform:
--         # OSX has a special locale for japanese
--         macosx64.*: [ja]
--         default: [ja-JP-mac]
--   run-time:
--      by-build-platform:
--         default: 36000
--         android-api-16-l10n: 18000
--   tooltool:
--      by-build-platform:
--         default: public
--         android-api-16-l10n: internal
--         macosx64-nightly: internal
--   worker-type:
--      by-build-platform:
--         default: aws-provisioner-v1/gecko-{level}-b-linux
--         android: aws-provisioner-v1/gecko-{level}-b-android
--   treeherder:
--      symbol: tc(L10n)
--      tier:
--         by-build-platform:
--            default: 2
--      platform:
--         by-build-platform:
--            linux64-l10n: linux64/opt
--            linux-l10n: linux32/opt
--            macosx64: osx-cross/opt
--            android-api-16-l10n: android-4-0-armv7-api16/opt
--   mozharness:
--      config:
--         by-build-platform:
--            linux-l10n:
--               - single_locale/tc_linux32.py
--            linux64-l10n:
--               - single_locale/tc_linux64.py
--            macosx64:
--               - single_locale/tc_macosx64.py
--            android-api-16-l10n:
--               - single_locale/{project}_android-api-16.py
--               - single_locale/tc_android-api-16.py
--            # no default, so we fail on new entries
--      options:
--         by-build-platform:
--            linux-l10n:
--               - environment-config=single_locale/production.py
--               - branch-config=single_locale/{project}.py
--               - platform-config=single_locale/linux32.py
--               - total-chunks=1
--               - this-chunk=1
--            linux64-l10n:
--               - environment-config=single_locale/production.py
--               - branch-config=single_locale/{project}.py
--               - platform-config=single_locale/linux64.py
--               - total-chunks=1
--               - this-chunk=1
--            macosx64:
--               - environment-config=single_locale/production.py
--               - branch-config=single_locale/{project}.py
--               - platform-config=single_locale/macosx64.py
--               - total-chunks=1
--               - this-chunk=1
--            default:
--               - total-chunks=1
--               - this-chunk=1
--      actions:
--         by-build-platform:
--            default: [clone-locales list-locales setup repack summary]
--            android-api-16-l10n: [clone-locales list-locales setup repack
--                                  upload-repacks summary]
--      script:
--         by-build-platform:
--            default: mozharness/scripts/desktop_l10n.py
--            android-api-16-l10n: mozharness/scripts/mobile_l10n.py
--   when:
--      files-changed:
--         - browser/locales/all-locales
--         - testing/mozharness/configs/single_locale/**
--         - testing/mozharness/mozharness/mozilla/l10n/locales.py
--         - testing/mozharness/scripts/desktop_l10n.py
--         - third_party/python/compare-locales/**
--         - toolkit/locales/**
--         - toolkit/mozapps/installer/**
-diff --git a/taskcluster/ci/nightly-fennec/docker_build.yml b/taskcluster/ci/nightly-fennec/docker_build.yml
-deleted file mode 100644
---- a/taskcluster/ci/nightly-fennec/docker_build.yml
-+++ /dev/null
-@@ -1,23 +0,0 @@
--$inherits:
--  from: 'build.yml'
--
--
--task:
--  workerType: b2gbuild
--
--  scopes:
--    # docker build tasks use tc-vcs so include the scope.
--    - 'docker-worker:cache:level-{{level}}-{{project}}-tc-vcs'
--
--  payload:
--
--    cache:
--      level-{{level}}-{{project}}-tc-vcs: '/home/worker/.tc-vcs'
--
--    # All docker builds share a common artifact directory for ease of uploading.
--    artifacts:
--      'public/build':
--        type: directory
--        path: '/home/worker/artifacts/'
--        expires:
--          relative-datestamp: '1 year'
-diff --git a/taskcluster/ci/nightly-l10n-signing/kind.yml b/taskcluster/ci/nightly-l10n-signing/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/nightly-l10n-signing/kind.yml
-+++ /dev/null
-@@ -1,16 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.single_dep:loader
--
--transforms:
--   - taskgraph.transforms.nightly_l10n_signing:transforms
--   - taskgraph.transforms.signing:transforms
--   - taskgraph.transforms.task:transforms
--
--kind-dependencies:
--  - nightly-l10n
--
--only-for-attributes:
--  - nightly
-diff --git a/taskcluster/ci/nightly-l10n/kind.yml b/taskcluster/ci/nightly-l10n/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/nightly-l10n/kind.yml
-+++ /dev/null
-@@ -1,165 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.single_dep:loader
--
--transforms:
--   - taskgraph.transforms.l10n:transforms
--   - taskgraph.transforms.job:transforms
--   - taskgraph.transforms.task:transforms
--
--kind-dependencies:
--   - build
--
--only-for-build-platforms:
--   - linux64-nightly/opt
--   - linux-nightly/opt
--   - android-api-16-nightly/opt
--   - macosx64-nightly/opt
--   - win32-nightly/opt
--   - win64-nightly/opt
--
--job-template:
--   description:
--      by-build-platform:
--         default: Localization
--         android-api-16-nightly: Single Locale Repack
--   locales-file:
--      by-build-platform:
--         default: browser/locales/all-locales
--         android-api-16-nightly: mobile/locales/l10n-changesets.json
--   chunks:
--      by-build-platform:
--         macosx64-nightly: 8
--         win.*: 10
--         default: 6
--   run-on-projects: ['release']
--   ignore-locales:
--      by-build-platform:
--         # OSX has a special locale for japanese
--         macosx64.*: [ja]
--         default: [ja-JP-mac]
--   run-time:
--      by-build-platform:
--         default: 36000
--         android-api-16-nightly: 18000
--   tooltool:
--      by-build-platform:
--         default: public
--         android-api-16-nightly: internal
--         macosx64-nightly: internal
--         win32-nightly: internal
--         win64-nightly: internal
--   index:
--      type: l10n
--      product:
--         by-build-platform:
--            default: firefox
--            android-api-16-nightly: mobile
--      job-name:
--         by-build-platform:
--            linux-nightly: linux-opt
--            linux64-nightly: linux64-opt
--            macosx64-nightly: macosx64-opt
--            win32-nightly: win32-opt
--            win64-nightly: win64-opt
--            android-api-16-nightly: android-api-16-opt
--   worker-type:
--      by-build-platform:
--         default: aws-provisioner-v1/gecko-{level}-b-linux
--         android-api-16-nightly: aws-provisioner-v1/gecko-{level}-b-android
--         win.*: aws-provisioner-v1/gecko-{level}-b-win2012
--   treeherder:
--      symbol: tc-L10n(N)
--      tier: 1
--      platform:
--         by-build-platform:
--            linux64-nightly: linux64/opt
--            linux-nightly: linux32/opt
--            macosx64-nightly: osx-cross/opt
--            win32-nightly: windows2012-32/opt
--            win64-nightly: windows2012-64/opt
--            android-api-16-nightly: android-4-0-armv7-api16/opt
--   env:
--      by-build-platform:
--         linux.*:   # linux64 and 32 get same treatment here
--            EN_US_PACKAGE_NAME: target.tar.bz2
--            EN_US_BINARY_URL:
--               task-reference: https://queue.taskcluster.net/v1/task/<unsigned-build>/artifacts/public/build
--            MAR_TOOLS_URL:
--               task-reference: https://queue.taskcluster.net/v1/task/<unsigned-build>/artifacts/public/build/host/bin
--         macosx64-nightly:
--            EN_US_PACKAGE_NAME: target.dmg
--            EN_US_BINARY_URL:
--               task-reference: https://queue.taskcluster.net/v1/task/<unsigned-build>/artifacts/public/build
--            MAR_TOOLS_URL:
--               task-reference: https://queue.taskcluster.net/v1/task/<unsigned-build>/artifacts/public/build/host/bin
--         win.*:
--            EN_US_PACKAGE_NAME: target.zip
--            EN_US_BINARY_URL:
--               task-reference: https://queue.taskcluster.net/v1/task/<signed-build>/artifacts/public/build
--            EN_US_INSTALLER_BINARY_URL:
--               task-reference: https://queue.taskcluster.net/v1/task/<repackage-signed>/artifacts/public/build
--            MAR_TOOLS_URL:
--               task-reference: https://queue.taskcluster.net/v1/task/<unsigned-build>/artifacts/public/build/host/bin
--         android-api-16-nightly:
--            EN_US_PACKAGE_NAME: target.apk
--            EN_US_BINARY_URL:
--               task-reference: https://queue.taskcluster.net/v1/task/<unsigned-build>/artifacts/public/build/en-US
--   mozharness:
--      config:
--         by-build-platform:
--            linux-nightly:
--               - single_locale/tc_linux32.py
--               - taskcluster_nightly.py
--            linux64-nightly:
--               - single_locale/tc_linux64.py
--               - taskcluster_nightly.py
--            macosx64-nightly:
--               - single_locale/tc_macosx64.py
--               - taskcluster_nightly.py
--            win32-nightly: []
--            win64-nightly: []
--            android-api-16-nightly:
--               - taskcluster_nightly.py
--               - single_locale/{project}_android-api-16.py
--               - single_locale/tc_android-api-16.py
--            # no default, so we fail on new entries
--      options:
--         by-build-platform:
--            linux-nightly:
--               - environment-config=single_locale/production.py
--               - branch-config=single_locale/{project}.py
--               - platform-config=single_locale/linux32.py
--            linux64-nightly:
--               - environment-config=single_locale/production.py
--               - branch-config=single_locale/{project}.py
--               - platform-config=single_locale/linux64.py
--            macosx64-nightly:
--               - environment-config=single_locale/production.py
--               - branch-config=single_locale/{project}.py
--               - platform-config=single_locale/macosx64.py
--            win32-nightly:
--               - environment-config=single_locale/production.py
--               - branch-config=single_locale/{project}.py
--               - platform-config=single_locale/win32.py
--               - config=single_locale/tc_win32.py
--               - config=taskcluster_nightly.py
--            win64-nightly:
--               - environment-config=single_locale/production.py
--               - branch-config=single_locale/{project}.py
--               - platform-config=single_locale/win64.py
--               - config=single_locale/tc_win64.py
--               - config=taskcluster_nightly.py
--            default: [ ]
--      actions:
--         by-build-platform:
--            default: ['clone-locales', 'list-locales', 'setup', 'repack',
--                      'submit-to-balrog', 'summary']
--            android-api-16-nightly: ['clone-locales', 'list-locales', 'setup', 'repack',
--                                     'upload-repacks', 'submit-to-balrog', 'summary']
--      script:
--         by-build-platform:
--            default: mozharness/scripts/desktop_l10n.py
--            android-api-16-nightly: mozharness/scripts/mobile_l10n.py
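Note on the env and worker-type values in the hunk above: strings under a task-reference: key carry <dependency-name> markers that taskgraph replaces with the taskId of the named upstream task when the task definition is built, and {level} in worker-type strings is filled in with the repository's SCM level. A minimal Python sketch of both substitutions (hypothetical helper name and taskId; the real logic lives in the taskgraph transforms):

    import re

    def resolve_task_references(value, dep_task_ids):
        # Swap each <dependency-name> marker for the taskId of the named
        # upstream task. dep_task_ids maps names like 'unsigned-build'
        # to taskIds (illustrative values only).
        return re.sub(r'<([^>]+)>', lambda m: dep_task_ids[m.group(1)], value)

    url = ('https://queue.taskcluster.net/v1/task/'
           '<unsigned-build>/artifacts/public/build')
    print(resolve_task_references(url, {'unsigned-build': 'ExampleTaskId123'}))
    # Worker-type templating is plain string formatting:
    print('aws-provisioner-v1/gecko-{level}-b-linux'.format(level=3))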
-diff --git a/taskcluster/ci/push-apk-breakpoint/kind.yml b/taskcluster/ci/push-apk-breakpoint/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/push-apk-breakpoint/kind.yml
-+++ /dev/null
-@@ -1,29 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.push_apk:loader
--
--transforms:
--   - taskgraph.transforms.push_apk_breakpoint:transforms
--   - taskgraph.transforms.task:transforms
--
--kind-dependencies:
--  - build-signing
--
--jobs:
--    android-push-apk-breakpoint/opt:
--        description: PushApk breakpoint. Decides whether APK should be published onto Google Play Store
--        attributes:
--            build_platform: android-nightly
--            nightly: true
--        worker-type: # see transforms
--        worker:
--            implementation: push-apk-breakpoint
--        treeherder:
--            symbol: pub(Br)
--            platform: Android/opt
--            tier: 2
--            kind: other
--        run-on-projects: ['mozilla-central', 'mozilla-beta', 'mozilla-release']
--        deadline-after: 5 days
-diff --git a/taskcluster/ci/push-apk/kind.yml b/taskcluster/ci/push-apk/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/push-apk/kind.yml
-+++ /dev/null
-@@ -1,34 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.push_apk:loader
--
--transforms:
--   - taskgraph.transforms.push_apk:transforms
--   - taskgraph.transforms.task:transforms
--
--kind-dependencies:
--  - build-signing
--  - push-apk-breakpoint
--
--jobs:
--    push-apk/opt:
--        description: Publishes APK onto Google Play Store
--        attributes:
--            build_platform: android-nightly
--            nightly: true
--        worker-type: scriptworker-prov-v1/pushapk-v1
--        worker:
--            upstream-artifacts: # see transforms
--            google-play-track: # see transforms
--            implementation: push-apk
--            dry-run: # see transforms
--        scopes: # see transforms
--        treeherder:
--            symbol: pub(gp)
--            platform: Android/opt
--            tier: 2
--            kind: other
--        run-on-projects: ['mozilla-central', 'mozilla-beta', 'mozilla-release']
--        deadline-after: 5 days
-diff --git a/taskcluster/ci/repackage-l10n/kind.yml b/taskcluster/ci/repackage-l10n/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/repackage-l10n/kind.yml
-+++ /dev/null
-@@ -1,21 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.single_dep:loader
--
--transforms:
--   - taskgraph.transforms.repackage_l10n:transforms
--   - taskgraph.transforms.repackage:transforms
--   - taskgraph.transforms.job:transforms
--   - taskgraph.transforms.task:transforms
--
--kind-dependencies:
--  - nightly-l10n-signing
--
--only-for-build-platforms:
--  - linux-nightly/opt
--  - linux64-nightly/opt
--  - macosx64-nightly/opt
--  - win32-nightly/opt
--  - win64-nightly/opt
-diff --git a/taskcluster/ci/repackage-signing/kind.yml b/taskcluster/ci/repackage-signing/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/repackage-signing/kind.yml
-+++ /dev/null
-@@ -1,26 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.single_dep:loader
--
--transforms:
--   - taskgraph.transforms.repackage_signing:transforms
--   - taskgraph.transforms.repackage_routes:transforms
--   - taskgraph.transforms.task:transforms
--
--kind-dependencies:
--  - repackage
--  - repackage-l10n
--
--only-for-build-platforms:
--  - linux-nightly/opt
--  - linux-devedition-nightly/opt
--  - linux64-nightly/opt
--  - linux64-devedition-nightly/opt
--  - macosx64-nightly/opt
--  - macosx64-devedition-nightly/opt
--  - win32-nightly/opt
--  - win32-devedition-nightly/opt
--  - win64-nightly/opt
--  - win64-devedition-nightly/opt
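The repackage kinds above use taskgraph.loader.single_dep:loader, which creates one job per upstream task from kind-dependencies and drops anything whose build platform is not listed under only-for-build-platforms. A rough sketch of that filtering, assuming the usual build_platform/build_type task attributes:

    def single_dep_jobs(dep_tasks, only_for_build_platforms):
        # One job per matching upstream task; everything else about the
        # job comes from the kind's transforms. Simplified sketch.
        for task in dep_tasks:
            platform = '{}/{}'.format(task['attributes']['build_platform'],
                                      task['attributes']['build_type'])
            if platform in only_for_build_platforms:
                yield {'primary-dependency': task}

    builds = [{'label': 'build-linux64-nightly/opt',
               'attributes': {'build_platform': 'linux64-nightly',
                              'build_type': 'opt'}}]
    print(list(single_dep_jobs(builds, ['linux64-nightly/opt'])))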
-diff --git a/taskcluster/ci/repackage/kind.yml b/taskcluster/ci/repackage/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/repackage/kind.yml
-+++ /dev/null
-@@ -1,26 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.single_dep:loader
--
--transforms:
--   - taskgraph.transforms.repackage:transforms
--   - taskgraph.transforms.repackage_routes:transforms
--   - taskgraph.transforms.job:transforms
--   - taskgraph.transforms.task:transforms
--
--kind-dependencies:
--  - build-signing
--
--only-for-build-platforms:
--  - linux-nightly/opt
--  - linux-devedition-nightly/opt
--  - linux64-nightly/opt
--  - linux64-devedition-nightly/opt
--  - macosx64-nightly/opt
--  - macosx64-devedition-nightly/opt
--  - win32-nightly/opt
--  - win32-devedition-nightly/opt
--  - win64-nightly/opt
--  - win64-devedition-nightly/opt
-diff --git a/taskcluster/ci/source-test/doc.yml b/taskcluster/ci/source-test/doc.yml
-deleted file mode 100644
---- a/taskcluster/ci/source-test/doc.yml
-+++ /dev/null
-@@ -1,28 +0,0 @@
--sphinx:
--    description: Generate the Sphinx documentation
--    platform: lint/opt
--    treeherder:
--        symbol: tc(Doc)
--        kind: test
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
--    worker:
--        docker-image: {in-tree: "lint"}
--        max-run-time: 1800
--        artifacts:
--            - type: file
--              name: public/docs.tar.gz
--              path: /home/worker/checkouts/gecko/docs.tar.gz
--    run:
--        using: run-task
--        command: >
--            cd /home/worker/checkouts/gecko &&
--            ./mach doc --outdir docs-out --no-open &&
--            rm -rf docs-out/html/Mozilla_Source_Tree_Docs/_venv &&
--            mv docs-out/html/Mozilla_Source_Tree_Docs docs &&
--            tar -czf docs.tar.gz docs
--    when:
--        files-changed:
--            - '**/*.py'
--            - '**/*.rst'
--            - 'tools/docs/**'
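The when.files-changed lists in these source-test tasks are a scheduling optimization: on pushes where no changed file matches any pattern, the task is dropped from the graph. A simplified sketch of the check (the in-tree version uses mozpack path matching rather than fnmatch, so ** behaves slightly differently at path-component boundaries):

    from fnmatch import fnmatch

    def should_run(patterns, changed_files):
        # Keep the task only if at least one changed file matches at
        # least one of the when.files-changed patterns.
        return any(fnmatch(path, pattern)
                   for path in changed_files
                   for pattern in patterns)

    patterns = ['**/*.py', '**/*.rst', 'tools/docs/**']
    print(should_run(patterns, ['taskcluster/mach_commands.py']))       # True
    print(should_run(patterns, ['browser/themes/windows/browser.css']))  # False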
-diff --git a/taskcluster/ci/source-test/kind.yml b/taskcluster/ci/source-test/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/source-test/kind.yml
-+++ /dev/null
-@@ -1,27 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.transform:loader
--
--transforms:
--   - taskgraph.transforms.source_test:transforms
--   - taskgraph.transforms.job:transforms
--   - taskgraph.transforms.task:transforms
--
--jobs-from:
--    - python-tests.yml
--    - mocha.yml
--    - mozlint.yml
--    - doc.yml
--    - webidl.yml
--
--# This is used by run-task based tasks to lookup which build task it
--# should depend on based on its own platform.
--dependent-build-platforms:
--    linux64/debug:
--        label: build-linux64/debug
--        target-name: target.tar.bz2
--    linux64.*:
--        label: build-linux64/opt
--        target-name: target.tar.bz2
-diff --git a/taskcluster/ci/source-test/mocha.yml b/taskcluster/ci/source-test/mocha.yml
-deleted file mode 100644
---- a/taskcluster/ci/source-test/mocha.yml
-+++ /dev/null
-@@ -1,21 +0,0 @@
--eslint-plugin-mozilla:
--    description: eslint-plugin-mozilla integration tests
--    platform: linux64/opt
--    treeherder:
--        symbol: mocha(epm)
--        kind: test
--        tier: 2
--    worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
--    worker:
--        docker-image: {in-tree: "lint"}
--        max-run-time: 1800
--    run:
--        using: run-task
--        cache-dotcache: true
--        command: >
--            cd /builds/worker/checkouts/gecko/tools/lint/eslint/eslint-plugin-mozilla &&
--            cp -r /build/node_modules_eslint-plugin-mozilla node_modules &&
--            npm run test
--    when:
--        files-changed:
--            - 'tools/lint/eslint/eslint-plugin-mozilla/**'
-diff --git a/taskcluster/ci/source-test/mozlint.yml b/taskcluster/ci/source-test/mozlint.yml
-deleted file mode 100644
---- a/taskcluster/ci/source-test/mozlint.yml
-+++ /dev/null
-@@ -1,81 +0,0 @@
--mozlint-eslint:
--    description: JS lint check
--    platform: lint/opt
--    treeherder:
--        symbol: ES
--        kind: test
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
--    worker:
--        docker-image: {in-tree: "lint"}
--        max-run-time: 1800
--    run:
--        using: run-task
--        command: >
--            cd /builds/worker/checkouts/gecko/ &&
--            cp -r /build/node_modules_eslint node_modules &&
--            ln -s ../tools/lint/eslint/eslint-plugin-mozilla node_modules &&
--            ln -s ../tools/lint/eslint/eslint-plugin-spidermonkey-js node_modules &&
--            ./mach lint -l eslint -f treeherder --quiet
--    when:
--        files-changed:
--            # Files that are likely audited.
--            - '**/*.js'
--            - '**/*.jsm'
--            - '**/*.jsx'
--            - '**/*.html'
--            - '**/*.xhtml'
--            - '**/*.xml'
--            # Run when eslint policies change.
--            - '**/.eslintignore'
--            - '**/*eslintrc*'
--            # The plugin implementing custom checks.
--            - 'tools/lint/eslint/eslint-plugin-mozilla/**'
--            - 'tools/lint/eslint/eslint-plugin-spidermonkey-js/**'
--            # Other misc lint related files.
--            - 'python/mozlint/**'
--            - 'tools/lint/**'
--
--mozlint-flake8:
--    description: flake8 run over the gecko codebase
--    platform: lint/opt
--    treeherder:
--        symbol: f8
--        kind: test
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
--    worker:
--        docker-image: {in-tree: "lint"}
--        max-run-time: 1800
--    run:
--        using: mach
--        mach: lint -l flake8 -f treeherder
--    when:
--        files-changed:
--            - '**/*.py'
--            - '**/.flake8'
--            - 'python/mozlint/**'
--            - 'tools/lint/**'
--
--wptlint-gecko:
--    description: web-platform-tests linter
--    platform: lint/opt
--    treeherder:
--        symbol: W
--        kind: test
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
--    worker:
--        docker-image: {in-tree: "lint"}
--        max-run-time: 1800
--    run:
--        using: mach
--        mach: lint -l wpt -l wpt_manifest -f treeherder
--    when:
--        files-changed:
--            - 'testing/web-platform/tests/**'
--            - 'testing/web-platform/mozilla/tests/**'
--            - 'testing/web-platform/meta/MANIFEST.json'
--            - 'testing/web-platform/mozilla/meta/MANIFEST.json'
--            - 'python/mozlint/**'
--            - 'tools/lint/**'
-diff --git a/taskcluster/ci/source-test/python-tests.yml b/taskcluster/ci/source-test/python-tests.yml
-deleted file mode 100644
---- a/taskcluster/ci/source-test/python-tests.yml
-+++ /dev/null
-@@ -1,149 +0,0 @@
--taskgraph-tests:
--    description: taskcluster/taskgraph unit tests
--    platform: linux64/opt
--    treeherder:
--        symbol: py(tg)
--        kind: test
--        tier: 2
--    worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
--    worker:
--        docker-image: {in-tree: "lint"}
--        max-run-time: 1800
--    run:
--        using: mach
--        mach: python-test --subsuite taskgraph
--    when:
--        files-changed:
--            - 'taskcluster/**/*.py'
--            - 'config/mozunit.py'
--            - 'python/mach/**/*.py'
--
--marionette-harness:
--    description: testing/marionette/harness unit tests
--    platform: linux64/opt
--    treeherder:
--        symbol: py(mnh)
--        kind: test
--        tier: 2
--    worker-type:
--        by-platform:
--            linux64.*: aws-provisioner-v1/gecko-t-linux-xlarge
--    worker:
--        by-platform:
--            linux64.*:
--                docker-image: {in-tree: "lint"}
--                max-run-time: 3600
--    run:
--        using: mach
--        mach: python-test --subsuite marionette-harness
--    when:
--        files-changed:
--          - 'testing/marionette/harness/**'
--          - 'testing/mozbase/mozlog/mozlog/**'
--          - 'testing/mozbase/mozlog/setup.py'
--          - 'testing/mozbase/packages.txt'
--          - 'python/mach_commands.py'
--
--mochitest-harness:
--    description: testing/mochitest unittests
--    platform:
--        - linux64/opt
--        - linux64/debug
--    require-build: true
--    treeherder:
--        symbol: py(mch)
--        kind: test
--        tier: 2
--    worker-type:
--        by-platform:
--            linux64.*: aws-provisioner-v1/gecko-t-linux-xlarge
--    worker:
--        by-platform:
--            linux64.*:
--                docker-image: {in-tree: "desktop1604-test"}
--                max-run-time: 3600
--    run:
--        using: run-task
--        command: >
--            source /builds/worker/scripts/xvfb.sh &&
--            start_xvfb '1600x1200x24' 0 &&
--            cd /builds/worker/checkouts/gecko &&
--            ./mach python-test --subsuite mochitest
--    when:
--        files-changed:
--            - 'config/mozunit.py'
--            - 'python/mach_commands.py'
--            - 'testing/mochitest/**'
--            - 'testing/mozharness/mozharness/base/log.py'
--            - 'testing/mozharness/mozharness/mozilla/structuredlog.py'
--            - 'testing/mozharness/mozharness/mozilla/testing/errors.py'
--            - 'testing/profiles/prefs_general.js'
--
--mozbase:
--    description: testing/mozbase unit tests
--    platform:
--        - linux64/opt
--    treeherder:
--        symbol: py(mb)
--        kind: test
--        tier: 2
--    worker-type:
--        by-platform:
--            linux64.*: aws-provisioner-v1/gecko-t-linux-xlarge
--    worker:
--        by-platform:
--            linux64.*:
--                docker-image: {in-tree: "lint"}
--                max-run-time: 3600
--    run:
--        using: mach
--        mach: python-test --subsuite mozbase
--    when:
--        files-changed:
--            - 'testing/mozbase/**'
--            - 'config/mozunit.py'
--            - 'python/mach_commands.py'
--
--mozharness:
--    description: mozharness integration tests
--    platform: linux64/opt
--    treeherder:
--        symbol: py(mh)
--        kind: test
--        tier: 2
--    worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
--    worker:
--        docker-image: {in-tree: "lint"}
--        max-run-time: 1800
--    run:
--        using: run-task
--        cache-dotcache: true
--        command: >
--            cd /builds/worker/checkouts/gecko/testing/mozharness &&
--            /usr/local/bin/tox -e py27-hg4.3
--    when:
--        files-changed:
--            - 'testing/mozharness/**'
--
--mozlint:
--    description: python/mozlint unit tests
--    platform: linux64/opt
--    treeherder:
--        symbol: py(ml)
--        kind: test
--        tier: 2
--    worker-type:
--        by-platform:
--            linux64.*: aws-provisioner-v1/gecko-t-linux-xlarge
--    worker:
--        by-platform:
--            linux64.*:
--                docker-image: {in-tree: "lint"}
--                max-run-time: 3600
--    run:
--        using: mach
--        mach: python-test --subsuite mozlint
--    when:
--        files-changed:
--            - 'python/mozlint/**'
--            - 'python/mach_commands.py'
-diff --git a/taskcluster/ci/source-test/webidl.yml b/taskcluster/ci/source-test/webidl.yml
-deleted file mode 100644
---- a/taskcluster/ci/source-test/webidl.yml
-+++ /dev/null
-@@ -1,20 +0,0 @@
--webidl-test:
--    description: WebIDL parser tests
--    platform: lint/opt
--    treeherder:
--        symbol: Wp
--        kind: test
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-t-linux-xlarge
--    worker:
--        docker-image: {in-tree: "lint"}
--        max-run-time: 1800
--    run:
--        using: mach
--        mach: webidl-parser-test --verbose
--    when:
--        files-changed:
--            - 'dom/bindings/parser/runtests.py'
--            - 'dom/bindings/parser/WebIDL.py'
--            - 'dom/bindings/parser/tests/**'
--            - 'other-licenses/ply/**'
-diff --git a/taskcluster/ci/spidermonkey/kind.yml b/taskcluster/ci/spidermonkey/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/spidermonkey/kind.yml
-+++ /dev/null
-@@ -1,206 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.transform:loader
--
--kind-dependencies:
--   - toolchain
--
--transforms:
--   - taskgraph.transforms.build_attrs:transforms
--   - taskgraph.transforms.toolchain:transforms
--   - taskgraph.transforms.job:transforms
--   - taskgraph.transforms.task:transforms
--
--job-defaults:
--    treeherder:
--        platform: linux64/opt
--        kind: build
--        tier: 1
--    index:
--        product: firefox
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        max-run-time: 36000
--        docker-image: {in-tree: desktop-build}
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/releng.manifest"
--    run:
--        using: spidermonkey
--    when:
--        files-changed:
--            # any when.files-changed specified below in a job will be
--            # appended to this list
--            - build/**
--            - config/**
--            - configure.py
--            - dom/bindings/**
--            - intl/icu/**
--            - js/moz.configure
--            - js/public/**
--            - js/src/**
--            - layout/tools/reftest/reftest/**
--            - Makefile.in
--            - media/webrtc/trunk/tools/gyp/**
--            - memory/**
--            - mfbt/**
--            - modules/fdlibm/**
--            - modules/zlib/src/**
--            - mozglue/**
--            - moz.build
--            - moz.configure
--            - nsprpub/**
--            - python/**
--            - taskcluster/moz.build
--            - taskcluster/ci/spidermonkey/kind.yml
--            - testing/mozbase/**
--            - test.mozbuild
--            - toolkit/mozapps/installer/package-name.mk
--            - toolkit/mozapps/installer/upload-files.mk
--    toolchains:
--        - linux64-clang
--        - linux64-gcc
--        - linux64-sccache
--
--jobs:
--    sm-package/opt:
--        description: "Spidermonkey source package and test"
--        index:
--            job-name: sm-package-opt
--        treeherder:
--            symbol: SM-tc(pkg)
--        run:
--            using: spidermonkey-package
--            spidermonkey-variant: plain
--
--    sm-mozjs-sys/debug:
--        description: "Build js/src as the mozjs_sys Rust crate"
--        index:
--            job-name: sm-mozjs-sys-debug
--        treeherder:
--            platform: linux64/debug
--            symbol: SM-tc(mozjs-crate)
--        run:
--            using: spidermonkey-mozjs-crate
--            spidermonkey-variant: plain
--        run-on-projects: ['integration', 'release', 'try']
--
--    sm-plain/debug:
--        description: "Spidermonkey Plain"
--        index:
--            job-name: sm-plaindebug-debug
--        treeherder:
--            platform: linux64/debug
--            symbol: SM-tc(p)
--        run:
--            spidermonkey-variant: plaindebug
--
--    sm-plain/opt:
--        description: "Spidermonkey Plain"
--        index:
--            job-name: sm-plain-opt
--        treeherder:
--            symbol: SM-tc(p)
--        run:
--            spidermonkey-variant: plain
--
--    sm-arm-sim/debug:
--        description: "Spidermonkey ARM sim"
--        index:
--            job-name: sm-arm-sim-debug
--        treeherder:
--            platform: linux32/debug
--            symbol: SM-tc(arm)
--        worker:
--            env:
--                TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux32/releng.manifest"
--        run:
--            spidermonkey-variant: arm-sim
--
--    sm-arm64-sim/debug:
--        description: "Spidermonkey ARM64 sim"
--        index:
--            job-name: sm-arm64-sim-debug
--        treeherder:
--            platform: linux64/debug
--            symbol: SM-tc(arm64)
--        run:
--            spidermonkey-variant: arm64-sim
--
--    sm-asan/opt:
--        description: "Spidermonkey Address Sanitizer"
--        index:
--            job-name: sm-asan-opt
--        treeherder:
--            symbol: SM-tc(asan)
--        worker:
--            env:
--                TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/asan.manifest"
--        run:
--            spidermonkey-variant: asan
--
--    sm-compacting/debug:
--        description: "Spidermonkey Compacting"
--        index:
--            job-name: sm-compacting-debug
--        treeherder:
--            platform: linux64/debug
--            symbol: SM-tc(cgc)
--        run:
--            spidermonkey-variant: compacting
--
--    sm-msan/opt:
--        description: "Spidermonkey Memory Sanitizer"
--        index:
--            job-name: sm-msan-opt
--        treeherder:
--            symbol: SM-tc(msan)
--        worker:
--            env:
--                TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/msan.manifest"
--        run:
--            spidermonkey-variant: msan
--
--    sm-tsan/opt:
--        description: "Spidermonkey Thread Sanitizer"
--        index:
--            job-name: sm-tsan-opt
--        treeherder:
--            symbol: SM-tc(tsan)
--            tier: 3
--        worker:
--            env:
--                TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/tsan.manifest"
--        run:
--            spidermonkey-variant: tsan
--
--    sm-rootanalysis/debug:
--        description: "Spidermonkey Root Analysis"
--        index:
--            job-name: sm-rootanalysis-debug
--        treeherder:
--            platform: linux64/debug
--            symbol: SM-tc(r)
--        run:
--            spidermonkey-variant: rootanalysis
--
--    sm-nonunified/debug:
--        description: "Spidermonkey Non-Unified Debug"
--        index:
--            job-name: sm-nonunified-debug
--        treeherder:
--            platform: linux64/debug
--            symbol: SM-tc(nu)
--        run:
--            spidermonkey-variant: nonunified
--
--    sm-fuzzing/opt:
--        description: "Spidermonkey Fuzzing"
--        index:
--            job-name: sm-fuzzing
--        treeherder:
--            platform: linux64/opt
--            symbol: SM-tc(f)
--        run:
--            spidermonkey-variant: fuzzing
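In kinds like the one above, the job-defaults mapping is merged into every entry under jobs: before the transforms run, with nested mappings merged recursively and per-job values winning. A minimal sketch of that merge (simplified; taskgraph's real merge handles more cases):

    def merge(defaults, job):
        # Recursively overlay a job stanza on top of job-defaults.
        out = dict(defaults)
        for key, value in job.items():
            if isinstance(value, dict) and isinstance(out.get(key), dict):
                out[key] = merge(out[key], value)
            else:
                out[key] = value
        return out

    defaults = {'treeherder': {'platform': 'linux64/opt', 'kind': 'build', 'tier': 1}}
    job = {'treeherder': {'platform': 'linux64/debug', 'symbol': 'SM-tc(p)'}}
    print(merge(defaults, job))
    # {'treeherder': {'platform': 'linux64/debug', 'kind': 'build',
    #                 'tier': 1, 'symbol': 'SM-tc(p)'}}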
-diff --git a/taskcluster/ci/static-analysis/kind.yml b/taskcluster/ci/static-analysis/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/static-analysis/kind.yml
-+++ /dev/null
-@@ -1,163 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.transform:loader
--
--kind-dependencies:
--   - toolchain
--
--transforms:
--   - taskgraph.transforms.build_attrs:transforms
--   - taskgraph.transforms.toolchain:transforms
--   - taskgraph.transforms.job:transforms
--   - taskgraph.transforms.task:transforms
--
--job-defaults:
--    index:
--        product: firefox
--    treeherder:
--        symbol: S
--        kind: build
--        tier: 1
--
--jobs:
--    linux64-st-an/debug:
--        description: "Linux64 Debug Static Analysis"
--        index:
--            job-name: linux64-st-an-debug
--        treeherder:
--            platform: linux64/debug
--        worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--        worker:
--            docker-image: {in-tree: desktop-build}
--            max-run-time: 36000
--            env:
--                TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/clang.manifest.centos6"
--        run:
--            using: mozharness
--            actions: [build]
--            config:
--                - builds/releng_sub_linux_configs/64_stat_and_debug.py
--                - balrog/production.py
--            script: "mozharness/scripts/fx_desktop_build.py"
--            tooltool-downloads: public
--            keep-artifacts: false
--        toolchains:
--            - linux64-sccache
--
--    linux64-st-an/opt:
--        description: "Linux64 Opt Static Analysis"
--        index:
--            job-name: linux64-st-an-opt
--        treeherder:
--            platform: linux64/opt
--        worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--        worker:
--            docker-image: {in-tree: desktop-build}
--            max-run-time: 36000
--            env:
--                TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/clang.manifest.centos6"
--        run:
--            using: mozharness
--            actions: [build]
--            config:
--                - builds/releng_sub_linux_configs/64_stat_and_opt.py
--                - balrog/production.py
--            script: "mozharness/scripts/fx_desktop_build.py"
--            tooltool-downloads: public
--            keep-artifacts: false
--        toolchains:
--            - linux64-sccache
--
--    win32-st-an/debug:
--        description: "Win32 Static Analysis Debug (clang-cl)"
--        index:
--            product: firefox
--            job-name: win32-st-an-debug
--        treeherder:
--            platform: windows2012-32/debug
--            symbol: tc(S)
--            tier: 1
--        worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--        worker:
--            max-run-time: 7200
--            env:
--                TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win32/clang.manifest"
--        run:
--            using: mozharness
--            script: mozharness/scripts/fx_desktop_build.py
--            config:
--                - builds/taskcluster_firefox_win32_clang_debug.py
--        toolchains:
--            - win32-clang-cl
--            - win64-sccache
--
--    win32-st-an/opt:
--        description: "Win32 Static Analysis Opt (clang-cl)"
--        index:
--            product: firefox
--            job-name: win32-st-an-opt
--        treeherder:
--            platform: windows2012-32/opt
--            symbol: tc(S)
--            tier: 1
--        worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--        worker:
--            max-run-time: 7200
--            env:
--                TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win32/clang.manifest"
--        run:
--            using: mozharness
--            script: mozharness/scripts/fx_desktop_build.py
--            config:
--                - builds/taskcluster_firefox_win32_clang.py
--        toolchains:
--            - win32-clang-cl
--            - win64-sccache
--
--    win64-st-an/debug:
--        description: "Win64 Static Analysis Debug (clang-cl)"
--        index:
--            product: firefox
--            job-name: win64-st-an-debug
--        treeherder:
--            platform: windows2012-64/debug
--            symbol: tc(S)
--            tier: 1
--        worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--        worker:
--            max-run-time: 7200
--            env:
--                TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win64/clang.manifest"
--        run:
--            using: mozharness
--            script: mozharness/scripts/fx_desktop_build.py
--            config:
--                - builds/taskcluster_firefox_win64_clang_debug.py
--        toolchains:
--            - win64-clang-cl
--            - win64-sccache
--
--    win64-st-an/opt:
--        description: "Win64 Static Analysis Opt (clang-cl)"
--        index:
--            product: firefox
--            job-name: win64-st-an-opt
--        treeherder:
--            platform: windows2012-64/opt
--            symbol: tc(S)
--            tier: 1
--        worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--        worker:
--            max-run-time: 7200
--            env:
--                TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win64/clang.manifest"
--        run:
--            using: mozharness
--            script: mozharness/scripts/fx_desktop_build.py
--            config:
--                - builds/taskcluster_firefox_win64_clang.py
--        toolchains:
--            - win64-clang-cl
--            - win64-sccache
-diff --git a/taskcluster/ci/test/kind.yml b/taskcluster/ci/test/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/test/kind.yml
-+++ /dev/null
-@@ -1,12 +0,0 @@
--loader: taskgraph.loader.test:loader
--
--kind-dependencies:
--    - build
--    - build-signing
--
--transforms:
--   - taskgraph.transforms.tests:transforms
--   - taskgraph.transforms.job:transforms
--   - taskgraph.transforms.task:transforms
--
--parse-commit: taskgraph.try_option_syntax:parse_message

-diff --git a/taskcluster/ci/test/test-platforms.yml b/taskcluster/ci/test/test-platforms.yml
-deleted file mode 100644
---- a/taskcluster/ci/test/test-platforms.yml
-+++ /dev/null
-@@ -1,309 +0,0 @@
--# This file maps build platforms to test platforms.  In some cases, a
--# single build may be tested on multiple test platforms, but a single test
--# platform can only link to one build platform.  Both build and test platforms
--# are represented as <platform>/<type>, where <type> is what Treeherder calls a
--# collection.
--#
--# Each test platform further specifies the set of tests that will be scheduled
--# for the platform, referring to tests defined in test-sets.yml.
--#
--# Note that set does not depend on the tree; tree-dependent job selection
--# should be performed in the target task selection phase of task-graph
--# generation.
--
--
--##
--# Linux platforms (matching /linux.*/)
--
--linux32/debug:
--    build-platform: linux/debug
--    test-sets:
--        - linux32-tests
--linux32/opt:
--    build-platform: linux/opt
--    test-sets:
--        - linux32-tests
--        - linux32-opt-tests
--linux32-nightly/opt:
--    build-platform: linux-nightly/opt
--    test-sets:
--        - linux32-tests
--        - linux32-opt-tests
--linux32-devedition/opt:
--    build-platform: linux-devedition-nightly/opt
--    test-sets:
--        - linux32-tests
--        - linux32-opt-tests
--
--linux64/debug:
--    build-platform: linux64/debug
--    test-sets:
--        - common-tests
--        - web-platform-tests
--linux64/opt:
--    build-platform: linux64/opt
--    test-sets:
--        - common-tests
--        - web-platform-tests
--        - opt-only-tests
--        - desktop-screenshot-capture
--        - talos
--        - awsy
--linux64-nightly/opt:
--    build-platform: linux64-nightly/opt
--    test-sets:
--        - common-tests
--        - web-platform-tests
--        - opt-only-tests
--        - desktop-screenshot-capture
--        - awsy
--linux64-devedition/opt:
--    build-platform: linux64-devedition-nightly/opt
--    test-sets:
--        - common-tests
--        - web-platform-tests
--        - opt-only-tests
--        - desktop-screenshot-capture
--
--# TODO: use 'pgo' and 'asan' labels here, instead of -pgo/opt
--linux64-pgo/opt:
--    build-platform: linux64-pgo/opt
--    test-sets:
--        - common-tests
--        - web-platform-tests
--        - talos
--
--linux64-asan/opt:
--    build-platform: linux64-asan/opt
--    test-sets:
--        - common-tests
--
--# Stylo builds only run a subset of tests for the moment. So give them
--# their own test set.
--# Stylo doesn't work on 32-bit Linux yet (bug 1385025)
--# linux32-stylo/debug:
--#     build-platform: linux/debug
--#     test-sets:
--#         - stylo-tests
--# linux32-stylo/opt:
--#     build-platform: linux/opt
--#     test-sets:
--#         - stylo-tests
--linux64-stylo/debug:
--    build-platform: linux64/debug
--    test-sets:
--        - stylo-tests
--linux64-stylo/opt:
--    build-platform: linux64/opt
--    test-sets:
--        - stylo-tests
--        - talos
--linux64-stylo-sequential/debug:
--    build-platform: linux64/debug
--    test-sets:
--        - stylo-sequential-tests
--linux64-stylo-sequential/opt:
--    build-platform: linux64/opt
--    test-sets:
--        - stylo-sequential-tests
--        - talos
--macosx64-stylo/debug:
--    build-platform: macosx64/debug
--    test-sets:
--        - stylo-tests
--macosx64-stylo/opt:
--    build-platform: macosx64/opt
--    test-sets:
--        - stylo-tests
--# Windows needs more work to green up (bug 1385027)
--# windows7-32-stylo/debug:
--#     build-platform: win32/debug
--#     test-sets:
--#         - stylo-tests
--# windows7-32-stylo/opt:
--#     build-platform: win32/opt
--#     test-sets:
--#         - stylo-tests
--# windows10-64-stylo/debug:
--#     build-platform: win64/debug
--#     test-sets:
--#         - stylo-tests
--# windows10-64-stylo/opt:
--#     build-platform: win64/opt
--#     test-sets:
--#         - stylo-tests
--
--# QR builds just run a subset right now. Note that the tests in this
--# test set are further restricted in tests.yml to run on certain projects
--# only, to avoid adding too much infra load.
--linux64-qr/opt:
--    build-platform: linux64/opt
--    test-sets:
--        - qr-tests
--linux64-qr/debug:
--    build-platform: linux64/debug
--    test-sets:
--        - qr-tests
--
--linux64-ccov/opt:
--    build-platform: linux64-ccov/opt
--    test-sets:
--        - common-tests
--        - web-platform-tests
--        - awsy
--        - talos
--linux64-jsdcov/opt:
--    build-platform: linux64-jsdcov/opt
--    test-sets:
--        - jsdcov-code-coverage-tests
--
--##
--# Windows platforms (matching /windows.*/)
--
--# win32
--windows7-32/debug:
--    build-platform: win32/debug
--    test-sets:
--        - windows-tests
--windows7-32/opt:
--    build-platform: win32/opt
--    test-sets:
--        - awsy
--        - desktop-screenshot-capture
--        - windows-talos
--        - windows-tests
--
--windows7-32-pgo/opt:
--    build-platform: win32-pgo/opt
--    test-sets:
--        - awsy
--        - desktop-screenshot-capture
--        - windows-tests
--        - windows-talos
--
--windows7-32-nightly/opt:
--    build-platform: win32-nightly/opt
--    test-sets:
--        - awsy
--        - desktop-screenshot-capture
--        - windows-tests
--
--windows7-32-devedition/opt:
--    build-platform: win32-devedition-nightly/opt
--    test-sets:
--        - desktop-screenshot-capture
--        - windows-tests
--
--# win64
--windows10-64/debug:
--    build-platform: win64/debug
--    test-sets:
--        - windows-tests
--
--windows10-64/opt:
--    build-platform: win64/opt
--    test-sets:
--        - awsy
--        - desktop-screenshot-capture
--        - windows-talos
--        - windows-tests
--
--windows10-64-pgo/opt:
--    build-platform: win64-pgo/opt
--    test-sets:
--        - awsy
--        - desktop-screenshot-capture
--        - windows-talos
--        - windows-tests
--
--windows10-64-nightly/opt:
--    build-platform: win64-nightly/opt
--    test-sets:
--        - awsy
--        - desktop-screenshot-capture
--        - windows-tests
--
--windows10-64-devedition/opt:
--    build-platform: win64-devedition-nightly/opt
--    test-sets:
--        - desktop-screenshot-capture
--        - windows-tests
--
--windows10-64-asan/opt:
--    build-platform: win64-asan/opt
--    test-sets:
--        - common-tests
--
--# Windows8 tests; all via BBB
--windows8-64/debug:
--    build-platform: win64/debug
--    test-sets:
--        - windows8-tests
--windows8-64/opt:
--    build-platform: win64/opt
--    test-sets:
--        - windows8-tests
--windows8-64-pgo/opt:
--    build-platform: win64-pgo/opt
--    test-sets:
--        - windows8-tests
--windows8-64-nightly/opt:
--    build-platform: win64-nightly/opt
--    test-sets:
--        - windows8-tests
--windows8-64-devedition/opt:
--    build-platform: win64-devedition-nightly/opt
--    test-sets:
--        - windows8-tests
--
--##
--# MacOS X platforms (matching /macosx.*/)
--
--macosx64/debug:
--    build-platform: macosx64/debug
--    test-sets:
--        - macosx64-tests
--
--macosx64/opt:
--    build-platform: macosx64/opt
--    test-sets:
--        - macosx64-tests-talos
--        - macosx64-tests
--        - desktop-screenshot-capture
--        - awsy
--
--macosx64-nightly/opt:
--    build-platform: macosx64-nightly/opt
--    test-sets:
--        - macosx64-tests
--        - awsy
--
--macosx64-devedition/opt:
--    build-platform: macosx64-devedition-nightly/opt
--    test-sets:
--        - macosx64-tests
--
--##
--# Android platforms (matching /android.*/)
--
--android-4.3-arm7-api-16/debug:
--    build-platform: android-api-16/debug
--    test-sets:
--        - android-common-tests
--        - android-debug-tests
--
--android-4.3-arm7-api-16/opt:
--    build-platform: android-api-16/opt
--    test-sets:
--        - android-common-tests
--        - android-opt-tests
--
--android-4.2-x86/opt:
--    build-platform: android-x86/opt
--    test-sets:
--        - android-x86-tests
--
--android-4.3-arm7-api-16-gradle/opt:
--    build-platform: android-api-16-gradle/opt
--    test-sets:
--        - android-gradle-tests
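Taken together, test-platforms.yml (above) names the test sets for each test platform, test-sets.yml (below) expands each set into individual tests, and tests.yml defines those tests. A toy sketch of how the test loader fans this out into (test-platform, build-platform, test) triples, using made-up miniature data:

    test_platforms = {
        'linux64/debug': {'build-platform': 'linux64/debug',
                          'test-sets': ['common-tests', 'web-platform-tests']},
    }
    test_sets = {
        'common-tests': ['cppunit', 'crashtest'],
        'web-platform-tests': ['web-platform-tests'],
    }

    def enumerate_tests(platforms, sets):
        # Yield one entry per (platform, test) pair, the raw material
        # for the tests transforms. Simplified sketch of the loader.
        for platform, info in platforms.items():
            for set_name in info['test-sets']:
                for test in sets[set_name]:
                    yield platform, info['build-platform'], test

    for triple in enumerate_tests(test_platforms, test_sets):
        print(triple)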
-diff --git a/taskcluster/ci/test/test-sets.yml b/taskcluster/ci/test/test-sets.yml
-deleted file mode 100644
---- a/taskcluster/ci/test/test-sets.yml
-+++ /dev/null
-@@ -1,259 +0,0 @@
--# Each key in this file specifies a set of tests to run.  Different test sets
--# may, for example, be bound to different test platforms.
--#
--# Note that set does not depend on the tree; tree-dependent job selection
--# should be performed in the target task selection phase of task-graph
--# generation.
--#
--# A test set has a name, and a list of tests that it contains.
--#
--# Test names given here reference tests.yml.
--
--##
--# Universal tests
--#
--# Tests that run just about everywhere; common-tests is the shared core, with
--# a few add-on test sets that are included or omitted on some platforms.
--
--common-tests:
--    - cppunit
--    - crashtest
--    - firefox-ui-functional-local
--    - firefox-ui-functional-remote
--    - gtest
--    - jittest
--    - jsreftest
--    - marionette
--    - marionette-headless
--    - mochitest
--    - mochitest-a11y
--    - mochitest-browser-chrome
--    - mochitest-chrome
--    - mochitest-clipboard
--    - mochitest-devtools-chrome
--    - mochitest-gpu
--    - mochitest-jetpack
--    - mochitest-media
--    - mochitest-webgl
--    - reftest
--    - reftest-no-accel
--    - telemetry-tests-client
--    - xpcshell
--
--web-platform-tests:
--    - web-platform-tests
--    - web-platform-tests-reftests
--    - web-platform-tests-wdspec
--
--opt-only-tests:
--    - mochitest-valgrind
--
--talos:
--    - talos-chrome
--    - talos-dromaeojs
--    - talos-g1
--    - talos-g2
--    - talos-g3
--    - talos-g4
--    - talos-g5
--    - talos-other
--    - talos-svgr
--    - talos-tp5o
--    - talos-perf-reftest
--    - talos-perf-reftest-singletons
--
--awsy:
--    - awsy
--
--##
--# Limited test sets for specific platforms
--
--stylo-tests:
--    - cppunit
--    - crashtest
--    - reftest
--    - reftest-stylo
--    - mochitest-style
--    - mochitest-chrome-style
--    - web-platform-tests
--    - web-platform-tests-reftests
--
--stylo-sequential-tests:
--    - crashtest
--    - reftest-stylo
--    - mochitest-style
--
--qr-tests:
--    - cppunit
--    - crashtest
--    - gtest
--    - jittest
--    - jsreftest
--    - mochitest-a11y
--    - mochitest-gpu
--    - mochitest-media
--    - mochitest-webgl
--    - reftest
--    - reftest-no-accel
--    - xpcshell
--
--jsdcov-code-coverage-tests:
--    - mochitest
--    - mochitest-browser-chrome
--    - mochitest-devtools-chrome
--    - xpcshell
--
--windows-tests:
--    - cppunit
--    - crashtest
--    - firefox-ui-functional-local
--    - firefox-ui-functional-remote
--    - gtest
--    - jittest
--    - jsreftest
--    - marionette
--    - marionette-headless
--    - mochitest
--    - mochitest-a11y
--    - mochitest-browser-chrome
--    - mochitest-chrome
--    - mochitest-clipboard
--    - mochitest-devtools-chrome
--    - mochitest-gpu
--    - mochitest-jetpack
--    - mochitest-media
--    - mochitest-webgl
--    - reftest
--    - reftest-gpu
--    - reftest-no-accel
--    - web-platform-tests
--    - web-platform-tests-reftests
--    - xpcshell
--
--windows-talos:
--    - talos-chrome
--    - talos-dromaeojs
--    - talos-g1
--    - talos-g2
--    - talos-g4
--    - talos-g5
--    - talos-other
--    - talos-perf-reftest
--    - talos-perf-reftest-singletons
--    - talos-svgr
--    - talos-tp5o
--    - talos-xperf
--    - talos-quantum-pageload
--    - talos-quantum-pageload-stylo
--
--macosx64-tests:
--    - cppunit
--    - crashtest
--    - firefox-ui-functional-local
--    - firefox-ui-functional-remote
--    - gtest
--    - jittest
--    - jsreftest
--    - marionette
--    - marionette-headless
--    - mochitest
--    - mochitest-a11y
--    - mochitest-browser-chrome
--    - mochitest-chrome
--    - mochitest-clipboard
--    - mochitest-devtools-chrome
--    - mochitest-gpu
--    - mochitest-jetpack
--    - mochitest-media
--    - mochitest-webgl
--    - reftest
--    - web-platform-tests
--    - web-platform-tests-reftests
--    - xpcshell
--
--macosx64-tests-talos:
--    - talos-chrome
--    - talos-dromaeojs
--    - talos-g1
--    - talos-g2
--    - talos-g4
--    - talos-g5
--    - talos-other
--    - talos-svgr
--    - talos-tp5o
--    - talos-perf-reftest
--    - talos-perf-reftest-singletons
--
--linux32-tests:
--    - cppunit
--    - crashtest
--    - firefox-ui-functional-local
--    - firefox-ui-functional-remote
--    - gtest
--    - jittest
--    - jsreftest
--    - marionette
--    - mochitest
--    - mochitest-a11y
--    - mochitest-browser-chrome
--    - mochitest-chrome
--    - mochitest-clipboard
--    - mochitest-gpu
--    - mochitest-jetpack
--    - mochitest-media
--    - mochitest-webgl
--    - reftest
--    - reftest-no-accel
--    - web-platform-tests
--    - web-platform-tests-reftests
--    - xpcshell
--
--linux32-opt-tests:
--    # mochitest-dt is too slow on linux32/debug
--    - mochitest-devtools-chrome
--
--desktop-screenshot-capture:
--    - mochitest-browser-screenshots
--
--android-common-tests:
--    - cppunit
--    - crashtest
--    - jsreftest
--    - mochitest
--    - mochitest-chrome
--    - mochitest-clipboard
--    - mochitest-gpu
--    - mochitest-media
--    - reftest
--    - xpcshell
--
--android-debug-tests:
--    # Marionette only available on Fennec debug builds as a security precaution
--    - marionette
--
--android-opt-tests:
--    - robocop
--
--android-gradle-tests:
--    - mochitest-chrome
--    - robocop
--    - geckoview
--
--android-x86-tests:
--    - mochitest-chrome
--    - xpcshell
--
--windows8-tests:
--    - crashtest
--    - mochitest
--    - mochitest-browser-chrome
--    - mochitest-devtools-chrome
--    - mochitest-media
--    - jsreftest
--    - reftest
--    - reftest-no-accel
--    - mochitest-gpu
--    - mochitest-webgl
--    - mochitest-clipboard
--    - mochitest-chrome
--    - xpcshell
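The tests.yml stanzas that follow lean heavily on by-test-platform: mappings, where keys are tried as exact matches first, then as anchored regular expressions, with default as the fallback. A simplified sketch of that resolution, exercised against the cppunit run-on-projects mapping from below:

    import re

    def resolve_keyed_by(alternatives, value):
        # Exact key wins; otherwise keys are treated as anchored
        # regexes; 'default' is the fallback. Simplified sketch.
        if value in alternatives:
            return alternatives[value]
        matches = [v for k, v in alternatives.items()
                   if k != 'default' and re.match(k + '$', value)]
        if len(matches) == 1:
            return matches[0]
        if not matches and 'default' in alternatives:
            return alternatives['default']
        raise KeyError('no unique match for %r' % value)

    run_on_projects = {
        'windows10-64-asan/opt': [],
        'linux64-qr/.*': ['mozilla-central', 'try'],
        'default': 'built-projects',
    }
    print(resolve_keyed_by(run_on_projects, 'linux64-qr/opt'))
    # ['mozilla-central', 'try']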
-diff --git a/taskcluster/ci/test/tests.yml b/taskcluster/ci/test/tests.yml
-deleted file mode 100644
---- a/taskcluster/ci/test/tests.yml
-+++ /dev/null
-@@ -1,1947 +0,0 @@
--# Each stanza here describes a particular test suite or sub-suite.  These are
--# processed through the transformations described in kind.yml to produce a
--# bunch of tasks.  See the schema in `taskcluster/taskgraph/transforms/tests.py`
--# for a description of the fields used here.
--
--# Note that these are in lexical order, as enforced by the task loader.
--
--awsy:
--    description: "Are we slim yet"
--    suite: awsy
--    treeherder-symbol: tc-SY(sy)
--    docker-image: {"in-tree": "desktop1604-test"}
--    max-run-time: 7200
--    e10s: true
--    instance-size: xlarge
--    allow-software-gl-layers: false
--    run-on-projects:
--        by-test-platform:
--            .*-devedition/.*: [] # don't run on devedition
--            default: built-projects
--    mozharness:
--        script: awsy_script.py
--        config:
--            by-test-platform:
--                windows.*/opt:
--                    - awsy/taskcluster_windows_config.py
--                macosx.*/opt:
--                    - awsy/macosx_config.py
--                default:
--                    - awsy/linux_config.py
--
--cppunit:
--    description: "CPP Unit Tests"
--    suite: cppunittest
--    treeherder-symbol: tc(Cpp)
--    e10s: false
--    docker-image: {"in-tree": "desktop1604-test"}
--    run-on-projects:
--        by-test-platform:
--            windows10-64-asan/opt: []  # as an exception to windows.*
--            linux64-qr/.*: ['mozilla-central', 'try']
--            macosx64-stylo/.*: ['autoland', 'mozilla-central', 'try']
--            default: built-projects
--    mozharness:
--        by-test-platform:
--            android.*:
--                script: android_emulator_unittest.py
--                no-read-buildbot-config: true
--                config:
--                    - android/androidarm_4_3.py
--                extra-options:
--                    - --test-suite=cppunittest
--            default:
--                script: desktop_unittest.py
--                no-read-buildbot-config: true
--                config:
--                    by-test-platform:
--                        windows.*:
--                            - unittests/win_taskcluster_unittest.py
--                        macosx.*:
--                            - unittests/mac_unittest.py
--                        linux.*:
--                            - unittests/linux_unittest.py
--                            - remove_executables.py
--                extra-options:
--                    - --cppunittest-suite=cppunittest
--
--crashtest:
--    description: "Crashtest run"
--    suite: reftest/crashtest
--    treeherder-symbol: tc-R(C)
--    instance-size:
--        by-test-platform:
--            android.*: xlarge
--            default: default
--    docker-image: {"in-tree": "desktop1604-test"}
--    run-on-projects:
--        by-test-platform:
--            # Deactivate try on Buildbot, by default. We don't have enough machines
--            windows8-64.*: ['mozilla-release', 'mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland']
--            windows8-64-devedition/opt: built-projects  # No dev edition outside of supported branches
--            windows10.*: []
--            macosx64-stylo/.*: ['autoland', 'mozilla-central', 'try']
--            default: built-projects
--    worker-type:
--        by-test-platform:
--            windows8-64.*: buildbot-bridge/buildbot-bridge
--            default: null
--    chunks:
--        by-test-platform:
--            android-4.3-arm7-api-16/debug: 10
--            android.*: 4
--            default: 1
--    e10s:
--        by-test-platform:
--            macosx64/debug: true
--            # Bug 1384701: e10s-off parallel reftest runs crash on shutdown
--            linux64-stylo/debug: true
--            default: both
--    mozharness:
--        by-test-platform:
--            android.*:
--                script: android_emulator_unittest.py
--                no-read-buildbot-config: true
--                config:
--                    - android/androidarm_4_3.py
--                extra-options:
--                    - --test-suite=crashtest
--            default:
--                script: desktop_unittest.py
--                chunked: false
--                no-read-buildbot-config: true
--                config:
--                    by-test-platform:
--                        windows.*:
--                            - unittests/win_taskcluster_unittest.py
--                        macosx.*:
--                            - unittests/mac_unittest.py
--                        linux.*:
--                            - unittests/linux_unittest.py
--                            - remove_executables.py
--                extra-options:
--                    - --reftest-suite=crashtest
--    tier:
--        by-test-platform:
--            linux64-qr/.*: 1
--            default: default
--
--firefox-ui-functional-local:
--    description: "Firefox-ui-tests functional run"
--    suite: "firefox-ui/functional local"
--    treeherder-symbol: tc-Fxfn-l(en-US)
--    max-run-time: 5400
--    docker-image: {"in-tree": "desktop1604-test"}
--    mozharness:
--        script: firefox_ui_tests/functional.py
--        config:
--            by-test-platform:
--                windows.*:
--                    - firefox_ui_tests/taskcluster_windows.py
--                macosx.*:
--                    - firefox_ui_tests/taskcluster.py
--                    - firefox_ui_tests/taskcluster_mac.py
--                linux.*:
--                    - firefox_ui_tests/taskcluster.py
--                    - remove_executables.py
--        extra-options:
--            - "--tag"
--            - "local"
--
--firefox-ui-functional-remote:
--    description: "Firefox-ui-tests functional run"
--    suite: "firefox-ui/functional remote"
--    treeherder-symbol: tc-Fxfn-r(en-US)
--    max-run-time: 5400
--    tier: 2
--    docker-image: {"in-tree": "desktop1604-test"}
--    mozharness:
--        script: firefox_ui_tests/functional.py
--        config:
--            by-test-platform:
--                windows.*:
--                    - firefox_ui_tests/taskcluster_windows.py
--                macosx.*:
--                    - firefox_ui_tests/taskcluster.py
--                    - firefox_ui_tests/taskcluster_mac.py
--                linux.*:
--                    - firefox_ui_tests/taskcluster.py
--                    - remove_executables.py
--        extra-options:
--            - "--tag"
--            - "remote"
--
--geckoview:
--    description: "Geckoview run"
--    suite: geckoview
--    treeherder-symbol: tc(gv)
--    instance-size: xlarge
--    loopback-video: true
--    e10s: false
--    mozharness:
--        script: android_emulator_unittest.py
--        no-read-buildbot-config: true
--        config:
--            - android/androidarm_4_3.py
--        extra-options:
--            - --test-suite=geckoview
--
--gtest:
--    description: "GTests run"
--    suite: gtest
--    treeherder-symbol: tc(GTest)
--    e10s: false
--    instance-size: xlarge
--    docker-image: {"in-tree": "desktop1604-test"}
--    run-on-projects:
--        by-test-platform:
--            linux64-qr/.*: ['mozilla-central', 'try']
--            windows.*-pgo/.*: [] # permafails on pgo
--            windows.*-nightly/.*: [] # permafails on nightly too
--            .*-devedition/.*: [] # don't run on devedition
--            default: built-projects
--    mozharness:
--        script: desktop_unittest.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                windows.*:
--                    - unittests/win_taskcluster_unittest.py
--                macosx.*:
--                    - unittests/mac_unittest.py
--                linux.*:
--                    - unittests/linux_unittest.py
--                    - remove_executables.py
--        extra-options:
--            - --gtest-suite=gtest
--    tier:
--        by-test-platform:
--            windows7-32-pgo.*: 3
--            windows10-64-pgo.*: 3
--            default: default
--
--jittest:
--    description: "JIT Test run"
--    suite: jittest/jittest-chunked
--    treeherder-symbol: tc(Jit)
--    e10s: false
--    docker-image: {"in-tree": "desktop1604-test"}
--    run-on-projects:
--        by-test-platform:
--            linux64-qr/.*: ['mozilla-central', 'try']
--            default: built-projects
--    chunks:
--        by-test-platform:
--            windows.*: 1
--            macosx.*: 1
--            default: 6
--    mozharness:
--        chunked:
--          by-test-platform:
--            windows.*: false
--            macosx.*: false
--            default: true
--        script: desktop_unittest.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                windows.*:
--                    - unittests/win_taskcluster_unittest.py
--                macosx.*:
--                    - unittests/mac_unittest.py
--                linux.*:
--                    - unittests/linux_unittest.py
--                    - remove_executables.py
--        extra-options:
--            - --jittest-suite=jittest-chunked
--    when:
--        files-changed:
--            - js/src/**
--            - js/public/**
--
--jsreftest:
--    description: "JS Reftest run"
--    suite: reftest/jsreftest
--    treeherder-symbol: tc-R(J)
--    docker-image: {"in-tree": "desktop1604-test"}
--    instance-size:
--        by-test-platform:
--            android.*: xlarge
--            default: default
--    chunks:
--        by-test-platform:
--            android-4.3-arm7-api-16/debug: 100
--            android.*: 40
--            windows.*: 2
--            linux64-ccov/.*: 5
--            linux64-qr/.*: 4
--            macosx.*: 2
--            default: 3
--    e10s:
--        by-test-platform:
--            android.*: false
--            macosx.*: true
--            default: both
--    max-run-time:
--        by-test-platform:
--            android.*: 7200
--            default: 3600
--    worker-type:
--        by-test-platform:
--            windows8-64.*: buildbot-bridge/buildbot-bridge
--            default: null
--    mozharness:
--        by-test-platform:
--            android.*:
--                script: android_emulator_unittest.py
--                no-read-buildbot-config: true
--                config:
--                    - android/androidarm_4_3.py
--                extra-options:
--                    - --test-suite=jsreftest
--            default:
--                script: desktop_unittest.py
--                no-read-buildbot-config: true
--                chunked: true
--                config:
--                    by-test-platform:
--                        windows.*:
--                            - unittests/win_taskcluster_unittest.py
--                        macosx.*:
--                            - unittests/mac_unittest.py
--                        linux.*:
--                            - unittests/linux_unittest.py
--                            - remove_executables.py
--                extra-options:
--                    - --reftest-suite=jsreftest
--    when:
--        files-changed:
--            - js/src/**
--            - js/public/**
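--    # tier controls Treeherder visibility; tier-3 jobs are hidden by default.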
--    tier:
--        by-test-platform:
--            linux64-qr/.*: 1
--            windows10.*: 3
--            default: default
--
--marionette:
--    description: "Marionette unittest run"
--    suite: marionette
--    treeherder-symbol: tc(Mn)
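--    # max-run-time is in seconds; the worker kills tasks that exceed it.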
--    max-run-time:
--        by-test-platform:
--            android.*: 3600
--            default: 5400
--    instance-size:
--        by-test-platform:
--            android.*: xlarge
--            default: default
--    docker-image: {"in-tree": "desktop1604-test"}
--    tier:
--        by-test-platform:
--            android.*: 2
--            default: default
--    chunks:
--        by-test-platform:
--            android.*: 10
--            default: 1
--    e10s:
--        by-test-platform:
--            macosx.*: true
--            default: both
--    mozharness:
--        by-test-platform:
--            android.*:
--                script: android_emulator_unittest.py
--                no-read-buildbot-config: true
--                config:
--                    - android/androidarm_4_3.py
--                extra-options:
--                    - --test-suite=marionette
--            default:
--                script: marionette.py
--                no-read-buildbot-config: true
--                config:
--                    by-test-platform:
--                        windows.*:
--                            - marionette/windows_taskcluster_config.py
--                        macosx.*:
--                            - marionette/prod_config.py
--                            - marionette/mac_taskcluster_config.py
--                        default:
--                            - marionette/prod_config.py
--                            - remove_executables.py
--
--marionette-headless:
--    description: "Marionette headless unittest run"
--    suite: marionette
--    treeherder-symbol: tc(MnH)
--    max-run-time:
--        by-test-platform:
--            default: 5400
--    instance-size:
--        by-test-platform:
--            default: default
--    docker-image: {"in-tree": "desktop1604-test"}
--    tier:
--        by-test-platform:
--            default: default
--    chunks:
--        by-test-platform:
--            default: 1
--    e10s:
--        by-test-platform:
--            default: both
--    mozharness:
--        by-test-platform:
--            default:
--                script: marionette.py
--                no-read-buildbot-config: true
--                config:
--                    by-test-platform:
--                        windows.*:
--                            - marionette/windows_taskcluster_config.py
--                        macosx.*:
--                            - marionette/prod_config.py
--                            - marionette/mac_taskcluster_config.py
--                        default:
--                            - marionette/prod_config.py
--                            - remove_executables.py
--                extra-options:
--                    by-test-platform:
--                        default:
--                            - --headless
--
--mochitest:
--    description: "Mochitest plain run"
--    suite:
--        by-test-platform:
--            linux64-jsdcov/opt: mochitest/plain-chunked-coverage
--            default: mochitest/plain-chunked
--    treeherder-symbol: tc-M()
--    loopback-video: true
--    docker-image: {"in-tree": "desktop1604-test"}
--    instance-size:
--        by-test-platform:
--            linux64-jsdcov/opt: xlarge
--            android.*: xlarge
--            default: legacy # Bug 1281241: migrating to m3.large instances
--    chunks:
--        by-test-platform:
--            android-4.3-arm7-api-16/debug: 48
--            android.*: 20
--            macosx.*: 5
--            windows.*: 5
--            linux32/debug: 16
--            linux.*: 10
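--    # e10s `both` schedules the suite twice, with and without multi-process
--    # content; `true`/`false` pins a single variant.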
--    e10s:
--        by-test-platform:
--            linux64-jsdcov/opt: false
--            android.*: false
--            macosx64/debug: true
--            default: both
--    max-run-time:
--        by-test-platform:
--            android-4.3-arm7-api-16/debug: 10800
--            linux64-jsdcov/opt: 10800
--            default: 5400
--    allow-software-gl-layers: false
--    run-on-projects:
--        by-test-platform:
--            # Deactivate try on Buildbot by default; we don't have enough machines.
--            windows8-64.*: ['mozilla-release', 'mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland']
--            windows8-64-devedition/opt: built-projects  # No dev edition outside of supported branches
--            windows10.*: []
--            default: built-projects
--    worker-type:
--        by-test-platform:
--            windows8-64.*: buildbot-bridge/buildbot-bridge
--            default: null
--    mozharness:
--        by-test-platform:
--            android.*:
--                mochitest-flavor: plain
--                script: android_emulator_unittest.py
--                no-read-buildbot-config: true
--                config:
--                    - android/androidarm_4_3.py
--                extra-options:
--                    - --test-suite=mochitest
--            default:
--                mochitest-flavor: plain
--                script: desktop_unittest.py
--                no-read-buildbot-config: true
--                chunked: true
--                config:
--                    by-test-platform:
--                        windows.*:
--                            - unittests/win_taskcluster_unittest.py
--                        macosx.*:
--                            - unittests/mac_unittest.py
--                        linux.*:
--                            - unittests/linux_unittest.py
--                            - remove_executables.py
--                extra-options:
--                    by-test-platform:
--                        linux64-jsdcov/opt:
--                            - --mochitest-suite=plain-chunked-coverage
--                        default:
--                            - --mochitest-suite=plain-chunked
--
--mochitest-a11y:
--    description: "Mochitest a11y run"
--    suite: mochitest/a11y
--    treeherder-symbol: tc-M(a11y)
--    loopback-video: true
--    e10s: false
--    docker-image: {"in-tree": "desktop1604-test"}
--    run-on-projects:
--        by-test-platform:
--            linux64-qr/.*: ['mozilla-central', 'try']
--            default: built-projects
--    mozharness:
--        script: desktop_unittest.py
--        no-read-buildbot-config: true
--        chunked: false
--        mochitest-flavor: a11y
--        config:
--            by-test-platform:
--                windows.*:
--                    - unittests/win_taskcluster_unittest.py
--                macosx.*:
--                    - unittests/mac_unittest.py
--                linux.*:
--                    - unittests/linux_unittest.py
--                    - remove_executables.py
--        extra-options:
--            - --mochitest-suite=a11y
--
--mochitest-browser-chrome:
--    description: "Mochitest browser-chrome run"
--    suite:
--        by-test-platform:
--            linux64-jsdcov/opt: mochitest/browser-chrome-coverage
--            default: mochitest/browser-chrome-chunked
--    treeherder-symbol: tc-M(bc)
--    loopback-video: true
--    docker-image: {"in-tree": "desktop1604-test"}
--    chunks:
--        by-test-platform:
--            linux64-jsdcov/opt: 35
--            linux64/debug: 16
--            linux32/debug: 16
--            linux64-asan/opt: 16
--            default: 7
--    e10s:
--        by-test-platform:
--            linux64-jsdcov/opt: false
--            macosx64/debug: true
--            default: both
--    max-run-time:
--        by-test-platform:
--            linux64-jsdcov/opt: 7200
--            linux64-ccov/opt: 7200
--            linux64/debug: 5400
--            linux32/debug: 5400
--            default: 3600
--    run-on-projects:
--        by-test-platform:
--            # Deactivate try on Buildbot by default; we don't have enough machines.
--            windows8-64.*: ['mozilla-release', 'mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland']
--            windows8-64-devedition/opt: built-projects  # No dev edition outside of supported branches
--            windows10.*: []
--            default: built-projects
--    worker-type:
--        by-test-platform:
--            windows8-64.*: buildbot-bridge/buildbot-bridge
--            default: null
--    mozharness:
--        mochitest-flavor: browser
--        script: desktop_unittest.py
--        no-read-buildbot-config: true
--        chunked: true
--        config:
--            by-test-platform:
--                windows.*:
--                    - unittests/win_taskcluster_unittest.py
--                macosx.*:
--                    - unittests/mac_unittest.py
--                linux.*:
--                    - unittests/linux_unittest.py
--                    - remove_executables.py
--        extra-options:
--            by-test-platform:
--                linux64-jsdcov/opt:
--                    - --mochitest-suite=browser-chrome-coverage
--                default:
--                    - --mochitest-suite=browser-chrome-chunked
--    # Bug 1281241: migrating to m3.large instances
--    instance-size:
--        by-test-platform:
--            linux64-jsdcov/opt: xlarge
--            default: legacy
--    allow-software-gl-layers: false
--
--mochitest-browser-screenshots:
--    description: "Mochitest Browser Screenshots"
--    suite: mochitest/browser-chrome-screenshots
--    treeherder-symbol: tc-M(ss)
--    loopback-video: true
--    run-on-projects:
--        by-test-platform:
--            windows.*/opt: ['mozilla-central', 'try']
--            linux64/opt: ['mozilla-central', 'try']
--            macosx.*/opt: ['mozilla-central', 'try']
--            windows7-32-devedition/opt: []
--            windows10-64-devedition/opt: []
--            default: []
--    e10s: both
--    max-run-time: 3600
--    mozharness:
--        mochitest-flavor: browser
--        script: desktop_unittest.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                windows.*:
--                    - unittests/win_taskcluster_unittest.py
--                macosx.*:
--                    - unittests/mac_unittest.py
--                linux.*:
--                    - unittests/linux_unittest.py
--                    - remove_executables.py
--        extra-options:
--            - --mochitest-suite=browser-chrome-screenshots
--    instance-size: legacy
--    allow-software-gl-layers: false
--
--mochitest-chrome:
--    description: "Mochitest chrome run"
--    suite: mochitest/chrome
--    treeherder-symbol: tc-M(c)
--    loopback-video: true
--    virtualization: hardware
--    docker-image: {"in-tree": "desktop1604-test"}
--    instance-size:
--        by-test-platform:
--            android.*: xlarge
--            default: default
--    chunks:
--        by-test-platform:
--            android-4.3-arm7-api-16/debug: 4
--            android.*: 2
--            default: 3
--    max-run-time: 3600
--    e10s: false
--    run-on-projects:
--        by-test-platform:
--            windows7-32.*: ['try']
--            default: built-projects
--    worker-type:
--        by-test-platform:
--            windows7-32.*: buildbot-bridge/buildbot-bridge
--            windows8-64.*: buildbot-bridge/buildbot-bridge
--            default: null
--    mozharness:
--        by-test-platform:
--            android.*:
--                mochitest-flavor: chrome
--                script: android_emulator_unittest.py
--                no-read-buildbot-config: true
--                config:
--                    by-test-platform:
--                        android-4.2-x86/opt:
--                            - android/androidx86.py
--                        default:
--                            - android/androidarm_4_3.py
--                extra-options:
--                    - --test-suite=mochitest-chrome
--            default:
--                mochitest-flavor: chrome
--                script: desktop_unittest.py
--                no-read-buildbot-config: true
--                chunked: true
--                config:
--                    by-test-platform:
--                        windows.*:
--                            - unittests/win_taskcluster_unittest.py
--                        macosx.*:
--                            - remove_executables.py
--                            - unittests/mac_unittest.py
--                        linux.*:
--                            - unittests/linux_unittest.py
--                            - remove_executables.py
--                extra-options:
--                    - --mochitest-suite=chrome
--    tier:
--        by-test-platform:
--            windows7-32.*: 2
--            default: default
--
--mochitest-chrome-style:
--    description: "Mochitest chrome run for style system"
--    suite: mochitest/chrome-style
--    treeherder-symbol: tc-M(cs)
--    loopback-video: true
--    docker-image: {"in-tree": "desktop1604-test"}
--    run-on-projects:
--        by-test-platform:
--            macosx64-stylo/.*: ['autoland', 'mozilla-central', 'try']
--            default: built-projects
--    e10s: false
--    mozharness:
--        mochitest-flavor: chrome
--        script: desktop_unittest.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                windows.*:
--                    - unittests/win_taskcluster_unittest.py
--                macosx.*:
--                    - unittests/mac_unittest.py
--                default:
--                    - unittests/linux_unittest.py
--                    - remove_executables.py
--        extra-options:
--            - --mochitest-suite=chrome-style
--
--mochitest-clipboard:
--    description: "Mochitest clipboard run"
--    suite: mochitest/clipboard
--    treeherder-symbol: tc-M(cl)
--    loopback-video: true
--    virtualization: hardware
--    docker-image: {"in-tree": "desktop1604-test"}
--    instance-size: xlarge
--    e10s:
--      by-test-platform:
--        macosx64/debug: true
--        default: both
--    run-on-projects:
--        by-test-platform:
--            windows.*: ['try']
--            default: built-projects
--    worker-type:
--        by-test-platform:
--            windows7-32.*: buildbot-bridge/buildbot-bridge
--            windows8-64.*: buildbot-bridge/buildbot-bridge
--            default: null
--    mozharness:
--        by-test-platform:
--            android.*:
--                mochitest-flavor: plain
--                script: android_emulator_unittest.py
--                no-read-buildbot-config: true
--                config:
--                    - android/androidarm_4_3.py
--                extra-options:
--                    # note that Android runs fewer suites than other platforms
--                    - --test-suite=mochitest-plain-clipboard
--            default:
--                mochitest-flavor: plain
--                script: desktop_unittest.py
--                no-read-buildbot-config: true
--                chunked: false
--                config:
--                    by-test-platform:
--                        windows.*:
--                            - unittests/win_taskcluster_unittest.py
--                        macosx.*:
--                            - remove_executables.py
--                            - unittests/mac_unittest.py
--                        linux.*:
--                            - unittests/linux_unittest.py
--                            - remove_executables.py
--                extra-options:
--                    - --mochitest-suite=plain-clipboard,chrome-clipboard,browser-chrome-clipboard,jetpack-package-clipboard
--    tier:
--        by-test-platform:
--            windows7-32.*: 2
--            default: default
--
--mochitest-devtools-chrome:
--    description: "Mochitest devtools-chrome run"
--    suite:
--        by-test-platform:
--            linux64-jsdcov/opt: mochitest/mochitest-devtools-chrome-coverage
--            default: mochitest/mochitest-devtools-chrome-chunked
--    treeherder-symbol: tc-M(dt)
--    loopback-video: true
--    max-run-time: 5400
--    docker-image: {"in-tree": "desktop1604-test"}
--    chunks:
--        by-test-platform:
--            windows.*: 8
--            macosx.*: 8
--            default: 10
--    run-on-projects:
--        by-test-platform:
--            # Deactivate try on Buildbot by default; we don't have enough machines.
--            windows8-64.*: ['mozilla-release', 'mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland']
--            windows8-64-devedition/opt: built-projects  # No dev edition outside of supported branches
--            windows10.*: []
--            default: built-projects
--    worker-type:
--        by-test-platform:
--            windows8-64.*: buildbot-bridge/buildbot-bridge
--            default: null
--    e10s:
--        by-test-platform:
--            linux64-jsdcov/opt: false
--            macosx64/debug: true
--            default: both
--    mozharness:
--        mochitest-flavor: chrome
--        script: desktop_unittest.py
--        no-read-buildbot-config: true
--        chunked: true
--        config:
--            by-test-platform:
--                windows.*:
--                    - unittests/win_taskcluster_unittest.py
--                macosx.*:
--                    - unittests/mac_unittest.py
--                linux.*:
--                    - unittests/linux_unittest.py
--                    - remove_executables.py
--        extra-options:
--            by-test-platform:
--                linux64-jsdcov/opt:
--                    - --mochitest-suite=mochitest-devtools-chrome-coverage
--                default:
--                    - --mochitest-suite=mochitest-devtools-chrome-chunked
--    instance-size:
--        by-test-platform:
--            # Bug 1361476 - try xlarge on asan to see if it avoids OOM
--            linux64-asan/opt: xlarge
--            default: default
--    # Bug 1296086: high number of intermittents observed with software GL and large instances
--    allow-software-gl-layers: false
--
--mochitest-gpu:
--    description: "Mochitest GPU run"
--    suite: mochitest/gpu
--    treeherder-symbol: tc-M(gpu)
--    loopback-video: true
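--    # virtual-with-gpu selects virtual workers that expose a GPU, which the
--    # gpu and webgl suites require.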
--    virtualization: virtual-with-gpu
--    docker-image: {"in-tree": "desktop1604-test"}
--    run-on-projects:
--        by-test-platform:
--            # Deactivate try on Buildbot by default; we don't have enough machines.
--            windows8-64.*: ['mozilla-release', 'mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland']
--            windows8-64-devedition/opt: built-projects  # No dev edition outside of supported branches
--            windows10.*: []
--            default: built-projects
--    worker-type:
--        by-test-platform:
--            windows8-64.*: buildbot-bridge/buildbot-bridge
--            default: null
--    e10s:
--        by-test-platform:
--            windows.*: both
--            android.*: false
--            macosx64/opt: both
--            default: true
--    mozharness:
--        by-test-platform:
--            android.*:
--                mochitest-flavor: plain
--                script: android_emulator_unittest.py
--                no-read-buildbot-config: true
--                config:
--                    - android/androidarm_4_3.py
--                extra-options:
--                    # note that Android runs fewer suites than other platforms
--                    - --test-suite=mochitest-plain-gpu
--            default:
--                mochitest-flavor: plain
--                script: desktop_unittest.py
--                no-read-buildbot-config: true
--                chunked: false
--                config:
--                    by-test-platform:
--                        windows.*:
--                            - unittests/win_taskcluster_unittest.py
--                        macosx.*:
--                            - unittests/mac_unittest.py
--                        linux.*:
--                            - unittests/linux_unittest.py
--                            - remove_executables.py
--                extra-options:
--                    - --mochitest-suite=plain-gpu,chrome-gpu,browser-chrome-gpu
--    tier:
--        by-test-platform:
--            linux64-qr/.*: 1
--            default: default
--
--mochitest-jetpack:
--    description: "Mochitest jetpack run"
--    suite: mochitest/jetpack-package
--    treeherder-symbol: tc-M(JP)
--    loopback-video: true
--    e10s: false
--    max-run-time: 5400
--    docker-image: {"in-tree": "desktop1604-test"}
--    mozharness:
--        mochitest-flavor: jetpack-package
--        script: desktop_unittest.py
--        no-read-buildbot-config: true
--        chunked: false
--        config:
--            by-test-platform:
--                windows.*:
--                    - unittests/win_taskcluster_unittest.py
--                macosx.*:
--                    - unittests/mac_unittest.py
--                linux.*:
--                    - unittests/linux_unittest.py
--                    - remove_executables.py
--        extra-options:
--            - --mochitest-suite=jetpack-package
--            - --mochitest-suite=jetpack-addon
--
--mochitest-media:
--    description: "Mochitest media run"
--    suite: mochitest/mochitest-media
--    treeherder-symbol: tc-M(mda)
--    max-run-time: 5400
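--    # loopback-video provides a loopback video capture device for the
--    # getUserMedia/WebRTC media tests.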
--    loopback-video: true
--    docker-image: {"in-tree": "desktop1604-test"}
--    e10s:
--      by-test-platform:
--        macosx64/debug: true
--        default: both
--    instance-size:
--        by-test-platform:
--            android.*: xlarge
--            default: large
--    chunks:
--        by-test-platform:
--            android.*: 3
--            macosx64.*: 1
--            windows8-64.*: 1
--            default: 3
--    run-on-projects:
--        by-test-platform:
--            # Deactivate try on Buildbot by default; we don't have enough machines.
--            windows8-64.*: ['mozilla-release', 'mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland']
--            windows8-64-devedition/opt: built-projects  # No dev edition outside of supported branches
--            windows10.*: []
--            default: built-projects
--    worker-type:
--        by-test-platform:
--            windows8-64.*: buildbot-bridge/buildbot-bridge
--            default: null
--    mozharness:
--        by-test-platform:
--            android.*:
--                mochitest-flavor: plain
--                script: android_emulator_unittest.py
--                no-read-buildbot-config: true
--                config:
--                    - android/androidarm_4_3.py
--                extra-options:
--                    - --test-suite=mochitest-media
--            default:
--                mochitest-flavor: plain
--                script: desktop_unittest.py
--                no-read-buildbot-config: true
--                chunked:
--                  by-test-platform:
--                    macosx64.*: false
--                    windows8-64.*: false
--                    default: true
--                config:
--                    by-test-platform:
--                        windows.*:
--                            - unittests/win_taskcluster_unittest.py
--                        macosx.*:
--                            - unittests/mac_unittest.py
--                        linux.*:
--                            - unittests/linux_unittest.py
--                            - remove_executables.py
--                extra-options:
--                    - --mochitest-suite=mochitest-media
--    tier:
--        by-test-platform:
--            linux64-qr/.*: 1
--            default: default
--
--mochitest-style:
--    description: "Mochitest plain run for style system"
--    suite: mochitest/plain-style
--    treeherder-symbol: tc-M(s)
--    loopback-video: true
--    docker-image: {"in-tree": "desktop1604-test"}
--    run-on-projects:
--        by-test-platform:
--            macosx64-stylo/.*: ['autoland', 'mozilla-central', 'try']
--            default: built-projects
--    e10s:
--        by-test-platform:
--            # Bug 1384701: e10s-off parallel reftest runs crash on shutdown
--            linux64-stylo/debug: true
--            default: both
--    mozharness:
--        mochitest-flavor: plain
--        script: desktop_unittest.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                windows.*:
--                    - unittests/win_taskcluster_unittest.py
--                macosx.*:
--                    - unittests/mac_unittest.py
--                default:
--                    - unittests/linux_unittest.py
--                    - remove_executables.py
--        extra-options:
--            - --mochitest-suite=plain-style
--
--mochitest-valgrind:
--    description: "Mochitest plain Valgrind run"
--    suite: mochitest/valgrind-plain
--    treeherder-symbol: tc-M-V()
--    run-on-projects: []
--    loopback-video: true
--    chunks: 40
--    max-run-time: 14400
--    # We could re-enable e10s later.
--    # There's no intrinsic reason not to use it.
--    e10s: false
--    allow-software-gl-layers: false
--    mozharness:
--        mochitest-flavor: plain
--        script: desktop_unittest.py
--        no-read-buildbot-config: true
--        chunked: true
--        config:
--            by-test-platform:
--                windows.*:
--                    - unittests/win_taskcluster_unittest.py
--                macosx.*:
--                    - unittests/mac_unittest.py
--                linux.*:
--                    - unittests/linux_unittest.py
--                    - remove_executables.py
--        extra-options:
--            - --mochitest-suite=valgrind-plain
--
--mochitest-webgl:
--    description: "Mochitest webgl run"
--    suite: mochitest/mochitest-gl
--    treeherder-symbol: tc-M(gl)
--    virtualization: virtual-with-gpu
--    docker-image: {"in-tree": "desktop1604-test"}
--    run-on-projects:
--        by-test-platform:
--            # Deactivate try on Buildbot by default; we don't have enough machines.
--            windows8-64.*: ['mozilla-release', 'mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland']
--            windows8-64-devedition/opt: built-projects  # No dev edition outside of supported branches
--            windows10.*: []
--            default: built-projects
--    worker-type:
--        by-test-platform:
--            windows8-64.*: buildbot-bridge/buildbot-bridge
--            default: null
--    chunks:
--        by-test-platform:
--            android.*: 10
--            default: 3
--    e10s:
--      by-test-platform:
--        windows7.*: true
--        macosx.*: true
--        default: both
--    loopback-video: true
--    max-run-time:
--        by-test-platform:
--            windows.*: 5400
--            android.*: 7200
--            default: 3600
--    instance-size:
--        by-test-platform:
--            android.*: xlarge
--            default: default
--    # Bug 1296733: llvmpipe with mesa 9.2.1 lacks thread safety
--    allow-software-gl-layers: false
--    mozharness:
--        by-test-platform:
--            android.*:
--                mochitest-flavor: plain
--                script: android_emulator_unittest.py
--                no-read-buildbot-config: true
--                config:
--                    - android/androidarm_4_3.py
--                extra-options:
--                    - --test-suite=mochitest-gl
--            default:
--                mochitest-flavor: plain
--                script: desktop_unittest.py
--                no-read-buildbot-config: true
--                chunked: true
--                config:
--                    by-test-platform:
--                        windows.*:
--                            - unittests/win_taskcluster_unittest.py
--                        macosx.*:
--                            - unittests/mac_unittest.py
--                        linux.*:
--                            - unittests/linux_unittest.py
--                            - remove_executables.py
--                extra-options:
--                    - --mochitest-suite=mochitest-gl
--    tier:
--        by-test-platform:
--            linux64-qr/.*: 1
--            default: default
--
--reftest:
--    description: "Reftest run"
--    suite: reftest/reftest
--    treeherder-symbol: tc-R(R)
--    docker-image: {"in-tree": "desktop1604-test"}
--    run-on-projects:
--        by-test-platform:
--            # Deactivate try on Buildbot by default; we don't have enough machines.
--            windows8-64.*: ['mozilla-release', 'mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland']
--            windows8-64-devedition/opt: built-projects  # No dev edition outside of supported branches
--            windows10.*: []
--            macosx64-stylo/.*: ['autoland', 'mozilla-central', 'try']
--            default: built-projects
--    worker-type:
--        by-test-platform:
--            windows8-64.*: buildbot-bridge/buildbot-bridge
--            default: null
--    instance-size:
--        by-test-platform:
--            android.*: xlarge
--            default: default
--    virtualization: virtual-with-gpu
--    chunks:
--        by-test-platform:
--            android-4.3-arm7-api-16/debug: 48
--            android.*: 16
--            macosx64.*/opt: 1
--            macosx64.*/debug: 2
--            windows8-64.*/opt: 1
--            windows8-64.*/debug: 2
--            default: 8
--    max-run-time:
--        by-test-platform:
--            android.*: 10800
--            default: 3600
--    e10s:
--        by-test-platform:
--            macosx.*: true
--            # Bug 1384701: e10s-off parallel reftest runs crash on shutdown
--            linux64-stylo/debug: true
--            default: both
--    mozharness:
--        by-test-platform:
--            android.*:
--                script: android_emulator_unittest.py
--                no-read-buildbot-config: true
--                config:
--                    - android/androidarm_4_3.py
--                extra-options:
--                    - --test-suite=reftest
--            default:
--                script: desktop_unittest.py
--                no-read-buildbot-config: true
--                chunked:
--                  by-test-platform:
--                    macosx64/opt: false
--                    windows8-64.*/opt: false
--                    windows8-64.*/debug: true
--                    default: true
--                config:
--                    by-test-platform:
--                        windows.*:
--                            - unittests/win_taskcluster_unittest.py
--                        macosx.*:
--                            - unittests/mac_unittest.py
--                        linux.*:
--                            - unittests/linux_unittest.py
--                            - remove_executables.py
--                extra-options:
--                    - --reftest-suite=reftest
--    tier:
--        by-test-platform:
--            linux64-qr/.*: 1
--            default: default
--
--reftest-gpu:
--    description: "Reftest GPU run"
--    suite: reftest/reftest-gpu
--    treeherder-symbol: tc-R(Rg)
--    docker-image: {"in-tree": "desktop1604-test"}
--    run-on-projects:
--        by-test-platform:
--            windows10.*: []
--            windows8-64.*: []
--            default: built-projects
--    worker-type:
--        by-test-platform:
--            windows7-32.*/debug: buildbot-bridge/buildbot-bridge
--            default: null
--    instance-size:
--        by-test-platform:
--            default: default
--    virtualization: virtual-with-gpu
--    max-run-time:
--        by-test-platform:
--            default: 3600
--    e10s: true
--    mozharness:
--        by-test-platform:
--            default:
--                script: desktop_unittest.py
--                no-read-buildbot-config: true
--                chunked:
--                  by-test-platform:
--                    default: false
--                config:
--                    by-test-platform:
--                        windows.*:
--                            - unittests/win_taskcluster_unittest.py
--                        macosx.*:
--                            - unittests/mac_unittest.py
--                        linux.*:
--                            - unittests/linux_unittest.py
--                            - remove_executables.py
--                extra-options:
--                    - --reftest-suite=reftest-gpu
--    tier: default
--
--reftest-no-accel:
--    description: "Reftest not accelerated run"
--    suite: reftest/reftest-no-accel
--    treeherder-symbol: tc-R(Ru)
--    virtualization: virtual-with-gpu
--    docker-image: {"in-tree": "desktop1604-test"}
--    run-on-projects:
--        by-test-platform:
--            windows10.*: []
--            linux64-qr/.*: ['mozilla-central', 'try']
--            # Deactivate try on Buildbot by default; we don't have enough machines.
--            windows8-64.*: ['mozilla-release', 'mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland']
--            windows8-64-devedition/opt: built-projects  # No dev edition outside of supported branches
--            default: built-projects
--    worker-type:
--        by-test-platform:
--            windows8-64.*: buildbot-bridge/buildbot-bridge
--            default: null
--    chunks:
--        by-test-platform:
--            macosx.*: 1
--            windows8-64.*/debug: 2
--            windows8-64.*/opt: 1
--            default: 8
--    mozharness:
--        script: desktop_unittest.py
--        no-read-buildbot-config: true
--        chunked:
--            by-test-platform:
--                windows8-64.*/opt: true
--                macosx.*: false
--                default: true
--        config:
--            by-test-platform:
--                windows.*:
--                    - unittests/win_taskcluster_unittest.py
--                macosx.*:
--                    - unittests/mac_unittest.py
--                linux.*:
--                    - unittests/linux_unittest.py
--                    - remove_executables.py
--        extra-options:
--            - --reftest-suite=reftest-no-accel
--
--reftest-stylo:
--    description: "Reftest run for Stylo"
--    suite: reftest/reftest-stylo
--    treeherder-symbol: tc-R(Rs)
--    docker-image: {"in-tree": "desktop1604-test"}
--    chunks: 8
--    run-on-projects:
--        by-test-platform:
--            .*-stylo/opt: ['autoland', 'mozilla-central', 'try'] # not on mozilla-inbound
--            macosx64-stylo/debug: ['autoland', 'mozilla-central', 'try'] # not on mozilla-inbound
--            linux64-stylo-sequential/opt: ['mozilla-central']
--            default: built-projects
--    e10s:
--        # run only e10s tests for this platform; see bug 1343301
--        by-test-platform:
--            .*-stylo/opt:
--                # no opt on inbound; see bug 1339604
--                by-project:
--                    autoland: true
--                    mozilla-central: true
--                    default: true
--            .*-stylo/debug:
--                by-project:
--                    autoland: true
--                    mozilla-central: true
--                    mozilla-inbound: true
--                    default: true
--            linux64-stylo-sequential/opt:
--                by-project:
--                    mozilla-central: true
--                    default: true
--            linux64-stylo-sequential/debug:
--                by-project:
--                    mozilla-central: true
--                    default: true
--    mozharness:
--        script: desktop_unittest.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                windows.*:
--                    - unittests/win_taskcluster_unittest.py
--                macosx.*:
--                    - unittests/mac_unittest.py
--                default:
--                    - unittests/linux_unittest.py
--                    - remove_executables.py
--        extra-options:
--            - --reftest-suite=reftest-stylo
--
--robocop:
--    description: "Robocop run"
--    suite: robocop
--    treeherder-symbol: tc-M(rc)
--    instance-size: xlarge
--    chunks:
--        by-test-platform:
--            # android-4.3-arm7-api-16/debug -- not run
--            android-4.3-arm7-api-16/opt: 4
--            android-4.3-arm7-api-16-gradle/opt: 4
--    loopback-video: true
--    e10s: false
--    mozharness:
--        script: android_emulator_unittest.py
--        no-read-buildbot-config: true
--        config:
--            - android/androidarm_4_3.py
--        extra-options:
--            - --test-suite=robocop
--
--talos-chrome:
--    description: "Talos chrome"
--    suite: talos
--    try-name: chromez
--    treeherder-symbol: tc-T(c)
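--    # Talos jobs run on dedicated hardware so timing results stay comparable.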
--    virtualization: hardware
--    run-on-projects:
--        by-test-platform:
--            linux64-stylo-sequential/.*: ['mozilla-central', 'try']
--            linux64-stylo/.*: ['mozilla-central', 'try']
--            macosx64-stylo/.*: ['autoland', 'mozilla-central', 'try']
--            default: ['mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland', 'try']
--    max-run-time: 3600
--    e10s: true
--    mozharness:
--        script: talos_script.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                macosx.*:
--                    - talos/mac_config.py
--                windows.*:
--                    - talos/windows_config.py
--                default:
--                    - talos/linux_config.py
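--        # --add-option is forwarded to talos; --webServer,localhost points
--        # the suite at the locally hosted test pages.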
--        extra-options:
--            - --suite=chromez
--            - --add-option
--            - --webServer,localhost
--
--talos-dromaeojs:
--    description: "Talos dromaeojs"
--    suite: talos
--    try-name: dromaeojs
--    treeherder-symbol: tc-T(d)
--    virtualization: hardware
--    run-on-projects:
--        by-test-platform:
--            linux64-stylo-sequential/.*: ['mozilla-central', 'try']
--            linux64-stylo/.*: ['mozilla-central', 'try']
--            macosx64-stylo/.*: ['autoland', 'mozilla-central', 'try']
--            default: ['mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland', 'try']
--    max-run-time: 3600
--    e10s: true
--    mozharness:
--        script: talos_script.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                macosx.*:
--                    - talos/mac_config.py
--                windows.*:
--                    - talos/windows_config.py
--                default:
--                    - talos/linux_config.py
--        extra-options:
--            - --suite=dromaeojs
--            - --add-option
--            - --webServer,localhost
--
--talos-g1:
--    description: "Talos g1"
--    suite: talos
--    try-name: g1
--    treeherder-symbol: tc-T(g1)
--    virtualization: hardware
--    run-on-projects:
--        by-test-platform:
--            linux64-stylo-sequential/.*: ['mozilla-central', 'try']
--            linux64-stylo/.*: ['mozilla-central', 'try']
--            macosx64-stylo/.*: ['autoland', 'mozilla-central', 'try']
--            default: ['mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland', 'try']
--    max-run-time: 7200
--    e10s: true
--    mozharness:
--        script: talos_script.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                macosx.*:
--                    - talos/mac_config.py
--                windows.*:
--                    - talos/windows_config.py
--                default:
--                    - talos/linux_config.py
--        extra-options:
--            - --suite=g1
--            - --add-option
--            - --webServer,localhost
--
--talos-g2:
--    description: "Talos g2"
--    suite: talos
--    try-name: g2
--    treeherder-symbol: tc-T(g2)
--    virtualization: hardware
--    max-run-time: 7200
--    run-on-projects:
--        by-test-platform:
--            linux64-stylo-sequential/.*: ['mozilla-central', 'try']
--            linux64-stylo/.*: ['mozilla-central', 'try']
--            macosx64-stylo/.*: ['autoland', 'mozilla-central', 'try']
--            default: ['mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland', 'try']
--    e10s: true
--    mozharness:
--        script: talos_script.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                macosx.*:
--                    - talos/mac_config.py
--                windows.*:
--                    - talos/windows_config.py
--                default:
--                    - talos/linux_config.py
--        extra-options:
--            - --suite=g2
--            - --add-option
--            - --webServer,localhost
--
--talos-g3:
--    description: "Talos g3"
--    suite: talos
--    try-name: g3
--    treeherder-symbol: tc-T(g3)
--    virtualization: hardware
--    run-on-projects:
--        by-test-platform:
--            linux64-stylo-sequential/.*: ['mozilla-central', 'try']
--            linux64-stylo/.*: ['mozilla-central', 'try']
--            macosx64-stylo/.*: ['autoland', 'mozilla-central', 'try']
--            default: ['mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland', 'try']
--    max-run-time: 3600
--    e10s: true
--    mozharness:
--        script: talos_script.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                macosx.*:
--                    - talos/mac_config.py
--                windows.*:
--                    - talos/windows_config.py
--                default:
--                    - talos/linux_config.py
--        extra-options:
--            - --suite=g3
--            - --add-option
--            - --webServer,localhost
--
--talos-g4:
--    description: "Talos g4"
--    suite: talos
--    try-name: g4
--    treeherder-symbol: tc-T(g4)
--    virtualization: hardware
--    run-on-projects:
--        by-test-platform:
--            linux64-stylo-sequential/.*: ['mozilla-central', 'try']
--            linux64-stylo/.*: ['mozilla-central', 'try']
--            macosx64-stylo/.*: ['autoland', 'mozilla-central', 'try']
--            default: ['mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland', 'try']
--    max-run-time: 3600
--    e10s: true
--    mozharness:
--        script: talos_script.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                macosx.*:
--                    - talos/mac_config.py
--                windows.*:
--                    - talos/windows_config.py
--                default:
--                    - talos/linux_config.py
--        extra-options:
--            - --suite=g4
--            - --add-option
--            - --webServer,localhost
--
--talos-g5:
--    description: "Talos g5"
--    suite: talos
--    try-name: g5
--    treeherder-symbol: tc-T(g5)
--    virtualization: hardware
--    run-on-projects:
--        by-test-platform:
--            linux64-stylo-sequential/.*: ['mozilla-central', 'try']
--            linux64-stylo/.*: ['mozilla-central', 'try']
--            macosx64-stylo/.*: ['autoland', 'mozilla-central', 'try']
--            default: ['mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland', 'try']
--    max-run-time: 3600
--    e10s: true
--    mozharness:
--        script: talos_script.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                macosx.*:
--                    - talos/mac_config.py
--                windows.*:
--                    - talos/windows_config.py
--                default:
--                    - talos/linux_config.py
--                    - remove_executables.py
--        extra-options:
--            - --suite=g5
--            - --add-option
--            - --webServer,localhost
--
--talos-other:
--    description: "Talos other"
--    suite: talos
--    try-name: other
--    treeherder-symbol: tc-T(o)
--    virtualization: hardware
--    run-on-projects:
--        by-test-platform:
--            linux64-stylo-sequential/.*: ['mozilla-central', 'try']
--            linux64-stylo/.*: ['mozilla-central', 'try']
--            macosx64-stylo/.*: ['autoland', 'mozilla-central', 'try']
--            default: ['mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland', 'try']
--    max-run-time: 3600
--    e10s: true
--    mozharness:
--        script: talos_script.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                macosx.*:
--                    - talos/mac_config.py
--                windows.*:
--                    - talos/windows_config.py
--                default:
--                    - talos/linux_config.py
--        extra-options:
--            - --suite=other
--            - --add-option
--            - --webServer,localhost
--
--talos-perf-reftest:
--    description: "Talos perf-reftest"
--    suite: talos
--    try-name: perf-reftest
--    treeherder-symbol: tc-T(p)
--    virtualization: hardware
--    run-on-projects:
--        by-test-platform:
--            linux64-stylo-sequential/.*: ['mozilla-central', 'try']
--            linux64-stylo/.*: ['mozilla-central', 'try']
--            macosx64-stylo/.*: ['autoland', 'mozilla-central', 'try']
--            default: ['mozilla-central', 'mozilla-inbound', 'autoland', 'try']
--    max-run-time: 3600
--    e10s: true
--    mozharness:
--        script: talos_script.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                windows.*:
--                    - talos/windows_config.py
--                default:
--                    - talos/linux_config.py
--        extra-options:
--            - --suite=perf-reftest
--
--talos-perf-reftest-singletons:
--    description: "Talos perf-reftest singletons"
--    suite: talos
--    try-name: perf-reftest-singletons
--    treeherder-symbol: tc-T(ps)
--    virtualization: hardware
--    run-on-projects: ['mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland', 'try']
--    max-run-time: 3600
--    e10s: true
--    mozharness:
--        script: talos_script.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                macosx.*:
--                    - talos/mac_config.py
--                windows.*:
--                    - talos/windows_config.py
--                default:
--                    - talos/linux_config.py
--        extra-options:
--            - --suite=perf-reftest-singletons
--
--talos-quantum-pageload:
--    description: "Talos quantum-pageload"
--    suite: talos
--    try-name: quantum-pageload
--    treeherder-symbol: tc-T(tp6)
--    run-on-projects:
--        by-test-platform:
--            windows.*: ['mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland', 'try', 'date']
--            default: []
--    max-run-time: 3600
--    e10s: true
--    mozharness:
--        script: talos_script.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                macosx.*:
--                    - talos/mac_config.py
--                windows.*:
--                    - talos/windows_config.py
--                default:
--                    - talos/linux_config.py
--        extra-options:
--            - --suite=quantum-pageload
--            - --add-option
--            - --webServer,localhost
--
--talos-quantum-pageload-stylo:
--    description: "Talos Quantum Pageload Stylo"
--    suite: talos
--    try-name: quantum-pageload-stylo
--    treeherder-symbol: tc-T(tp6s)
--    virtualization: hardware
--    run-on-projects:
--        by-test-platform:
--            windows.*: ['mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland', 'try']
--            macosx64-stylo/.*: ['autoland', 'mozilla-central', 'try']
--            default: []
--    max-run-time: 3600
--    e10s: true
--    mozharness:
--        script: talos_script.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                macosx.*:
--                    - talos/mac_config.py
--                windows.*:
--                    - talos/windows_config.py
--                default:
--                    - talos/linux_config.py
--        extra-options:
--            - --suite=quantum-pageload-stylo
--
--talos-svgr:
--    description: "Talos svgr"
--    suite: talos
--    try-name: svgr
--    treeherder-symbol: tc-T(s)
--    virtualization: hardware
--    run-on-projects:
--        by-test-platform:
--            linux64-stylo-sequential/.*: ['mozilla-central', 'try']
--            linux64-stylo/.*: ['mozilla-central', 'try']
--            macosx64-stylo/.*: ['autoland', 'mozilla-central', 'try']
--            default: ['mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland', 'try']
--    max-run-time: 3600
--    e10s: true
--    mozharness:
--        script: talos_script.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                macosx.*:
--                    - talos/mac_config.py
--                windows.*:
--                    - talos/windows_config.py
--                default:
--                    - talos/linux_config.py
--        extra-options:
--            - --suite=svgr
--            - --add-option
--            - --webServer,localhost
--
--talos-tp5o:
--    description: "Talos tp5o"
--    suite: talos
--    try-name: tp5o
--    treeherder-symbol: tc-T(tp)
--    virtualization: hardware
--    run-on-projects:
--        by-test-platform:
--            linux64-stylo-sequential/.*: ['mozilla-central', 'try']
--            linux64-stylo/.*: ['mozilla-central', 'try']
--            macosx64-stylo/.*: ['autoland', 'mozilla-central', 'try']
--            default: ['mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland', 'try']
--    max-run-time: 3600
--    e10s: true
--    mozharness:
--        script: talos_script.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                macosx.*:
--                    - talos/mac_config.py
--                windows.*:
--                    - talos/windows_config.py
--                default:
--                    - talos/linux_config.py
--        extra-options:
--            - --suite=tp5o
--            - --add-option
--            - --webServer,localhost
--
--talos-xperf:
--    description: "Talos xperf"
--    suite: talos
--    try-name: xperf
--    treeherder-symbol: tc-T(x)
--    run-on-projects:
--        by-test-platform:
--            windows7-32.*: ['mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland', 'try', 'date']
--            default: []
--    max-run-time: 3600
--    e10s: true
--    mozharness:
--        script: talos_script.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                macosx.*:
--                    - talos/mac_config.py
--                windows.*:
--                    - talos/windows_config.py
--                default:
--                    - talos/linux_config.py
--        extra-options:
--            - --suite=xperf
--            - --add-option
--            - --webServer,localhost
--
--telemetry-tests-client:
--    description: "Telemetry tests client run"
--    suite: telemetry-tests-client
--    treeherder-symbol: tc-e10s
--    max-run-time: 5400
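--    # checkout: true gives the test task a full source checkout; the
--    # telemetry client tests run from the tree.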
--    checkout: true
--    e10s: true
--    tier: 3
--    docker-image: {"in-tree": "desktop1604-test"}
--    mozharness:
--        script: telemetry/telemetry_client.py
--        config:
--            by-test-platform:
--                linux.*:
--                    - remove_executables.py
--                windows.*: []
--
--web-platform-tests:
--    description: "Web platform test run"
--    suite: web-platform-tests
--    treeherder-symbol: tc-W(wpt)
--    chunks:
--      by-test-platform:
--        macosx64/opt: 5
--        macosx64/debug: 10
--        default: 12
--    e10s:
--        by-test-platform:
--            macosx.*: true
--            default: both
--    max-run-time: 7200
--    instance-size: xlarge
--    docker-image: {"in-tree": "desktop1604-test"}
--    run-on-projects:
--        by-test-platform:
--            macosx64-stylo/.*: ['autoland', 'mozilla-central', 'try']
--            default: built-projects
--    mozharness:
--        script: web_platform_tests.py
--        no-read-buildbot-config: true
--        chunked: true
--        config:
--            by-test-platform:
--                windows.*:
--                    - web_platform_tests/prod_config_windows_taskcluster.py
--                macosx.*:
--                    - web_platform_tests/prod_config.py
--                default:
--                    - web_platform_tests/prod_config.py
--                    - remove_executables.py
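--        # wpt is split by --test-type: testharness here, with the reftest
--        # and wdspec variants in the following two stanzas.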
--        extra-options:
--            - --test-type=testharness
--
--web-platform-tests-reftests:
--    description: "Web platform reftest run"
--    suite: web-platform-tests-reftests
--    treeherder-symbol: tc-W(Wr)
--    max-run-time: 5400
--    instance-size: xlarge
--    chunks:
--      by-test-platform:
--        macosx.*: 1
--        windows.*: 1
--        default: 6
--    e10s:
--        by-test-platform:
--            macosx.*: true
--            default: both
--    docker-image: {"in-tree": "desktop1604-test"}
--    run-on-projects:
--        by-test-platform:
--            macosx64-stylo/.*: ['autoland', 'mozilla-central', 'try']
--            default: built-projects
--    mozharness:
--        script: web_platform_tests.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                windows.*:
--                    - web_platform_tests/prod_config_windows_taskcluster.py
--                macosx.*:
--                    - web_platform_tests/prod_config.py
--                default:
--                    - web_platform_tests/prod_config.py
--                    - remove_executables.py
--        extra-options:
--            - --test-type=reftest
--
--web-platform-tests-wdspec:
--    description: "Web platform webdriver-spec run"
--    suite: web-platform-tests-wdspec
--    treeherder-symbol: tc-W(Wd)
--    max-run-time: 5400
--    instance-size: xlarge
--    docker-image: {"in-tree": "desktop1604-test"}
--    mozharness:
--        script: web_platform_tests.py
--        no-read-buildbot-config: true
--        config:
--            by-test-platform:
--                windows.*:
--                    - web_platform_tests/prod_config_windows_taskcluster.py
--                macosx.*:
--                    - web_platform_tests/prod_config.py
--                default:
--                    - web_platform_tests/prod_config.py
--                    - remove_executables.py
--        extra-options:
--            - --test-type=wdspec
--
--xpcshell:
--    description: "xpcshell test run"
--    suite:
--        by-test-platform:
--            linux64-jsdcov/opt: xpcshell-coverage
--            default: xpcshell
--    treeherder-symbol: tc-X(X)
--    run-on-projects:
--        by-test-platform:
--            linux64-qr/.*: ['mozilla-central', 'try']
--            # Deactivate try on Buildbot by default; we don't have enough machines.
--            windows8-64.*: ['mozilla-release', 'mozilla-beta', 'mozilla-central', 'mozilla-inbound', 'autoland']
--            windows8-64-devedition/opt: built-projects  # No dev edition outside of supported branches
--            windows10-64-asan/opt: []  # No XPCShell on ASAN yet
--            default: built-projects
--    worker-type:
--        by-test-platform:
--            windows8-64.*: buildbot-bridge/buildbot-bridge
--            default: null
--    docker-image: {"in-tree": "desktop1604-test"}
--    tier:
--        by-test-platform:
--            windows10-64.*: 2
--            default: default
--    chunks:
--        by-test-platform:
--            linux64/debug: 10
--            android-4.2-x86/opt: 6
--            macosx.*: 1
--            windows.*: 1
--            default: 8
--    instance-size:
--        by-test-platform:
--            android.*: xlarge
--            default: legacy # Bug 1281241: migrating to m3.large instances
--    max-run-time: 5400
--    e10s: false
--    allow-software-gl-layers: false
--    mozharness:
--        by-test-platform:
--            android.*:
--                script: android_emulator_unittest.py
--                no-read-buildbot-config: true
--                extra-options:
--                    - --test-suite=xpcshell
--                config:
--                    by-test-platform:
--                        android-4.2-x86/opt:
--                            - android/androidx86.py
--                        default:
--                            - android/androidarm_4_3.py
--            default:
--                script: desktop_unittest.py
--                no-read-buildbot-config: true
--                config:
--                    by-test-platform:
--                        windows.*:
--                            - unittests/win_taskcluster_unittest.py
--                        macosx.*:
--                            - unittests/mac_unittest.py
--                        linux.*:
--                            - unittests/linux_unittest.py
--                            - remove_executables.py
--                extra-options:
--                    by-test-platform:
--                        linux64-jsdcov/opt:
--                            - --xpcshell-suite=xpcshell-coverage
--                        default:
--                            - --xpcshell-suite=xpcshell
--                requires-signed-builds:
--                    by-test-platform:
--                        windows10-64-asan/opt: false    # No XPCShell on ASAN yet
--                        windows.*: true
--                        default: false
-diff --git a/taskcluster/ci/toolchain/kind.yml b/taskcluster/ci/toolchain/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/toolchain/kind.yml
-+++ /dev/null
-@@ -1,16 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.transform:loader
--
--transforms:
--   - taskgraph.transforms.try_job:transforms
--   - taskgraph.transforms.toolchain:transforms
--   - taskgraph.transforms.job:transforms
--   - taskgraph.transforms.task:transforms
--
--jobs-from:
--   - linux.yml
--   - macosx.yml
--   - windows.yml
-diff --git a/taskcluster/ci/toolchain/linux.yml b/taskcluster/ci/toolchain/linux.yml
-deleted file mode 100644
---- a/taskcluster/ci/toolchain/linux.yml
-+++ /dev/null
-@@ -1,189 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--linux64-clang-3.9:
--    description: "Clang 3.9 toolchain build"
--    treeherder:
--        kind: build
--        platform: toolchains/opt
--        symbol: TL(clang3.9)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        docker-image: {in-tree: desktop-build}
--        max-run-time: 36000
--    run:
--        using: toolchain-script
--        script: build-clang-3.9-linux.sh
--        resources:
--            - 'build/build-clang/build-clang.py'
--            - 'build/build-clang/clang-3.9-linux64.json'
--            - 'taskcluster/scripts/misc/tooltool-download.sh'
--        toolchain-alias: linux64-clang
--        toolchain-artifact: public/build/clang.tar.xz
--    toolchains:
--        - linux64-gcc
--
--linux64-clang-4:
--    description: "Clang 4 toolchain build"
--    treeherder:
--        kind: build
--        platform: toolchains/opt
--        symbol: TL(clang4)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        docker-image: {in-tree: desktop-build}
--        max-run-time: 36000
--    run:
--        using: toolchain-script
--        script: build-clang-4-linux.sh
--        resources:
--            - 'build/build-clang/build-clang.py'
--            - 'build/build-clang/clang-4-linux64.json'
--            - 'taskcluster/scripts/misc/tooltool-download.sh'
--        toolchain-artifact: public/build/clang.tar.xz
--    toolchains:
--        - linux64-gcc-4.8
--
--linux64-clang-tidy:
--    description: "Clang-tidy build"
--    index:
--        product: static-analysis
--        job-name: linux64-clang-tidy
--    treeherder:
--        kind: build
--        platform: toolchains/opt
--        symbol: TL(clang-tidy)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        docker-image: {in-tree: desktop-build}
--        max-run-time: 36000
--    run:
--        using: toolchain-script
--        script: build-clang-tidy-linux.sh
--        resources:
--            - 'build/clang-plugin/**'
--            - 'build/build-clang/**'
--            - 'taskcluster/scripts/misc/tooltool-download.sh'
--        toolchain-artifact: public/build/clang-tidy.tar.xz
--    toolchains:
--        - linux64-gcc
--
--linux64-gcc-4.9:
--    description: "GCC 4.9 toolchain build"
--    treeherder:
--        kind: build
--        platform: toolchains/opt
--        symbol: TL(gcc4.9)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        docker-image: {in-tree: desktop-build}
--        max-run-time: 36000
--    run:
--        using: toolchain-script
--        script: build-gcc-4.9-linux.sh
--        resources:
--            - 'build/unix/build-gcc/build-gcc.sh'
--        toolchain-alias: linux64-gcc
--        toolchain-artifact: public/build/gcc.tar.xz
--
--linux64-binutils:
--    description: "Binutils toolchain build"
--    treeherder:
--        kind: build
--        platform: toolchains/opt
--        symbol: TL(binutil)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        docker-image: {in-tree: desktop-build}
--        max-run-time: 36000
--    run:
--        using: toolchain-script
--        script: build-binutils-linux.sh
--        resources:
--            - 'build/unix/build-binutils/**'
--        toolchain-artifact: public/build/binutils.tar.xz
--
--linux64-cctools-port:
--    description: "cctools-port toolchain build"
--    treeherder:
--        kind: build
--        platform: toolchains/opt
--        symbol: TL(cctools)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        docker-image: {in-tree: desktop-build}
--        max-run-time: 36000
--    run:
--        using: toolchain-script
--        script: build-cctools-port.sh
--        resources:
--            - 'taskcluster/scripts/misc/tooltool-download.sh'
--        toolchain-artifact: public/build/cctools.tar.xz
--    toolchains:
--        - linux64-clang
--
--linux64-hfsplus:
--    description: "hfsplus toolchain build"
--    treeherder:
--        kind: build
--        platform: toolchains/opt
--        symbol: TL(hfs+)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        docker-image: {in-tree: desktop-build}
--        max-run-time: 36000
--    run:
--        using: toolchain-script
--        script: build-hfsplus-linux.sh
--        resources:
--            - 'build/unix/build-hfsplus/**'
--            - 'taskcluster/scripts/misc/tooltool-download.sh'
--        toolchain-artifact: public/build/hfsplus-tools.tar.xz
--    toolchains:
--        - linux64-clang
--
--linux64-libdmg:
--    description: "libdmg-hfsplus toolchain build"
--    treeherder:
--        kind: build
--        platform: toolchains/opt
--        symbol: TL(libdmg-hfs+)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        docker-image: {in-tree: desktop-build}
--        max-run-time: 36000
--    run:
--        using: toolchain-script
--        script: build-libdmg-hfsplus.sh
--        toolchain-artifact: public/build/dmg.tar.xz
--
--linux64-sccache:
--    description: "sccache toolchain build"
--    treeherder:
--        kind: build
--        platform: toolchains/opt
--        symbol: TL(sccache)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--    worker:
--        docker-image: {in-tree: desktop-build}
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/clang.manifest"
--    run:
--        using: toolchain-script
--        script: build-sccache.sh
--        resources:
--            - 'taskcluster/scripts/misc/tooltool-download.sh'
--        toolchain-artifact: public/build/sccache2.tar.xz
--    toolchains:
--        - linux64-clang
-diff --git a/taskcluster/ci/toolchain/macosx.yml b/taskcluster/ci/toolchain/macosx.yml
-deleted file mode 100644
---- a/taskcluster/ci/toolchain/macosx.yml
-+++ /dev/null
-@@ -1,83 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--macosx64-clang:
--    description: "Clang toolchain build"
--    treeherder:
--        kind: build
--        platform: toolchains/opt
--        symbol: TM(clang)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-macosx64
--    worker:
--        docker-image: {in-tree: desktop-build}
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/macosx64/cross-clang.manifest"
--    run:
--        using: toolchain-script
--        script: build-clang-macosx.sh
--        tooltool-downloads: internal
--        resources:
--            - 'build/build-clang/**'
--            - 'taskcluster/scripts/misc/tooltool-download.sh'
--        toolchain-artifact: public/build/clang.tar.xz
--    toolchains:
--        - linux64-cctools-port
--        - linux64-clang
--        - linux64-gcc
--
--macosx64-clang-tidy:
--    description: "Clang-tidy build"
--    index:
--        product: static-analysis
--        job-name: macosx64-clang-tidy
--    treeherder:
--        kind: build
--        platform: toolchains/opt
--        symbol: TM(clang-tidy)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-macosx64
--    worker:
--        docker-image: {in-tree: desktop-build}
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/macosx64/cross-clang.manifest"
--    run:
--        using: toolchain-script
--        script: build-clang-tidy-macosx.sh
--        tooltool-downloads: internal
--        resources:
--            - 'build/clang-plugin/**'
--            - 'build/build-clang/**'
--            - 'taskcluster/scripts/misc/tooltool-download.sh'
--        toolchain-artifact: public/build/clang-tidy.tar.xz
--    toolchains:
--        - linux64-cctools-port
--        - linux64-clang
--        - linux64-gcc
--
--macosx64-cctools-port:
--    description: "cctools-port toolchain build"
--    treeherder:
--        kind: build
--        platform: toolchains/opt
--        symbol: TM(cctools)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-macosx64
--    worker:
--        docker-image: {in-tree: desktop-build}
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/macosx64/cross-clang.manifest"
--    run:
--        using: toolchain-script
--        script: build-cctools-port-macosx.sh
--        tooltool-downloads: internal
--        resources:
--            - 'taskcluster/scripts/misc/tooltool-download.sh'
--        toolchain-artifact: public/build/cctools.tar.bz2
--    toolchains:
--        - linux64-cctools-port
--        - linux64-clang
-diff --git a/taskcluster/ci/toolchain/windows.yml b/taskcluster/ci/toolchain/windows.yml
-deleted file mode 100644
---- a/taskcluster/ci/toolchain/windows.yml
-+++ /dev/null
-@@ -1,108 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--win32-clang-cl:
--    description: "Clang-cl toolchain build"
--    treeherder:
--        kind: build
--        platform: toolchains/opt
--        symbol: TW32(clang-cl)
--        tier: 2
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win32/build-clang-cl.manifest"
--    run:
--        using: toolchain-script
--        script: build-clang32-windows.sh
--        resources:
--            - 'build/build-clang/**'
--            - 'taskcluster/scripts/misc/build-clang-windows-helper32.sh'
--        toolchain-artifact: public/build/clang.tar.bz2
--
--win64-clang-cl:
--    description: "Clang-cl toolchain build"
--    treeherder:
--        kind: build
--        platform: toolchains/opt
--        symbol: TW64(clang-cl)
--        tier: 2
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win32/build-clang-cl.manifest"
--    run:
--        using: toolchain-script
--        script: build-clang64-windows.sh
--        resources:
--            - 'build/build-clang/**'
--            - 'taskcluster/scripts/misc/build-clang-windows-helper64.sh'
--        toolchain-artifact: public/build/clang.tar.bz2
--
--win32-clang-tidy:
--    description: "Clang-tidy toolchain build"
--    index:
--        product: static-analysis
--        job-name: win32-clang-tidy
--    treeherder:
--        kind: build
--        platform: toolchains/opt
--        symbol: TW32(clang-tidy)
--        tier: 2
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win32/build-clang-cl.manifest"
--    run:
--        using: toolchain-script
--        script: build-clang-tidy32-windows.sh
--        resources:
--            - 'build/build-clang/**'
--            - 'taskcluster/scripts/misc/build-clang-windows-helper32.sh'
--        toolchain-artifact: public/build/clang-tidy.tar.bz2
--
--win64-clang-tidy:
--    description: "Clang-tidy toolchain build"
--    index:
--        product: static-analysis
--        job-name: win64-clang-tidy
--    treeherder:
--        kind: build
--        platform: toolchains/opt
--        symbol: TW64(clang-tidy)
--        tier: 2
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win32/build-clang-cl.manifest"
--    run:
--        using: toolchain-script
--        script: build-clang-tidy64-windows.sh
--        resources:
--            - 'build/build-clang/**'
--            - 'taskcluster/scripts/misc/build-clang-windows-helper64.sh'
--        toolchain-artifact: public/build/clang-tidy.tar.bz2
--
--win64-sccache:
--    description: "sccache toolchain build"
--    treeherder:
--        kind: build
--        platform: toolchains/opt
--        symbol: TW64(sccache)
--        tier: 1
--    worker-type: aws-provisioner-v1/gecko-{level}-b-win2012
--    worker:
--        max-run-time: 36000
--        env:
--            TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/win64/sccache-build.manifest"
--    run:
--        using: toolchain-script
--        script: build-sccache.sh
--        resources:
--            - 'taskcluster/scripts/misc/tooltool-download.sh'
--        toolchain-artifact: public/build/sccache2.tar.bz2
-diff --git a/taskcluster/ci/upload-symbols/kind.yml b/taskcluster/ci/upload-symbols/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/upload-symbols/kind.yml
-+++ /dev/null
-@@ -1,47 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.single_dep:loader
--
--transforms:
--   - taskgraph.transforms.upload_symbols:transforms
--   - taskgraph.transforms.task:transforms
--
--kind-dependencies:
--    - build
--
--only-for-build-platforms:
--    - linux64/opt
--    - linux64/debug
--    - linux64-nightly/opt
--    - linux-nightly/opt
--    - android-aarch64-nightly/opt
--    - android-api-16/opt
--    - android-api-16-nightly/opt
--    - android-api-16-old-id-nightly/opt
--    - android-x86-nightly/opt
--    - android-x86-old-id-nightly/opt
--    - macosx64-nightly/opt
--    - win32-nightly/opt
--    - win64-nightly/opt
--
--job-template:
--   label: # see transforms
--   description: Upload Symbols
--   dependencies: # see transforms
--   expires-after: 7 days
--   deadline-after: 24 hours
--   worker-type: aws-provisioner-v1/gecko-symbol-upload
--   worker:
--       implementation: docker-worker
--       os: linux
--       max-run-time: 600
--       command: ["/bin/bash", "bin/upload.sh"]
--       docker-image: taskclusterprivate/upload_symbols:0.0.4
--       env:
--           GECKO_HEAD_REPOSITORY: # see transforms
--           GECKO_HEAD_REV: # see transforms
--           ARTIFACT_TASKID: {"task-reference": "<build>"}
--   scopes:
--       - docker-worker:image:taskclusterprivate/upload_symbols:0.0.4
-diff --git a/taskcluster/ci/valgrind/kind.yml b/taskcluster/ci/valgrind/kind.yml
-deleted file mode 100644
---- a/taskcluster/ci/valgrind/kind.yml
-+++ /dev/null
-@@ -1,47 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--loader: taskgraph.loader.transform:loader
--
--kind-dependencies:
--   - toolchain
--
--transforms:
--   - taskgraph.transforms.build_attrs:transforms
--   - taskgraph.transforms.toolchain:transforms
--   - taskgraph.transforms.job:transforms
--   - taskgraph.transforms.task:transforms
--
--jobs:
--    linux64-valgrind/opt:
--        description: "Linux64 Valgrind Opt"
--        index:
--            product: firefox
--            job-name: linux64-valgrind-opt
--        treeherder:
--            platform: linux64/opt
--            symbol: tc(V)
--            kind: build
--            tier: 1
--        worker-type: aws-provisioner-v1/gecko-{level}-b-linux
--        worker:
--            docker-image: {in-tree: desktop-build}
--            max-run-time: 72000
--            env:
--                TOOLTOOL_MANIFEST: "browser/config/tooltool-manifests/linux64/releng.manifest"
--        run:
--            using: mozharness
--            actions: [get-secrets build valgrind-test]
--            custom-build-variant-cfg: valgrind
--            config:
--                - builds/releng_base_linux_64_builds.py
--                - balrog/production.py
--            script: "mozharness/scripts/fx_desktop_build.py"
--            secrets: true
--            tooltool-downloads: public
--            need-xvfb: true
--        toolchains:
--            - linux64-clang
--            - linux64-gcc
--            - linux64-sccache
-diff --git a/taskcluster/docker/README.md b/taskcluster/docker/README.md
-deleted file mode 100644
---- a/taskcluster/docker/README.md
-+++ /dev/null
-@@ -1,158 +0,0 @@
--# Docker Images for use in TaskCluster
--
--This folder contains various docker images used in [taskcluster](http://docs.taskcluster.net/) as well as other misc docker images which may be useful for
--hacking on gecko.
--
--## Organization
--
--Each folder describes a single docker image.  We have two types of images that can be defined:
--
--1. [Task Images (build-on-push)](#task-images-build-on-push)
--2. [Docker Images (prebuilt)](#docker-registry-images-prebuilt)
--
--These images depend on one another, as described in the [`FROM`](https://docs.docker.com/v1.8/reference/builder/#from)
--line at the top of the Dockerfile in each folder.
--
--An image is either intended for pushing to a docker registry, or meant for local
--testing or to be built as an artifact when pushed to vcs.
--
--### Task Images (build-on-push)
--
--Images can be uploaded as a task artifact, [indexed](#task-image-index-namespace) under
--a given namespace, and used in other tasks by referencing the task ID.
--
--Importantly, these images do not require building and pushing to a docker registry; they are
--built per push (if necessary) and uploaded as task artifacts.
--
--The decision task that is run per push will [determine](#context-directory-hashing)
--if the image needs to be built based on the hash of the context directory and if the image
--exists under the namespace for a given branch.
--
--As an additional convenience, and as a precaution against building separate images per branch, if an image
--has been indexed with a given context hash for mozilla-central, any tasks requiring that image
--will use that indexed task.  This ensures that multiple images are not built and used
--from the same context. In summary, if the image has been built for mozilla-central,
--pushes to any branch will use that already built image.
--
--To use within an in-tree task definition, the format is:
--
--```yaml
--image:
--  type: 'task-image'
--  path: 'public/image.tar.zst'
--  taskId: '{{#task_id_for_image}}builder{{/task_id_for_image}}'
--```
--
--##### Context Directory Hashing
--
--Decision tasks will calculate the sha256 hash of the contents of the image
--directory and will determine if the image already exists for a given branch and hash
--or if a new image must be built and indexed.
--
--Note: this is the contents of *only* the context directory, not the
--image contents.
--
--The decision task will:
--1. Recursively collect the paths of all files within the context directory.
--2. Sort the filenames alphabetically to ensure the hash is consistently calculated.
--3. Generate a sha256 hash of the contents of each file.
--4. Combine each file hash with its path and use the result to update the hash
--of the context directory.
--
--This ensures that the hash is consistently calculated and path changes will result
--in different hashes being generated.
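--
--As a rough illustration, that scheme can be approximated in shell (this is a
--sketch only; the real computation is done in the decision task's Python code,
--and the directory name below is just an example):
--
--```sh
--# Illustrative sketch, not the in-tree implementation.
--context_dir=taskcluster/docker/desktop-build   # example context directory
--(
--  cd "$context_dir" &&
--  find . -type f | sort | while read -r f; do
--    # combine each file's sha256 hash with its path
--    printf '%s  %s\n' "$(sha256sum "$f" | cut -d' ' -f1)" "$f"
--  done
--) | sha256sum | cut -d' ' -f1
--```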
--
--##### Task Image Index Namespace
--
--Images that are built on push and uploaded as an artifact of a task will be indexed under the
--following namespaces.
--
--* docker.images.v2.level-{level}.{image_name}.latest
--* docker.images.v2.level-{level}.{image_name}.pushdate.{year}.{month}-{day}-{pushtime}
--* docker.images.v2.level-{level}.{image_name}.hash.{context_hash}
--
--Not only can images be browsed by the pushdate and context hash, but the 'latest' namespace
--also references the most recently built image.  This functions similarly to the 'latest' tag
--for docker images that are pushed to a registry.
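--
--For example, the latest indexed image for a namespace could be looked up through
--the index API (hypothetical values: `level-3` and `desktop-build` are
--placeholders, and the index host is an assumption based on the taskcluster
--deployment of that era):
--
--```sh
--# Fetch the task record for the latest indexed image in an example namespace.
--curl -s https://index.taskcluster.net/v1/task/docker.images.v2.level-3.desktop-build.latest
--```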
--
--### Docker Registry Images (prebuilt)
--
--***Deprecation Warning: Prebuilt images should only be used for base images (those that other images
--will inherit from) or for private images that must be stored in a private docker registry account.  Existing
--public images will be converted to images that are built on push, and any newly added image should
--follow this pattern.***
--
--These are images that are intended to be pushed to a docker registry and used by specifying the
--folder name in task definitions.  This information is automatically populated by using the 'docker_image'
--convenience method in task definitions.
--
--Example:
--  image: {{#docker_image}}builder{{/docker_image}}
--
--Each image has a hash and a version, given by its `HASH` and `VERSION` files.
--When rebuilding a prebuilt image the `VERSION` should be bumped. Once a new
--version of the image has been built the `HASH` file should be updated with the
--hash of the image.
--
--The `HASH` file is the image hash as computed by docker; this is always of the
--form `sha256:<digest>`. In production, images will be referenced by image hash.
--This mitigates attacks against the registry as well as simplifying validation of
--correctness. The `VERSION` file only serves to provide convenient names, such
--that old versions are easy to discover in the registry (and to ensure old
--versions aren't deleted by garbage-collection).
--
--This way, older tasks which were designed to run on an older version of the image
--can still be executed in taskcluster, while new tasks can use the new version.
--Furthermore, this mitigates attacks against the registry, as docker will verify
--the image hash when loading the image.
--
--Each image also has a `REGISTRY`, defaulting to the `REGISTRY` in this directory,
--and specifying the image registry to which the completed image should be uploaded.
--
--## Building images
--
--Generally, images can be pulled from the [registry](./REGISTRY) rather than
--built locally; however, for developing new images it's often helpful to hack on
--them locally.
--
--To build an image, invoke `mach taskcluster-build-image` with the name of the
--folder (without a trailing slash):
--```sh
--./mach taskcluster-build-image <image-name>
--```
--
--This is a tiny wrapper around `docker build -t $REGISTRY/$FOLDER:$VERSION`.
--Once a new version of the image has been built and pushed to the remote registry using
--`docker push $REGISTRY/$FOLDER:$VERSION`, the `HASH` file must be updated for the
--change to take effect in production.
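--
--A sketch of that manual flow, assuming the folder layout described below under
--"Adding a new image" (the image name is illustrative):
--
--```sh
--FOLDER=desktop-build                      # example image folder
--REGISTRY=$(cat "$FOLDER/REGISTRY")        # e.g. quay.io/mozilla
--VERSION=$(cat "$FOLDER/VERSION")
--docker build -t "$REGISTRY/$FOLDER:$VERSION" "$FOLDER"
--docker push "$REGISTRY/$FOLDER:$VERSION"
--# Record the hash docker computed for the new image so production picks it up.
--docker inspect --format '{{.Id}}' "$REGISTRY/$FOLDER:$VERSION" > "$FOLDER/HASH"
--```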
--
--Note: If no "VERSION" file is present in the image directory, the tag 'latest' will be used and no
--registry will be defined. The image is only meant to run locally and will overwrite
--any existing image with the same name and tag.
--
--## Adding a new image
--
--The docker image primitives are very basic building blocks for
--constructing an "image", but generally don't help much with tagging it
--for deployment, so we have a wrapper (./build.sh) which adds some sugar
--to help with tagging/versioning... Each folder should look something
--like this:
--
--```
--  - your_amazing_image/
--    - your_amazing_image/Dockerfile: Standard docker file syntax
--    - your_amazing_image/VERSION: The version of the docker file
--      (required* used during tagging)
--    - your_amazing_image/REGISTRY: Override default registry
--      (useful for secret registries)
--```
--
--## Conventions
--
--In some image folders you will see `.env` files; these can be used in
--conjunction with the `--env-file` flag in docker to provide an
--environment with the given environment variables. These are primarily
--for convenience when manually hacking on the images.
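--
--For instance (image name and tag are illustrative):
--
--```sh
--# Run a shell in an image with the environment from its .env file.
--docker run --env-file desktop-build/.env -it quay.io/mozilla/desktop-build:0.1.7 /bin/bash
--```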
--
--You will also see a `system-setup.sh` script used to build the image.
--Do not replicate this technique - prefer to include the commands and options directly in the Dockerfile.
-diff --git a/taskcluster/docker/REGISTRY b/taskcluster/docker/REGISTRY
-deleted file mode 100644
---- a/taskcluster/docker/REGISTRY
-+++ /dev/null
-@@ -1,1 +0,0 @@
--quay.io/mozilla
-diff --git a/taskcluster/docker/android-gradle-build/Dockerfile b/taskcluster/docker/android-gradle-build/Dockerfile
-deleted file mode 100644
---- a/taskcluster/docker/android-gradle-build/Dockerfile
-+++ /dev/null
-@@ -1,99 +0,0 @@
--# TODO remove VOLUME below when the base image is updated next.
--FROM          taskcluster/centos6-build-upd:0.1.6.20160329195300
--MAINTAINER    Nick Alexander <nalexander@mozilla.com>
--
--# BEGIN ../desktop-build/Dockerfile
--
--# TODO remove when base image is updated
--VOLUME /home/worker/workspace
--VOLUME /home/worker/tooltool-cache
--
--# Add build scripts; these are the entry points from the taskcluster worker, and
--# operate on environment variables
--ADD             bin /home/worker/bin
--RUN             chmod +x /home/worker/bin/*
--
--# Add wrapper scripts for xvfb allowing tasks to easily retry starting up xvfb
--# %include taskcluster/docker/recipes/xvfb.sh
--ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /home/worker/scripts/xvfb.sh
--
--# Add configuration
--COPY            dot-config                    /home/worker/.config
--
--# Generate machine uuid file
--RUN dbus-uuidgen --ensure=/var/lib/dbus/machine-id
--
--# Stubbed out credentials; mozharness looks for this file and issues a WARNING
--# if it's not found, which causes the build to fail.  Note that this needs to
--# be in the parent of the workspace directory and in the directory where
--# mozharness is run (not its --work-dir).  See Bug 1169652.
--ADD           oauth.txt /home/worker/
--
--# stubbed out buildprops, which keeps mozharness from choking
--# Note that this needs to be in the parent of the workspace directory and in
--# the directory where mozharness is run (not its --work-dir)
--ADD           buildprops.json /home/worker/
--
--# install tooltool directly from github where tooltool_wrapper.sh et al. expect
--# to find it
--RUN wget -O /builds/tooltool.py https://raw.githubusercontent.com/mozilla/build-tooltool/master/tooltool.py
--RUN chmod +x /builds/tooltool.py
--
--# END ../desktop-build/Dockerfile
--
--# Reset user/workdir from parent image so we can install software.
--WORKDIR /
--USER root
--
--# Update base.
--RUN rpm --rebuilddb && yum upgrade -y
--
--# Install Sonatype Nexus.  Cribbed directly from
--# https://github.com/sonatype/docker-nexus/blob/fffd2c61b2368292040910c055cf690c8e76a272/oss/Dockerfile.
--
--# Install the screen package here to use with xvfb.
--# Move installation to base centos6-build image once Bug 1272629 is fixed
--# tzdata-java is symlinked from the JRE: see https://bugzilla.redhat.com/show_bug.cgi?id=1130800
--RUN rpm --rebuilddb && yum install -y \
--  createrepo \
--  curl \
--  screen \
--  sudo \
--  tar \
--  tzdata-java \
--  unzip \
--  wget \
--  zip \
--  && yum clean all
--
--ENV NEXUS_VERSION 2.12.0-01
--ENV NEXUS_SHA1SUM 1a9aaad8414baffe0a2fd46eed1f41b85f4049e6
--
--RUN mkdir -p /opt/sonatype/nexus
--
--WORKDIR /tmp
--RUN curl --fail --silent --location --retry 3 \
--    https://download.sonatype.com/nexus/oss/nexus-${NEXUS_VERSION}-bundle.tar.gz \
--    -o /tmp/nexus-${NEXUS_VERSION}-bundle.tar.gz
--
--# Observe the two spaces below.  Seriously.
--RUN echo "${NEXUS_SHA1SUM}  nexus-${NEXUS_VERSION}-bundle.tar.gz" > nexus-${NEXUS_VERSION}-bundle.tar.gz.sha1
--RUN sha1sum --check nexus-${NEXUS_VERSION}-bundle.tar.gz.sha1
--
--RUN tar zxf nexus-${NEXUS_VERSION}-bundle.tar.gz \
--  && mv /tmp/nexus-${NEXUS_VERSION}/* /opt/sonatype/nexus/ \
--  && rm -rf /tmp/nexus-${NEXUS_VERSION} \
--  && rm -rf /tmp/nexus-${NEXUS_VERSION}-bundle.tar.gz
--
--COPY nexus.xml /workspace/nexus/conf/nexus.xml
--
--# Install tooltool directly from github.
--RUN mkdir /build
--ADD https://raw.githubusercontent.com/mozilla/build-tooltool/master/tooltool.py /build/tooltool.py
--RUN chmod +rx /build/tooltool.py
--
--# Back to the centos6-build workdir, matching desktop-build.
--WORKDIR /home/worker
--
--# Set a default command useful for debugging
--CMD ["/bin/bash", "--login"]
-diff --git a/taskcluster/docker/android-gradle-build/README.md b/taskcluster/docker/android-gradle-build/README.md
-deleted file mode 100644
---- a/taskcluster/docker/android-gradle-build/README.md
-+++ /dev/null
-@@ -1,2 +0,0 @@
--This is a docker script for fetching Android Gradle dependencies for
--use in Mozilla's build clusters.
-diff --git a/taskcluster/docker/android-gradle-build/REGISTRY b/taskcluster/docker/android-gradle-build/REGISTRY
-deleted file mode 100644
---- a/taskcluster/docker/android-gradle-build/REGISTRY
-+++ /dev/null
-@@ -1,1 +0,0 @@
--taskcluster
-diff --git a/taskcluster/docker/android-gradle-build/VERSION b/taskcluster/docker/android-gradle-build/VERSION
-deleted file mode 100644
---- a/taskcluster/docker/android-gradle-build/VERSION
-+++ /dev/null
-@@ -1,1 +0,0 @@
--0.0.2
-diff --git a/taskcluster/docker/android-gradle-build/bin/after-checkout.sh b/taskcluster/docker/android-gradle-build/bin/after-checkout.sh
-deleted file mode 100644
---- a/taskcluster/docker/android-gradle-build/bin/after-checkout.sh
-+++ /dev/null
-@@ -1,16 +0,0 @@
--#!/bin/bash -vex
--
--set -x -e
--
--: WORKSPACE ${WORKSPACE:=/workspace}
--
--set -v
--
--# Populate /home/worker/workspace/build/src/java_home.
--cp -R /workspace/java/usr/lib/jvm/java_home /home/worker/workspace/build/src
--
--export JAVA_HOME=/home/worker/workspace/build/src/java_home
--export PATH=$PATH:$JAVA_HOME/bin
--
--# Populate /home/worker/.mozbuild/android-sdk-linux.
--python2.7 /home/worker/workspace/build/src/python/mozboot/mozboot/android.py --artifact-mode --no-interactive
-diff --git a/taskcluster/docker/android-gradle-build/bin/after.sh b/taskcluster/docker/android-gradle-build/bin/after.sh
-deleted file mode 100644
---- a/taskcluster/docker/android-gradle-build/bin/after.sh
-+++ /dev/null
-@@ -1,59 +0,0 @@
--#!/bin/bash -vex
--
--set -x -e
--
--: WORKSPACE ${WORKSPACE:=/workspace}
--: GRADLE_VERSION ${GRADLE_VERSION:=2.14.1}
--
--set -v
--
--# Package everything up.
--pushd ${WORKSPACE}
--
--cp -R /home/worker/.mozbuild/android-sdk-linux android-sdk-linux
--tar cJf android-sdk-linux.tar.xz android-sdk-linux
--
--# We can't redistribute the Android SDK publicly.
--mkdir -p /home/worker/private/android-sdk
--mv android-sdk-linux.tar.xz /home/worker/private/android-sdk
--
--cp -R /home/worker/workspace/build/src/java_home java_home
--tar cJf java_home.tar.xz java_home
--
--# We can't redistribute Java publicly.
--mkdir -p /home/worker/private/java_home
--mv java_home.tar.xz /home/worker/private/java_home
--
--cp -R /workspace/nexus/storage/jcenter jcenter
--tar cJf jcenter.tar.xz jcenter
--
--cp -R /workspace/nexus/storage/google google
--tar cJf google.tar.xz google
--
--# The Gradle wrapper will have downloaded and verified the hash of exactly one
--# Gradle distribution.  It will be located in $GRADLE_USER_HOME, like
--# ~/.gradle/wrapper/dists/gradle-2.7-all/$PROJECT_HASH/gradle-2.7-all.zip.  We
--# want to remove the version from the internal directory for use via tooltool in
--# a mozconfig.
--cp $GRADLE_USER_HOME/wrapper/dists/gradle-${GRADLE_VERSION}-all/*/gradle-${GRADLE_VERSION}-all.zip gradle-${GRADLE_VERSION}-all.zip
--unzip -q gradle-${GRADLE_VERSION}-all.zip
--mv gradle-${GRADLE_VERSION} gradle-dist
--tar cJf gradle-dist.tar.xz gradle-dist
--
--mkdir -p /home/worker/artifacts
--mv jcenter.tar.xz /home/worker/artifacts
--mv google.tar.xz /home/worker/artifacts
--mv gradle-dist.tar.xz /home/worker/artifacts
--popd
--
--# Bug 1245170: at some point in the future, we'll be able to upload
--# things directly to tooltool.
--# pushd /home/worker/artifacts
--# /build/tooltool.py add --visibility=public jcenter.tar.xz
--# /build/tooltool.py add --visibility=public google.tar.xz
--# /build/tooltool.py add --visibility=public gradle-dist.tar.xz
--# /build/tooltool.py add --visibility=internal android-sdk-linux.tar.xz
--# /build/tooltool.py add --visibility=internal java_home.tar.xz
--# /build/tooltool.py upload -v --url=http://relengapi/tooltool/ \
--#   --message="No message - Archives uploaded from taskcluster."
--# popd
-diff --git a/taskcluster/docker/android-gradle-build/bin/before.sh b/taskcluster/docker/android-gradle-build/bin/before.sh
-deleted file mode 100644
---- a/taskcluster/docker/android-gradle-build/bin/before.sh
-+++ /dev/null
-@@ -1,27 +0,0 @@
--#!/bin/bash -vex
--
--set -x -e
--
--: WORKSPACE ${WORKSPACE:=/workspace}
--: GRADLE_VERSION ${GRADLE_VERSION:=2.7}
--
--set -v
--
--# Populate $WORKSPACE/java/usr/lib/jvm/java_home.  $topsrcdir hasn't
--# been checked out yet, so we can't put this directly into
--# $topsrcdir/java_home.
--. $HOME/bin/repackage-jdk-centos.sh
--
--export JAVA_HOME=$WORKSPACE/java/usr/lib/jvm/java_home
--export PATH=$PATH:$JAVA_HOME/bin
--
--# Frowned upon, but simplest.
--RUN_AS_USER=root NEXUS_WORK=${WORKSPACE}/nexus /opt/sonatype/nexus/bin/nexus restart
--
--# Wait "a while" for Nexus to actually start.  Don't fail if this fails.
--wget --quiet --retry-connrefused --waitretry=2 --tries=100 \
--  http://localhost:8081/nexus/service/local/status || true
--rm -rf status
--
--# Verify Nexus has actually started.  Fail if this fails.
--curl --fail --silent --location http://localhost:8081/nexus/service/local/status | grep '<state>STARTED</state>'
-diff --git a/taskcluster/docker/android-gradle-build/bin/build.sh b/taskcluster/docker/android-gradle-build/bin/build.sh
-deleted file mode 100644
---- a/taskcluster/docker/android-gradle-build/bin/build.sh
-+++ /dev/null
-@@ -1,31 +0,0 @@
--#! /bin/bash -vex
--
--set -x -e -v
--
--# TODO: when bug 1093833 is solved and tasks can run as non-root, reduce this
--# to a simple fail-if-root check
--if [ $(id -u) = 0 ]; then
--    # each of the caches we have mounted is owned by root, so update that ownership
--    # to 'worker'
--    for cache in /home/worker/.tc-vcs /home/worker/workspace /home/worker/tooltool-cache; do
--        if [ -d $cache ]; then
--            # -R probably isn't necessary forever, but it fixes some poisoned
--            # caches for now
--            chown -R worker:worker $cache
--        fi
--    done
--
--    # ..then drop privileges by re-running this script
--    exec su worker /home/worker/bin/build.sh
--fi
--
--####
--# The default build works for any fx_desktop_build based mozharness job:
--# via linux-build.sh
--####
--
--. $HOME/bin/checkout-sources.sh
--
--. $HOME/bin/after-checkout.sh
--
--. $WORKSPACE/build/src/taskcluster/scripts/builder/build-linux.sh
-diff --git a/taskcluster/docker/android-gradle-build/bin/checkout-script.sh b/taskcluster/docker/android-gradle-build/bin/checkout-script.sh
-deleted file mode 100644
---- a/taskcluster/docker/android-gradle-build/bin/checkout-script.sh
-+++ /dev/null
-@@ -1,17 +0,0 @@
--#! /bin/bash -vex
--
--set -x -e
--
--# Inputs, with defaults
--
--: GECKO_HEAD_REPOSITORY              ${GECKO_HEAD_REPOSITORY:=https://hg.mozilla.org/mozilla-central}
--: GECKO_HEAD_REV                ${GECKO_HEAD_REV:=default}
--
--: SCRIPT_DOWNLOAD_PATH          ${SCRIPT_DOWNLOAD_PATH:=$PWD}
--: SCRIPT_PATH                   ${SCRIPT_PATH:?"script path must be set"}
--set -v
--
--# download script from the gecko repository
--url=${GECKO_HEAD_REPOSITORY}/raw-file/${GECKO_HEAD_REV}/${SCRIPT_PATH}
--wget --directory-prefix=${SCRIPT_DOWNLOAD_PATH} $url
--chmod +x `basename ${SCRIPT_PATH}`
-diff --git a/taskcluster/docker/android-gradle-build/bin/checkout-sources.sh b/taskcluster/docker/android-gradle-build/bin/checkout-sources.sh
-deleted file mode 100644
---- a/taskcluster/docker/android-gradle-build/bin/checkout-sources.sh
-+++ /dev/null
-@@ -1,55 +0,0 @@
--#! /bin/bash -vex
--
--set -x -e
--
--# Inputs, with defaults
--
--# mozharness builds use three repositories: gecko (source), mozharness (build
--# scripts) and tools (miscellaneous); for each, specify *_REPOSITORY.  If the
--# revision is not in the standard repo for the codebase, specify *_BASE_REPO as
--# the canonical repo to clone and *_HEAD_REPO as the repo containing the
--# desired revision.  For Mercurial clones, only *_HEAD_REV is required; for Git
--# clones, specify the branch name to fetch as *_HEAD_REF and the desired sha1
--# as *_HEAD_REV.
--
--: GECKO_REPOSITORY              ${GECKO_REPOSITORY:=https://hg.mozilla.org/mozilla-central}
--: GECKO_BASE_REPOSITORY         ${GECKO_BASE_REPOSITORY:=${GECKO_REPOSITORY}}
--: GECKO_HEAD_REPOSITORY         ${GECKO_HEAD_REPOSITORY:=${GECKO_REPOSITORY}}
--: GECKO_HEAD_REV                ${GECKO_HEAD_REV:=default}
--: GECKO_HEAD_REF                ${GECKO_HEAD_REF:=${GECKO_HEAD_REV}}
--
--: TOOLS_REPOSITORY              ${TOOLS_REPOSITORY:=https://hg.mozilla.org/build/tools}
--: TOOLS_BASE_REPOSITORY         ${TOOLS_BASE_REPOSITORY:=${TOOLS_REPOSITORY}}
--: TOOLS_HEAD_REPOSITORY         ${TOOLS_HEAD_REPOSITORY:=${TOOLS_REPOSITORY}}
--: TOOLS_HEAD_REV                ${TOOLS_HEAD_REV:=default}
--: TOOLS_HEAD_REF                ${TOOLS_HEAD_REF:=${TOOLS_HEAD_REV}}
--: TOOLS_DISABLE                 ${TOOLS_DISABLE:=false}
--
--: WORKSPACE                     ${WORKSPACE:=/home/worker/workspace}
--
--set -v
--
--# check out tools where mozharness expects it to be ($PWD/build/tools and $WORKSPACE/build/tools)
--if [ ! "$TOOLS_DISABLE" = true ]
--then
--    tc-vcs checkout $WORKSPACE/build/tools $TOOLS_BASE_REPOSITORY $TOOLS_HEAD_REPOSITORY $TOOLS_HEAD_REV $TOOLS_HEAD_REF
--
--    if [ ! -d build ]; then
--        mkdir -p build
--        ln -s $WORKSPACE/build/tools build/tools
--    fi
--fi
--
--# TODO - include tools repository in EXTRA_CHECKOUT_REPOSITORIES list
--for extra_repo in $EXTRA_CHECKOUT_REPOSITORIES; do
--    BASE_REPO="${extra_repo}_BASE_REPOSITORY"
--    HEAD_REPO="${extra_repo}_HEAD_REPOSITORY"
--    HEAD_REV="${extra_repo}_HEAD_REV"
--    HEAD_REF="${extra_repo}_HEAD_REF"
--    DEST_DIR="${extra_repo}_DEST_DIR"
--
--    tc-vcs checkout ${!DEST_DIR} ${!BASE_REPO} ${!HEAD_REPO} ${!HEAD_REV} ${!HEAD_REF}
--done
--
--export GECKO_DIR=$WORKSPACE/build/src
--tc-vcs checkout $GECKO_DIR $GECKO_BASE_REPOSITORY $GECKO_HEAD_REPOSITORY $GECKO_HEAD_REV $GECKO_HEAD_REF
-diff --git a/taskcluster/docker/android-gradle-build/bin/repackage-jdk-centos.sh b/taskcluster/docker/android-gradle-build/bin/repackage-jdk-centos.sh
-deleted file mode 100755
---- a/taskcluster/docker/android-gradle-build/bin/repackage-jdk-centos.sh
-+++ /dev/null
-@@ -1,53 +0,0 @@
--#! /bin/bash
--
--set -e -x
--
--: WORKSPACE ${WORKSPACE:=/workspace}
--
--set -v
--
--mkdir -p $WORKSPACE/java
--pushd $WORKSPACE/java
--
--# change these variables when updating java version
--mirror_url_base="http://mirror.centos.org/centos/6/os/x86_64/Packages"
--openjdk=java-1.8.0-openjdk-headless-1.8.0.121-1.b13.el6.x86_64.rpm
--openjdk_devel=java-1.8.0-openjdk-devel-1.8.0.121-1.b13.el6.x86_64.rpm
--jvm_openjdk_dir=java-1.8.0-openjdk-1.8.0.121-1.b13.el6.x86_64
--
--# grab the rpm and unpack it
--wget ${mirror_url_base}/${openjdk}
--wget ${mirror_url_base}/${openjdk_devel}
--rpm2cpio $openjdk | cpio -ivd
--rpm2cpio $openjdk_devel | cpio -ivd
--
--cd usr/lib/jvm
--mv $jvm_openjdk_dir java_home
--
--# cacerts is a relative symlink, which doesn't work when we repackage.  Make it
--# absolute.  We could use tar's --dereference option, but there's a subtle
--# difference between making the symlink absolute and using --dereference.
--# Making the symlink absolute lets the consuming system set the cacerts; using
--# --dereference takes the producing system's cacerts and sets them in stone.  We
--# prefer the flexibility of the former.
--rm java_home/jre/lib/security/cacerts
--ln -s /etc/pki/java/cacerts java_home/jre/lib/security/cacerts
--
--# tzdb.dat is an absolute symlink, which might not work when we
--# repackage.  Copy the underlying timezone database.  Copying the
--# target file from the toolchain producing system avoids requiring the
--# consuming system to have java-tzdb installed, which would bake in a
--# subtle dependency that has been addressed in modern versions of
--# CentOS.  See https://bugzilla.redhat.com/show_bug.cgi?id=1130800 for
--# discussion.
--rm java_home/jre/lib/tzdb.dat
--cp /usr/share/javazi-1.8/tzdb.dat java_home/jre/lib/tzdb.dat
--
--# document version this is based on
--echo "Built from ${mirror_url_Base}
--    ${openjdk}
--    ${openjdk_devel}
--
--Run through rpm2cpio | cpio, and /usr/lib/jvm/${jvm_openjdk_dir} renamed to 'java_home'." > java_home/VERSION
--
--popd
-diff --git a/taskcluster/docker/android-gradle-build/buildprops.json b/taskcluster/docker/android-gradle-build/buildprops.json
-deleted file mode 100644
---- a/taskcluster/docker/android-gradle-build/buildprops.json
-+++ /dev/null
-@@ -1,9 +0,0 @@
--{
--  "properties": {
--    "buildername": ""
--  },
--  "sourcestamp": {
--    "changes": []
--  },
--  "comments": "TaskCluster Job"
--}
-diff --git a/taskcluster/docker/android-gradle-build/dot-config/pip/pip.conf b/taskcluster/docker/android-gradle-build/dot-config/pip/pip.conf
-deleted file mode 100644
---- a/taskcluster/docker/android-gradle-build/dot-config/pip/pip.conf
-+++ /dev/null
-@@ -1,2 +0,0 @@
--[global]
--disable-pip-version-check = true
-diff --git a/taskcluster/docker/android-gradle-build/nexus.xml b/taskcluster/docker/android-gradle-build/nexus.xml
-deleted file mode 100644
---- a/taskcluster/docker/android-gradle-build/nexus.xml
-+++ /dev/null
-@@ -1,377 +0,0 @@
--<?xml version="1.0" encoding="UTF-8"?>
--<nexusConfiguration>
--  <version>2.8.0</version>
--  <nexusVersion>2.12.0-01</nexusVersion>
--  <globalConnectionSettings>
--    <connectionTimeout>20000</connectionTimeout>
--    <retrievalRetryCount>3</retrievalRetryCount>
--    <queryString></queryString>
--  </globalConnectionSettings>
--  <restApi>
--    <uiTimeout>60000</uiTimeout>
--  </restApi>
--  <httpProxy>
--    <enabled>true</enabled>
--    <port>8082</port>
--    <proxyPolicy>strict</proxyPolicy>
--  </httpProxy>
--  <routing>
--    <resolveLinks>true</resolveLinks>
--  </routing>
--  <repositories>
--    <repository>
--      <id>jcenter</id>
--      <name>jcenter</name>
--      <providerRole>org.sonatype.nexus.proxy.repository.Repository</providerRole>
--      <providerHint>maven2</providerHint>
--      <localStatus>IN_SERVICE</localStatus>
--      <notFoundCacheActive>true</notFoundCacheActive>
--      <notFoundCacheTTL>1440</notFoundCacheTTL>
--      <userManaged>true</userManaged>
--      <exposed>true</exposed>
--      <browseable>true</browseable>
--      <writePolicy>READ_ONLY</writePolicy>
--      <indexable>true</indexable>
--      <searchable>true</searchable>
--      <localStorage>
--        <provider>file</provider>
--      </localStorage>
--      <remoteStorage>
--        <url>https://jcenter.bintray.com/</url>
--      </remoteStorage>
--      <externalConfiguration>
--        <repositoryPolicy>RELEASE</repositoryPolicy>
--        <checksumPolicy>STRICT</checksumPolicy>
--        <fileTypeValidation>true</fileTypeValidation>
--        <downloadRemoteIndex>false</downloadRemoteIndex>
--        <artifactMaxAge>-1</artifactMaxAge>
--        <metadataMaxAge>1440</metadataMaxAge>
--        <itemMaxAge>1440</itemMaxAge>
--        <autoBlockActive>true</autoBlockActive>
--      </externalConfiguration>
--    </repository>
--    <repository>
--      <id>google</id>
--      <name>google</name>
--      <providerRole>org.sonatype.nexus.proxy.repository.Repository</providerRole>
--      <providerHint>maven2</providerHint>
--      <localStatus>IN_SERVICE</localStatus>
--      <notFoundCacheActive>true</notFoundCacheActive>
--      <notFoundCacheTTL>1440</notFoundCacheTTL>
--      <userManaged>true</userManaged>
--      <exposed>true</exposed>
--      <browseable>true</browseable>
--      <writePolicy>READ_ONLY</writePolicy>
--      <indexable>true</indexable>
--      <searchable>true</searchable>
--      <localStorage>
--        <provider>file</provider>
--      </localStorage>
--      <remoteStorage>
--        <url>https://maven.google.com/</url>
--      </remoteStorage>
--      <externalConfiguration>
--        <repositoryPolicy>RELEASE</repositoryPolicy>
--        <checksumPolicy>STRICT</checksumPolicy>
--        <fileTypeValidation>true</fileTypeValidation>
--        <downloadRemoteIndex>false</downloadRemoteIndex>
--        <artifactMaxAge>-1</artifactMaxAge>
--        <metadataMaxAge>1440</metadataMaxAge>
--        <itemMaxAge>1440</itemMaxAge>
--        <autoBlockActive>true</autoBlockActive>
--      </externalConfiguration>
--    </repository>
--    <repository>
--      <id>central</id>
--      <name>Central</name>
--      <providerRole>org.sonatype.nexus.proxy.repository.Repository</providerRole>
--      <providerHint>maven2</providerHint>
--      <localStatus>IN_SERVICE</localStatus>
--      <notFoundCacheActive>true</notFoundCacheActive>
--      <notFoundCacheTTL>1440</notFoundCacheTTL>
--      <userManaged>true</userManaged>
--      <exposed>true</exposed>
--      <browseable>true</browseable>
--      <writePolicy>READ_ONLY</writePolicy>
--      <indexable>true</indexable>
--      <searchable>true</searchable>
--      <localStorage>
--        <provider>file</provider>
--      </localStorage>
--      <remoteStorage>
--        <url>https://repo1.maven.org/maven2/</url>
--      </remoteStorage>
--      <externalConfiguration>
--        <proxyMode>ALLOW</proxyMode>
--        <artifactMaxAge>-1</artifactMaxAge>
--        <itemMaxAge>1440</itemMaxAge>
--        <cleanseRepositoryMetadata>false</cleanseRepositoryMetadata>
--        <downloadRemoteIndex>false</downloadRemoteIndex>
--        <checksumPolicy>WARN</checksumPolicy>
--        <repositoryPolicy>RELEASE</repositoryPolicy>
--      </externalConfiguration>
--    </repository>
--    <repository>
--      <id>apache-snapshots</id>
--      <name>Apache Snapshots</name>
--      <providerRole>org.sonatype.nexus.proxy.repository.Repository</providerRole>
--      <providerHint>maven2</providerHint>
--      <localStatus>IN_SERVICE</localStatus>
--      <notFoundCacheActive>true</notFoundCacheActive>
--      <notFoundCacheTTL>1440</notFoundCacheTTL>
--      <userManaged>true</userManaged>
--      <exposed>true</exposed>
--      <browseable>true</browseable>
--      <writePolicy>READ_ONLY</writePolicy>
--      <indexable>true</indexable>
--      <searchable>true</searchable>
--      <localStorage>
--        <provider>file</provider>
--      </localStorage>
--      <remoteStorage>
--        <url>https://repository.apache.org/snapshots/</url>
--      </remoteStorage>
--      <externalConfiguration>
--        <proxyMode>ALLOW</proxyMode>
--        <artifactMaxAge>1440</artifactMaxAge>
--        <itemMaxAge>1440</itemMaxAge>
--        <cleanseRepositoryMetadata>false</cleanseRepositoryMetadata>
--        <downloadRemoteIndex>false</downloadRemoteIndex>
--        <checksumPolicy>WARN</checksumPolicy>
--        <repositoryPolicy>SNAPSHOT</repositoryPolicy>
--      </externalConfiguration>
--    </repository>
--    <repository>
--      <id>releases</id>
--      <name>Releases</name>
--      <providerRole>org.sonatype.nexus.proxy.repository.Repository</providerRole>
--      <providerHint>maven2</providerHint>
--      <localStatus>IN_SERVICE</localStatus>
--      <notFoundCacheTTL>1440</notFoundCacheTTL>
--      <userManaged>true</userManaged>
--      <exposed>true</exposed>
--      <browseable>true</browseable>
--      <writePolicy>ALLOW_WRITE_ONCE</writePolicy>
--      <indexable>true</indexable>
--      <searchable>true</searchable>
--      <localStorage>
--        <provider>file</provider>
--      </localStorage>
--      <externalConfiguration>
--        <proxyMode>ALLOW</proxyMode>
--        <artifactMaxAge>-1</artifactMaxAge>
--        <itemMaxAge>1440</itemMaxAge>
--        <cleanseRepositoryMetadata>false</cleanseRepositoryMetadata>
--        <downloadRemoteIndex>false</downloadRemoteIndex>
--        <checksumPolicy>WARN</checksumPolicy>
--        <repositoryPolicy>RELEASE</repositoryPolicy>
--      </externalConfiguration>
--    </repository>
--    <repository>
--      <id>snapshots</id>
--      <name>Snapshots</name>
--      <providerRole>org.sonatype.nexus.proxy.repository.Repository</providerRole>
--      <providerHint>maven2</providerHint>
--      <localStatus>IN_SERVICE</localStatus>
--      <notFoundCacheTTL>1440</notFoundCacheTTL>
--      <userManaged>true</userManaged>
--      <exposed>true</exposed>
--      <browseable>true</browseable>
--      <writePolicy>ALLOW_WRITE</writePolicy>
--      <indexable>true</indexable>
--      <searchable>true</searchable>
--      <localStorage>
--        <provider>file</provider>
--      </localStorage>
--      <externalConfiguration>
--        <proxyMode>ALLOW</proxyMode>
--        <artifactMaxAge>1440</artifactMaxAge>
--        <itemMaxAge>1440</itemMaxAge>
--        <cleanseRepositoryMetadata>false</cleanseRepositoryMetadata>
--        <downloadRemoteIndex>false</downloadRemoteIndex>
--        <checksumPolicy>WARN</checksumPolicy>
--        <repositoryPolicy>SNAPSHOT</repositoryPolicy>
--      </externalConfiguration>
--    </repository>
--    <repository>
--      <id>thirdparty</id>
--      <name>3rd party</name>
--      <providerRole>org.sonatype.nexus.proxy.repository.Repository</providerRole>
--      <providerHint>maven2</providerHint>
--      <localStatus>IN_SERVICE</localStatus>
--      <notFoundCacheTTL>1440</notFoundCacheTTL>
--      <userManaged>true</userManaged>
--      <exposed>true</exposed>
--      <browseable>true</browseable>
--      <writePolicy>ALLOW_WRITE_ONCE</writePolicy>
--      <indexable>true</indexable>
--      <searchable>true</searchable>
--      <localStorage>
--        <provider>file</provider>
--      </localStorage>
--      <externalConfiguration>
--        <proxyMode>ALLOW</proxyMode>
--        <artifactMaxAge>-1</artifactMaxAge>
--        <itemMaxAge>1440</itemMaxAge>
--        <cleanseRepositoryMetadata>false</cleanseRepositoryMetadata>
--        <downloadRemoteIndex>false</downloadRemoteIndex>
--        <checksumPolicy>WARN</checksumPolicy>
--        <repositoryPolicy>RELEASE</repositoryPolicy>
--      </externalConfiguration>
--    </repository>
--    <repository>
--      <id>central-m1</id>
--      <name>Central M1 shadow</name>
--      <providerRole>org.sonatype.nexus.proxy.repository.ShadowRepository</providerRole>
--      <providerHint>m2-m1-shadow</providerHint>
--      <localStatus>IN_SERVICE</localStatus>
--      <notFoundCacheTTL>15</notFoundCacheTTL>
--      <userManaged>true</userManaged>
--      <exposed>true</exposed>
--      <browseable>true</browseable>
--      <writePolicy>READ_ONLY</writePolicy>
--      <localStorage>
--        <provider>file</provider>
--      </localStorage>
--      <externalConfiguration>
--        <masterRepositoryId>central</masterRepositoryId>
--        <syncAtStartup>false</syncAtStartup>
--      </externalConfiguration>
--    </repository>
--    <repository>
--      <id>public</id>
--      <name>Public Repositories</name>
--      <providerRole>org.sonatype.nexus.proxy.repository.GroupRepository</providerRole>
--      <providerHint>maven2</providerHint>
--      <localStatus>IN_SERVICE</localStatus>
--      <notFoundCacheTTL>15</notFoundCacheTTL>
--      <userManaged>true</userManaged>
--      <exposed>true</exposed>
--      <browseable>true</browseable>
--      <writePolicy>READ_ONLY</writePolicy>
--      <indexable>true</indexable>
--      <localStorage>
--        <provider>file</provider>
--      </localStorage>
--      <externalConfiguration>
--        <mergeMetadata>true</mergeMetadata>
--        <memberRepositories>
--          <memberRepository>releases</memberRepository>
--          <memberRepository>snapshots</memberRepository>
--          <memberRepository>thirdparty</memberRepository>
--          <memberRepository>central</memberRepository>
--        </memberRepositories>
--      </externalConfiguration>
--    </repository>
--  </repositories>
--  <repositoryGrouping>
--    <pathMappings>
--      <pathMapping>
--        <id>inhouse-stuff</id>
--        <groupId>*</groupId>
--        <routeType>inclusive</routeType>
--        <routePatterns>
--          <routePattern>^/(com|org)/somecompany/.*</routePattern>
--        </routePatterns>
--        <repositories>
--          <repository>snapshots</repository>
--          <repository>releases</repository>
--        </repositories>
--      </pathMapping>
--      <pathMapping>
--        <id>apache-stuff</id>
--        <groupId>*</groupId>
--        <routeType>exclusive</routeType>
--        <routePatterns>
--          <routePattern>^/org/some-oss/.*</routePattern>
--        </routePatterns>
--        <repositories>
--          <repository>releases</repository>
--          <repository>snapshots</repository>
--        </repositories>
--      </pathMapping>
--    </pathMappings>
--  </repositoryGrouping>
--  <repositoryTargets>
--    <repositoryTarget>
--      <id>1</id>
--      <name>All (Maven2)</name>
--      <contentClass>maven2</contentClass>
--      <patterns>
--        <pattern>.*</pattern>
--      </patterns>
--    </repositoryTarget>
--    <repositoryTarget>
--      <id>2</id>
--      <name>All (Maven1)</name>
--      <contentClass>maven1</contentClass>
--      <patterns>
--        <pattern>.*</pattern>
--      </patterns>
--    </repositoryTarget>
--    <repositoryTarget>
--      <id>3</id>
--      <name>All but sources (Maven2)</name>
--      <contentClass>maven2</contentClass>
--      <patterns>
--        <pattern>(?!.*-sources.*).*</pattern>
--      </patterns>
--    </repositoryTarget>
--    <repositoryTarget>
--      <id>4</id>
--      <name>All Metadata (Maven2)</name>
--      <contentClass>maven2</contentClass>
--      <patterns>
--        <pattern>.*maven-metadata\.xml.*</pattern>
--      </patterns>
--    </repositoryTarget>
--    <repositoryTarget>
--      <id>any</id>
--      <name>All (Any Repository)</name>
--      <contentClass>any</contentClass>
--      <patterns>
--        <pattern>.*</pattern>
--      </patterns>
--    </repositoryTarget>
--    <repositoryTarget>
--      <id>site</id>
--      <name>All (site)</name>
--      <contentClass>site</contentClass>
--      <patterns>
--        <pattern>.*</pattern>
--      </patterns>
--    </repositoryTarget>
--    <repositoryTarget>
--      <id>npm</id>
--      <name>All (npm)</name>
--      <contentClass>npm</contentClass>
--      <patterns>
--        <pattern>.*</pattern>
--      </patterns>
--    </repositoryTarget>
--    <repositoryTarget>
--      <id>nuget</id>
--      <name>All (nuget)</name>
--      <contentClass>nuget</contentClass>
--      <patterns>
--        <pattern>.*</pattern>
--      </patterns>
--    </repositoryTarget>
--    <repositoryTarget>
--      <id>rubygems</id>
--      <name>All (rubygems)</name>
--      <contentClass>rubygems</contentClass>
--      <patterns>
--        <pattern>.*</pattern>
--      </patterns>
--    </repositoryTarget>
--  </repositoryTargets>
--  <smtpConfiguration>
--    <hostname>smtp-host</hostname>
--    <port>25</port>
--    <username>smtp-username</username>
--    <password>{jyU2gDFaNz8HQ4ybBAIdtJ6KL+YB08GXQs7vLPnia3o=}</password>
--    <systemEmailAddress>system@nexus.org</systemEmailAddress>
--  </smtpConfiguration>
--  <notification />
--</nexusConfiguration>
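
The repositoryTarget entries above match artifact paths with plain regular expressions; the "All but sources (Maven2)" target uses a negative lookahead to exclude `-sources` artifacts. A quick illustration of how those patterns classify paths (Python `re` used purely as a sketch; Nexus evaluates them as Java regexes, which share this syntax):

    import re

    # Patterns copied from the repositoryTarget definitions above.
    targets = {
        "All (Maven2)": r".*",
        "All but sources (Maven2)": r"(?!.*-sources.*).*",
        "All Metadata (Maven2)": r".*maven-metadata\.xml.*",
    }

    for path in ("/org/acme/app/1.0/app-1.0.jar",
                 "/org/acme/app/1.0/app-1.0-sources.jar",
                 "/org/acme/app/maven-metadata.xml"):
        hits = [name for name, pat in targets.items() if re.fullmatch(pat, path)]
        print(path, "->", hits)
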
-diff --git a/taskcluster/docker/android-gradle-build/oauth.txt b/taskcluster/docker/android-gradle-build/oauth.txt
-deleted file mode 100644
---- a/taskcluster/docker/android-gradle-build/oauth.txt
-+++ /dev/null
-@@ -1,2 +0,0 @@
--taskcluster_clientId = None
--taskcluster_accessToken = None
-diff --git a/taskcluster/docker/beet-mover/Dockerfile b/taskcluster/docker/beet-mover/Dockerfile
-deleted file mode 100644
---- a/taskcluster/docker/beet-mover/Dockerfile
-+++ /dev/null
-@@ -1,23 +0,0 @@
--FROM ubuntu:vivid
--
--RUN apt-get -q update \
--    && apt-get install --yes -q \
--    mercurial \
--    python-dev \
--    python-pip \
--    python-virtualenv \
--    libffi-dev \
--    liblzma-dev \
--    libssl-dev \
--    libyaml-dev \
--    libmysqlclient-dev \
--    clamav \
--    clamav-freshclam \
--    curl \
--    wget \
--    && apt-get clean
--
--COPY requirements.txt /tmp/
--RUN pip install -r /tmp/requirements.txt
--# Freshclam may be flaky, retry if it fails
--RUN for i in 1 2 3 4 5; do freshclam --verbose && break || sleep 15; done
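
The final RUN above wraps a flaky command in a bounded retry loop. The same shape in Python, for reference (the command name and 15-second delay are simply the values from the Dockerfile):

    import subprocess, sys, time

    def retry(cmd, attempts=5, delay=15):
        """Run cmd until it exits 0 or the attempts are exhausted."""
        for i in range(1, attempts + 1):
            if subprocess.call(cmd) == 0:  # assumes cmd[0] exists on PATH
                return
            print(f"attempt {i} failed; retrying in {delay}s", file=sys.stderr)
            time.sleep(delay)
        raise RuntimeError(f"{cmd[0]} failed after {attempts} attempts")

    retry(["freshclam", "--verbose"])
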
-diff --git a/taskcluster/docker/beet-mover/requirements.txt b/taskcluster/docker/beet-mover/requirements.txt
-deleted file mode 100644
---- a/taskcluster/docker/beet-mover/requirements.txt
-+++ /dev/null
-@@ -1,2 +0,0 @@
--sh
--redo
-diff --git a/taskcluster/docker/centos6-build-upd/Dockerfile b/taskcluster/docker/centos6-build-upd/Dockerfile
-deleted file mode 100644
---- a/taskcluster/docker/centos6-build-upd/Dockerfile
-+++ /dev/null
-@@ -1,10 +0,0 @@
--FROM          taskcluster/centos6-build:0.1.7
--MAINTAINER    Dustin J. Mitchell <dustin@mozilla.com>
--
--### update to latest from upstream repositories
--# if this becomes a long list of packages, consider bumping the
--# centos6-build version
--RUN yum update -y
--
--# Set a default command useful for debugging
--CMD ["/bin/bash", "--login"]
-diff --git a/taskcluster/docker/centos6-build-upd/HASH b/taskcluster/docker/centos6-build-upd/HASH
-deleted file mode 100644
---- a/taskcluster/docker/centos6-build-upd/HASH
-+++ /dev/null
-@@ -1,1 +0,0 @@
--sha256:63f1b29d40b63f1f9232661190decb906200f9e8a9209eecbc7a9433344a9f96
-diff --git a/taskcluster/docker/centos6-build-upd/REGISTRY b/taskcluster/docker/centos6-build-upd/REGISTRY
-deleted file mode 100644
---- a/taskcluster/docker/centos6-build-upd/REGISTRY
-+++ /dev/null
-@@ -1,1 +0,0 @@
--taskcluster
-diff --git a/taskcluster/docker/centos6-build-upd/VERSION b/taskcluster/docker/centos6-build-upd/VERSION
-deleted file mode 100644
---- a/taskcluster/docker/centos6-build-upd/VERSION
-+++ /dev/null
-@@ -1,1 +0,0 @@
--0.1.7.20170801103900
-diff --git a/taskcluster/docker/centos6-build/Dockerfile b/taskcluster/docker/centos6-build/Dockerfile
-deleted file mode 100644
---- a/taskcluster/docker/centos6-build/Dockerfile
-+++ /dev/null
-@@ -1,35 +0,0 @@
--FROM          centos:6
--MAINTAINER    Dustin J. Mitchell <dustin@mozilla.com>
--
--### add worker user and setup its workspace
--RUN useradd -d /home/worker -s /bin/bash -m worker
--# Declare default working folder
--WORKDIR       /home/worker
--
--# This will create a host mounted filesystem when the cache is stripped
--# on Try. This cancels out some of the performance losses of aufs. See
--# bug 1291940.
--VOLUME /home/worker/workspace
--VOLUME /home/worker/tooltool-cache
--
--# %include build/valgrind/valgrind-epochs.patch
--ADD topsrcdir/build/valgrind/valgrind-epochs.patch /tmp/valgrind-epochs.patch
--
--# install non-build specific dependencies in a single layer
--ADD           system-setup.sh   /tmp/system-setup.sh
--RUN           bash /tmp/system-setup.sh
--
--# Builds need the share module enabled
--ADD           hgrc /home/worker/.hgrc
--RUN chown -R worker:worker /home/worker/.hgrc
--
--# Set variables normally configured at login by the shell's parent process;
--# these are taken from the GNU su manual
--ENV           HOME          /home/worker
--ENV           SHELL         /bin/bash
--ENV           USER          worker
--ENV           LOGNAME       worker
--ENV           HOSTNAME      taskcluster-worker
--
--# Set a default command useful for debugging
--CMD ["/bin/bash", "--login"]
-diff --git a/taskcluster/docker/centos6-build/HASH b/taskcluster/docker/centos6-build/HASH
-deleted file mode 100644
---- a/taskcluster/docker/centos6-build/HASH
-+++ /dev/null
-@@ -1,1 +0,0 @@
--sha256:4142a0d594062c2d7221b61be14915c52e3776b96680780413afebc45f594654
-diff --git a/taskcluster/docker/centos6-build/REGISTRY b/taskcluster/docker/centos6-build/REGISTRY
-deleted file mode 100644
---- a/taskcluster/docker/centos6-build/REGISTRY
-+++ /dev/null
-@@ -1,1 +0,0 @@
--taskcluster
-diff --git a/taskcluster/docker/centos6-build/VERSION b/taskcluster/docker/centos6-build/VERSION
-deleted file mode 100644
---- a/taskcluster/docker/centos6-build/VERSION
-+++ /dev/null
-@@ -1,1 +0,0 @@
--0.1.7
-diff --git a/taskcluster/docker/centos6-build/hgrc b/taskcluster/docker/centos6-build/hgrc
-deleted file mode 100644
---- a/taskcluster/docker/centos6-build/hgrc
-+++ /dev/null
-@@ -1,2 +0,0 @@
--[extensions]
--share =
-diff --git a/taskcluster/docker/centos6-build/system-setup.sh b/taskcluster/docker/centos6-build/system-setup.sh
-deleted file mode 100644
---- a/taskcluster/docker/centos6-build/system-setup.sh
-+++ /dev/null
-@@ -1,483 +0,0 @@
--#!/usr/bin/env bash
--
--set -ve
--
--test `whoami` == 'root'
--
--# lots of goodies in EPEL
--yum install -y epel-release
--
--# this sometimes fails, so we repeat it
--yum makecache || yum makecache
--
--yum shell -y <<'EOF'
--# This covers a bunch of requirements
--groupinstall Base
--
--install findutils
--install gawk
--install ppl
--install cpp
--install grep
--install gzip
--install sed
--install tar
--install util-linux
--install autoconf213
--install perl-Test-Simple
--install perl-Config-General
--
--# fonts required for PGO
--install xorg-x11-font*
--
--# lots of required packages that we build against.  We need the i686 and x86_64
--# versions of each, along with -devel packages, and yum does a poor job of
--# figuring out the interdependencies so we list all four.
--
--install alsa-lib-devel.i686
--install alsa-lib-devel.x86_64
--install alsa-lib.i686
--install alsa-lib.x86_64
--install atk-devel.i686
--install atk-devel.x86_64
--install atk.i686
--install atk.x86_64
--install cairo-devel.i686
--install cairo-devel.x86_64
--install cairo.i686
--install cairo.x86_64
--install check-devel.i686
--install check-devel.x86_64
--install check.i686
--install check.x86_64
--install dbus-glib-devel.i686
--install dbus-glib-devel.x86_64
--install dbus-glib.i686
--install dbus-glib.x86_64
--install fontconfig-devel.i686
--install fontconfig-devel.x86_64
--install fontconfig.i686
--install fontconfig.x86_64
--install freetype-devel.i686
--install freetype-devel.x86_64
--install freetype.i686
--install freetype.x86_64
--install GConf2-devel.i686
--install GConf2-devel.x86_64
--install GConf2.i686
--install GConf2.x86_64
--install gdk-pixbuf2-devel.i686
--install gdk-pixbuf2-devel.x86_64
--install glib2-devel.i686
--install glib2-devel.x86_64
--install glib2.i686
--install glib2.x86_64
--install glibc-devel.i686
--install glibc-devel.x86_64
--install glibc.i686
--install glibc.x86_64
--install gnome-vfs2-devel.i686
--install gnome-vfs2-devel.x86_64
--install gnome-vfs2.i686
--install gnome-vfs2.x86_64
--install gstreamer-devel.i686
--install gstreamer-devel.x86_64
--install gstreamer.i686
--install gstreamer-plugins-base-devel.i686
--install gstreamer-plugins-base-devel.x86_64
--install gstreamer-plugins-base.i686
--install gstreamer-plugins-base.x86_64
--install gstreamer.x86_64
--install gtk2-devel.i686
--install gtk2-devel.x86_64
--install gtk2.i686
--install gtk2.x86_64
--install libcurl-devel.i686
--install libcurl-devel.x86_64
--install libcurl.i686
--install libcurl.x86_64
--install libdrm-devel.i686
--install libdrm-devel.x86_64
--install libdrm.i686
--install libdrm.x86_64
--install libICE-devel.i686
--install libICE-devel.x86_64
--install libICE.i686
--install libICE.x86_64
--install libIDL-devel.i686
--install libIDL-devel.x86_64
--install libIDL.i686
--install libIDL.x86_64
--install libidn-devel.i686
--install libidn-devel.x86_64
--install libidn.i686
--install libidn.x86_64
--install libnotify-devel.i686
--install libnotify-devel.x86_64
--install libnotify.i686
--install libnotify.x86_64
--install libpng-devel.i686
--install libpng-devel.x86_64
--install libpng.i686
--install libpng.x86_64
--install libSM-devel.i686
--install libSM-devel.x86_64
--install libSM.i686
--install libSM.x86_64
--install libstdc++-devel.i686
--install libstdc++-devel.x86_64
--install libstdc++.i686
--install libstdc++.x86_64
--install libX11-devel.i686
--install libX11-devel.x86_64
--install libX11.i686
--install libX11.x86_64
--install libXau-devel.i686
--install libXau-devel.x86_64
--install libXau.i686
--install libXau.x86_64
--install libxcb-devel.i686
--install libxcb-devel.x86_64
--install libxcb.i686
--install libxcb.x86_64
--install libXcomposite-devel.i686
--install libXcomposite-devel.x86_64
--install libXcomposite.i686
--install libXcomposite.x86_64
--install libXcursor-devel.i686
--install libXcursor-devel.x86_64
--install libXcursor.i686
--install libXcursor.x86_64
--install libXdamage-devel.i686
--install libXdamage-devel.x86_64
--install libXdamage.i686
--install libXdamage.x86_64
--install libXdmcp-devel.i686
--install libXdmcp-devel.x86_64
--install libXdmcp.i686
--install libXdmcp.x86_64
--install libXext-devel.i686
--install libXext-devel.x86_64
--install libXext.i686
--install libXext.x86_64
--install libXfixes-devel.i686
--install libXfixes-devel.x86_64
--install libXfixes.i686
--install libXfixes.x86_64
--install libXft-devel.i686
--install libXft-devel.x86_64
--install libXft.i686
--install libXft.x86_64
--install libXi-devel.i686
--install libXi-devel.x86_64
--install libXi.i686
--install libXinerama-devel.i686
--install libXinerama-devel.x86_64
--install libXinerama.i686
--install libXinerama.x86_64
--install libXi.x86_64
--install libxml2-devel.i686
--install libxml2-devel.x86_64
--install libxml2.i686
--install libxml2.x86_64
--install libXrandr-devel.i686
--install libXrandr-devel.x86_64
--install libXrandr.i686
--install libXrandr.x86_64
--install libXrender-devel.i686
--install libXrender-devel.x86_64
--install libXrender.i686
--install libXrender.x86_64
--install libXt-devel.i686
--install libXt-devel.x86_64
--install libXt.i686
--install libXt.x86_64
--install libXxf86vm-devel.i686
--install libXxf86vm-devel.x86_64
--install libXxf86vm.i686
--install libXxf86vm.x86_64
--install mesa-libGL-devel.i686
--install mesa-libGL-devel.x86_64
--install mesa-libGL.i686
--install mesa-libGL.x86_64
--install ORBit2-devel.i686
--install ORBit2-devel.x86_64
--install ORBit2.i686
--install ORBit2.x86_64
--install pango-devel.i686
--install pango-devel.x86_64
--install pango.i686
--install pango.x86_64
--install pixman-devel.i686
--install pixman-devel.x86_64
--install pixman.i686
--install pixman.x86_64
--install pulseaudio-libs-devel.i686
--install pulseaudio-libs-devel.x86_64
--install pulseaudio-libs.i686
--install pulseaudio-libs.x86_64
--install wireless-tools-devel.i686
--install wireless-tools-devel.x86_64
--install wireless-tools.i686
--install wireless-tools.x86_64
--install zlib-devel.i686
--install zlib-devel.x86_64
--install zlib.i686
--install zlib.x86_64
--
--# x86_64 only packages
--install hal-devel.x86_64
--install hal.x86_64
--install perl-devel.x86_64
--install perl.x86_64
--install dbus-x11.x86_64
--
--# glibc-static has no -devel
--install glibc-static.i686
--install glibc-static.x86_64
--
--# dbus-devel comes in two architectures, although dbus does not
--install dbus-devel.i686
--install dbus-devel.x86_64
--install dbus.x86_64
--
--# required for the Python build, below
--install bzip2-devel
--install openssl-devel
--install xz-libs
--install sqlite-devel
--
--# required for the git build, below
--install autoconf
--install perl-devel
--install perl-ExtUtils-MakeMaker
--install gettext-devel
--
--# build utilities
--install ccache
--
--# a basic node environment so that we can run TaskCluster tools
--install nodejs
--install npm
--
--# enough X to run `make check` and do a PGO build
--install Xvfb
--install xvinfo
--
--# required for building OS X tools
--install patch
--install libuuid-devel
--install openssl-static
--install cmake
--install subversion
--run
--EOF
--
--BUILD=/root/build
--mkdir $BUILD
--
--# for the builds below, there's no sense using ccache
--export CCACHE_DISABLE=1
--
--cd $BUILD
--curl https://raw.githubusercontent.com/mozilla/build-tooltool/master/tooltool.py > tooltool.py
--
--tooltool_fetch() {
--    cat >manifest.tt
--    python $BUILD/tooltool.py fetch
--    rm manifest.tt
--}
--
--# For a few packages, we want to run the very latest, which is hard to find for
--# stable old CentOS 6.  Custom yum repositories are cumbersome and can cause
--# unhappy failures when they contain multiple versions of the same package.  So
--# we either build from source or install an RPM from tooltool (the former being
--# the preferred solution for transparency).  Each of these source files was
--# downloaded directly from the upstream project site, although the RPMs are of
--# unknown origin.
--
--cd $BUILD
--tooltool_fetch <<'EOF'
--[
--{
--    "size": 830601,
--    "digest": "c04dadf29a3ac676e93cb684b619f753584f8414167135eb766602671d08c85d7bc564511310564bdf2651d72da911b017f0969b9a26d84df724aebf8733f268",
--    "algorithm": "sha512",
--    "filename": "yasm-1.1.0-1.x86_64.rpm"
--}
--]
--EOF
--yum install -y yasm-*.rpm
--
--# Valgrind
--# Install valgrind from sources to make sure we don't strip symbols
--tooltool_fetch <<'EOF'
--[
--{
--    "size": 14723076,
--    "digest": "34e1013cd3815d30a459b86220e871bb0a6209cc9e87af968f347083693779f022e986f211bdf1a5184ad7370cde12ff2cfca8099967ff94732970bd04a97009",
--    "algorithm": "sha512",
--    "filename": "valgrind-3.13.0.tar.bz2"
--}
--]
--EOF
--
--valgrind_version=3.13.0
--tar -xjf valgrind-$valgrind_version.tar.bz2
--cd valgrind-$valgrind_version
--
--# This patch by Julian Seward allows us to write a suppression for
--# a leak in a library that gets unloaded before shutdown.
--# ref: https://bugs.kde.org/show_bug.cgi?id=79362
--patch -p0 < /tmp/valgrind-epochs.patch
--
--./configure --prefix=/usr
--make -j$(grep -c ^processor /proc/cpuinfo) install
--
--# Git
--cd $BUILD
--# NOTE: rc builds are in https://www.kernel.org/pub/software/scm/git/testing/
--tooltool_fetch <<'EOF'
--[
--{
--    "size": 3938976,
--    "visibility": "public",
--    "digest": "f31cedb6d7c813d5cc9f40daa54ec6b34b046b8ec1b7a09a37598637f747449147a22736e95e4388d1a29fd01d7974b82342114b91d63b9d5df163ea3659fe72",
--    "algorithm": "sha512",
--    "filename": "git-2.8.0.rc3.tar.xz",
--    "unpack": true
--}
--]
--EOF
--cd git-2.8.0.rc3
--make configure
--./configure --prefix=/usr --without-tcltk
--make all install
--git config --global user.email "nobody@mozilla.com"
--git config --global user.name "mozilla"
--
--# Python
--cd $BUILD
--tooltool_fetch <<'EOF'
--[
--{
--    "size": 12250696,
--    "digest": "67615a6defbcda062f15a09f9dd3b9441afd01a8cc3255e5bc45b925378a0ddc38d468b7701176f6cc153ec52a4f21671b433780d9bde343aa9b9c1b2ae29feb",
--    "algorithm": "sha512",
--    "filename": "Python-2.7.10.tar.xz",
--    "unpack": true
--}
--]
--EOF
--cd Python-2.7.10
--./configure --prefix=/usr
--make
--# `altinstall` means that /usr/bin/python still points to CentOS's Python 2.6 install.
--# If you want Python 2.7, use `python2.7`
--make altinstall
--
--# Enough python utilities to get "peep" working
--cd $BUILD
--tooltool_fetch <<'EOF'
--[
--{
--    "size": 630700,
--    "digest": "1367f3a10c1fef2f8061e430585f1927f6bd7c416e764d65cea1f4255824d549efa77beef8ff784bbd62c307b4b1123502e7b3fd01a243c0cc5b433a841cc8b5",
--    "algorithm": "sha512",
--    "filename": "setuptools-18.1.tar.gz",
--    "unpack": true
--},
--{
--    "size": 1051205,
--    "digest": "e7d2e003ec60fce5a75a6a23711d7f9b155e898faebcf55f3abdd912ef513f4e0cf43daca8f9da7179a7a4efe6e4a625a532d051349818847df1364eb5b326de",
--    "algorithm": "sha512",
--    "filename": "pip-6.1.1.tar.gz",
--    "unpack": true
--},
--{
--    "size": 26912,
--    "digest": "9d730ed7852d4d217aaddda959cd5f871ef1b26dd6c513a3780bbb04a5a93a49d6b78e95c2274451a1311c10cc0a72755b269dc9af62640474e6e73a1abec370",
--    "algorithm": "sha512",
--    "filename": "peep-2.4.1.tar.gz",
--    "unpack": false
--}
--]
--EOF
--
--cd $BUILD
--cd setuptools-18.1
--python2.7 setup.py install
--# NOTE: latest peep is not compatible with pip>=7.0
--# https://github.com/erikrose/peep/pull/94
--
--cd $BUILD
--cd pip-6.1.1
--python2.7 setup.py install
--
--cd $BUILD
--pip2.7 install peep-2.4.1.tar.gz
--
--# Peep (latest)
--cd $BUILD
--pip2.7 install peep
--
--# remaining Python utilities are installed with `peep` from upstream
--# repositories; peep verifies file integrity for us
--cat >requirements.txt <<'EOF'
--# sha256: 90pZQ6kAXB6Je8-H9-ivfgDAb6l3e5rWkfafn6VKh9g
--virtualenv==13.1.2
--
--# sha256: wJnELXTi1SC2HdNyzZlrD6dgXAZheDT9exPHm5qaWzA
--mercurial==3.7.3
--EOF
--peep install -r requirements.txt
--
--# TC-VCS
--npm install -g taskcluster-vcs@2.3.18
--
--# Ninja
--cd $BUILD
--tooltool_fetch <<'EOF'
--[
--{
--    "size": 174501,
--    "digest": "551a9e14b95c2d2ddad6bee0f939a45614cce86719748dc580192dd122f3671e3d95fd6a6fb3facb2d314ba100d61a004af4df77f59df119b1b95c6fe8c38875",
--    "algorithm": "sha512",
--    "filename": "ninja-1.6.0.tar.gz",
--    "unpack": true
--}
--]
--EOF
--cd ninja-1.6.0
--./configure.py --bootstrap
--cp ninja /usr/local/bin/ninja
--# Old versions of CMake can only find ninja in this location!
--ln -s /usr/local/bin/ninja /usr/local/bin/ninja-build
--
--# note that TC will replace workspace with a cache mount; there's no sense
--# creating anything inside there
--mkdir -p /builds/worker/workspace
--chown worker:worker /builds/worker/workspace
--
--# /builds is *not* replaced with a mount in the docker container. The worker
--# user writes to lots of subdirectories, though, so it's owned by that user
--mkdir -p /builds
--chown worker:worker /builds
--
--# remove packages installed for the builds above
--yum shell -y <<'EOF'
--remove bzip2-devel
--remove openssl-devel
--remove xz-libs
--remove autoconf
--remove perl-ExtUtils-MakeMaker
--remove gettext-devel
--remove sqlite-devel
--remove perl-devel
--EOF
--
--# clean up caches from all that downloading and building
--cd /
--rm -rf $BUILD ~/.ccache ~/.cache ~/.npm
--yum clean all
--rm $0
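
tooltool_fetch above feeds a JSON manifest (size, digest, algorithm, filename) to tooltool.py on stdin. As a hedged sketch of the integrity check implied by that manifest format (not tooltool's actual code), verifying a downloaded file could look like:

    import hashlib, json, os

    def verify(manifest_path):
        """Check each manifest entry against the file on disk."""
        with open(manifest_path) as f:
            entries = json.load(f)
        for e in entries:
            name = e["filename"]
            if os.path.getsize(name) != e["size"]:
                raise ValueError(f"{name}: size mismatch")
            h = hashlib.new(e["algorithm"])  # "sha512" in the manifests above
            with open(name, "rb") as f:
                for chunk in iter(lambda: f.read(1 << 20), b""):
                    h.update(chunk)
            if h.hexdigest() != e["digest"]:
                raise ValueError(f"{name}: digest mismatch")
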
-diff --git a/taskcluster/docker/decision/Dockerfile b/taskcluster/docker/decision/Dockerfile
-deleted file mode 100644
---- a/taskcluster/docker/decision/Dockerfile
-+++ /dev/null
-@@ -1,28 +0,0 @@
--FROM          ubuntu:16.04
--MAINTAINER    Greg Arndt <garndt@mozilla.com>
--
--# Add worker user
--RUN useradd -d /home/worker -s /bin/bash -m worker
--RUN mkdir /home/worker/artifacts && chown worker:worker /home/worker/artifacts
--
--# %include python/mozbuild/mozbuild/action/tooltool.py
--ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /tmp/tooltool.py
--
--# %include testing/mozharness/external_tools/robustcheckout.py
--ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py
--
--# %include taskcluster/docker/recipes/install-mercurial.sh
--ADD topsrcdir/taskcluster/docker/recipes/install-mercurial.sh /tmp/install-mercurial.sh
--
--ADD system-setup.sh /tmp/system-setup.sh
--RUN bash /tmp/system-setup.sh
--
--# %include taskcluster/docker/recipes/run-task
--ADD topsrcdir/taskcluster/docker/recipes/run-task /home/worker/bin/run-task
--
--ENV PATH /home/worker/bin:$PATH
--ENV SHELL /bin/bash
--ENV HOME /home/worker
--
--# Set a default command useful for debugging
--CMD ["/bin/bash", "--login"]
-diff --git a/taskcluster/docker/decision/HASH b/taskcluster/docker/decision/HASH
-deleted file mode 100644
---- a/taskcluster/docker/decision/HASH
-+++ /dev/null
-@@ -1,1 +0,0 @@
--sha256:195d8439c8e90d59311d877bd2a8964849b2e43bfc6c234092618518d8b2891b
-diff --git a/taskcluster/docker/decision/README.md b/taskcluster/docker/decision/README.md
-deleted file mode 100644
---- a/taskcluster/docker/decision/README.md
-+++ /dev/null
-@@ -1,5 +0,0 @@
--# Decision Tasks
--
--The decision image is a "boostrapping" image for the in tree logic it
--deals with cloning gecko and the related utilities for providing an
--environment where we can run gecko.
-diff --git a/taskcluster/docker/decision/REGISTRY b/taskcluster/docker/decision/REGISTRY
-deleted file mode 100644
---- a/taskcluster/docker/decision/REGISTRY
-+++ /dev/null
-@@ -1,1 +0,0 @@
--taskcluster
-diff --git a/taskcluster/docker/decision/VERSION b/taskcluster/docker/decision/VERSION
-deleted file mode 100644
---- a/taskcluster/docker/decision/VERSION
-+++ /dev/null
-@@ -1,1 +0,0 @@
--0.1.8
-diff --git a/taskcluster/docker/decision/system-setup.sh b/taskcluster/docker/decision/system-setup.sh
-deleted file mode 100644
---- a/taskcluster/docker/decision/system-setup.sh
-+++ /dev/null
-@@ -1,29 +0,0 @@
--#!/usr/bin/env bash
--
--set -v -e
--
--test `whoami` == 'root'
--
--apt-get update
--apt-get install -y --force-yes --no-install-recommends \
--    ca-certificates \
--    python \
--    sudo
--
--BUILD=/root/build
--mkdir $BUILD
--
--tooltool_fetch() {
--    cat >manifest.tt
--    python2.7 /tmp/tooltool.py fetch
--    rm manifest.tt
--}
--
--cd $BUILD
--. /tmp/install-mercurial.sh
--
--cd /
--rm -rf $BUILD
--apt-get clean
--apt-get autoclean
--rm $0
-diff --git a/taskcluster/docker/desktop-build/Dockerfile b/taskcluster/docker/desktop-build/Dockerfile
-deleted file mode 100644
---- a/taskcluster/docker/desktop-build/Dockerfile
-+++ /dev/null
-@@ -1,77 +0,0 @@
--# TODO remove VOLUME below when the base image is updated next.
--FROM          taskcluster/centos6-build-upd:0.1.7.20170801103900
--MAINTAINER    Dustin J. Mitchell <dustin@mozilla.com>
--
--# TODO remove when base image is updated
--VOLUME /home/worker/workspace
--VOLUME /home/worker/tooltool-cache
--
--# Add build scripts; these are the entry points from the taskcluster worker, and
--# operate on environment variables
--ADD             bin /home/worker/bin
--RUN             chmod +x /home/worker/bin/*
--
--# %include python/mozbuild/mozbuild/action/tooltool.py
--ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /builds/tooltool.py
--ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /setup/tooltool.py
--
--# %include testing/mozharness/external_tools/robustcheckout.py
--ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py
--
--# %include taskcluster/docker/recipes/common.sh
--ADD topsrcdir/taskcluster/docker/recipes/common.sh /setup/common.sh
--
--# %include taskcluster/docker/recipes/install-mercurial.sh
--ADD topsrcdir/taskcluster/docker/recipes/install-mercurial.sh /setup/install-mercurial.sh
--
--# %include taskcluster/docker/recipes/install-make.sh
--ADD topsrcdir/taskcluster/docker/recipes/install-make.sh /setup/install-make.sh
--
--# %include taskcluster/docker/recipes/install-cmake.sh
--ADD topsrcdir/taskcluster/docker/recipes/install-cmake.sh /setup/install-cmake.sh
--
--# %include taskcluster/docker/recipes/centos6-build-system-setup.sh
--ADD topsrcdir/taskcluster/docker/recipes/centos6-build-system-setup.sh /setup/system-setup.sh
--
--# %include taskcluster/docker/recipes/centos-install-debug-symbols.sh
--ADD topsrcdir/taskcluster/docker/recipes/centos-install-debug-symbols.sh /setup/install-debug-symbols.sh
--
--# TODO remove once base image doesn't install Mercurial
--RUN pip uninstall -y Mercurial
--
--RUN bash /setup/system-setup.sh
--
--# Add wrapper scripts for xvfb allowing tasks to easily retry starting up xvfb
--# %include taskcluster/docker/recipes/xvfb.sh
--ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /builds/worker/scripts/xvfb.sh
--
--# %include taskcluster/docker/recipes/run-task
--ADD topsrcdir/taskcluster/docker/recipes/run-task /builds/worker/bin/run-task
--
--# Add configuration
--COPY            dot-config                    /builds/worker/.config
--
--# Generate machine uuid file
--RUN dbus-uuidgen --ensure=/var/lib/dbus/machine-id
--
--# Stubbed out credentials; mozharness looks for this file and issues a WARNING
--# if it's not found, which causes the build to fail.  Note that this needs to
--# be in the parent of the workspace directory and in the directory where
--# mozharness is run (not its --work-dir).  See Bug 1169652.
--ADD           oauth.txt /builds/worker/
--
--# stubbed out buildprops, which keeps mozharness from choking
--# Note that this needs to be in the parent of the workspace directory and in
--# the directory where mozharness is run (not its --work-dir)
--ADD           buildprops.json /builds/worker/
--
--# Move installation to base centos6-build image once Bug 1272629 is fixed
--# Install the screen package here to use with xvfb.
--# Install bison to build binutils.
--RUN yum install -y bison screen
--
--# Install libtool.
--RUN yum install -y libtool
--
--# Set a default command useful for debugging
--CMD ["/bin/bash", "--login"]
-diff --git a/taskcluster/docker/desktop-build/bin/build.sh b/taskcluster/docker/desktop-build/bin/build.sh
-deleted file mode 100644
---- a/taskcluster/docker/desktop-build/bin/build.sh
-+++ /dev/null
-@@ -1,36 +0,0 @@
--#! /bin/bash -vex
--
--set -x -e -v
--
--# Relative path to in-tree script
--: JOB_SCRIPT                ${JOB_SCRIPT:=taskcluster/scripts/builder/build-linux.sh}
--
--script_args="${@}"
--
--# TODO: when bug 1093833 is solved and tasks can run as non-root, reduce this
--# to a simple fail-if-root check
--if [ $(id -u) = 0 ]; then
--    # each of the caches we have mounted are owned by root, so update that ownership
--    # to 'worker'
--    for cache in /builds/worker/.tc-vcs /builds/worker/workspace /builds/worker/tooltool-cache; do
--        if [ -d $cache ]; then
--            # -R probably isn't necessary forever, but it fixes some poisoned
--            # caches for now
--            chown -R worker:worker $cache
--        fi
--    done
--
--    # ..then drop privileges by re-running this script
--    exec su worker -c "/builds/worker/bin/build.sh $script_args"
--fi
--
--####
--# The default build works for any fx_desktop_build based mozharness job:
--# via build-linux.sh
--####
--
--. $HOME/bin/checkout-sources.sh
--
--script=$WORKSPACE/build/src/$JOB_SCRIPT
--chmod +x $script
--exec $script $script_args
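
build.sh fixes cache ownership while still root and then re-executes itself as the unprivileged worker user. The same drop-privileges-by-re-exec pattern, sketched in Python under the script's assumptions (a 'worker' user and the cache paths listed above):

    import os, subprocess, sys

    CACHES = ["/builds/worker/workspace", "/builds/worker/tooltool-cache"]

    if os.geteuid() == 0:
        for cache in CACHES:
            if os.path.isdir(cache):
                subprocess.check_call(["chown", "-R", "worker:worker", cache])
        # hand off to the worker user, replacing the current process
        os.execvp("su", ["su", "worker", "-c", " ".join(["python3"] + sys.argv)])
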
-diff --git a/taskcluster/docker/desktop-build/bin/checkout-script.sh b/taskcluster/docker/desktop-build/bin/checkout-script.sh
-deleted file mode 100644
---- a/taskcluster/docker/desktop-build/bin/checkout-script.sh
-+++ /dev/null
-@@ -1,17 +0,0 @@
--#! /bin/bash -vex
--
--set -x -e
--
--# Inputs, with defaults
--
--: GECKO_HEAD_REPOSITORY              ${GECKO_HEAD_REPOSITORY:=https://hg.mozilla.org/mozilla-central}
--: GECKO_HEAD_REV                ${GECKO_HEAD_REV:=default}
--
--: SCRIPT_DOWNLOAD_PATH          ${SCRIPT_DOWNLOAD_PATH:=$PWD}
--: SCRIPT_PATH                   ${SCRIPT_PATH:?"script path must be set"}
--set -v
--
--# download script from the gecko repository
--url=${GECKO_HEAD_REPOSITORY}/raw-file/${GECKO_HEAD_REV}/${SCRIPT_PATH}
--wget --directory-prefix=${SCRIPT_DOWNLOAD_PATH} $url
--chmod +x `basename ${SCRIPT_PATH}`
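
checkout-script.sh builds a raw-file URL from repository, revision, and in-tree path, then downloads the script and marks it executable. The equivalent steps in Python, using the script's own defaults (the SCRIPT_PATH value below is only an example):

    import os, pathlib, stat, urllib.request

    repo = os.environ.get("GECKO_HEAD_REPOSITORY",
                          "https://hg.mozilla.org/mozilla-central")
    rev = os.environ.get("GECKO_HEAD_REV", "default")
    script_path = "taskcluster/scripts/builder/build-linux.sh"  # example only

    url = f"{repo}/raw-file/{rev}/{script_path}"
    dest = pathlib.Path(pathlib.Path(script_path).name)
    urllib.request.urlretrieve(url, dest)
    dest.chmod(dest.stat().st_mode | stat.S_IXUSR)
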
-diff --git a/taskcluster/docker/desktop-build/bin/checkout-sources.sh b/taskcluster/docker/desktop-build/bin/checkout-sources.sh
-deleted file mode 100644
---- a/taskcluster/docker/desktop-build/bin/checkout-sources.sh
-+++ /dev/null
-@@ -1,55 +0,0 @@
--#! /bin/bash -vex
--
--set -x -e
--
--# Inputs, with defaults
--
--# mozharness builds use two repositories: gecko (source)
--# and build-tools (miscellaneous). For each, specify *_REPOSITORY.  If the
--# revision is not in the standard repo for the codebase, specify *_BASE_REPO as
--# the canonical repo to clone and *_HEAD_REPO as the repo containing the
--# desired revision.  For Mercurial clones, only *_HEAD_REV is required; for Git
--# clones, specify the branch name to fetch as *_HEAD_REF and the desired sha1
--# as *_HEAD_REV.
--
--: GECKO_REPOSITORY              ${GECKO_REPOSITORY:=https://hg.mozilla.org/mozilla-central}
--: GECKO_BASE_REPOSITORY         ${GECKO_BASE_REPOSITORY:=${GECKO_REPOSITORY}}
--: GECKO_HEAD_REPOSITORY         ${GECKO_HEAD_REPOSITORY:=${GECKO_REPOSITORY}}
--: GECKO_HEAD_REV                ${GECKO_HEAD_REV:=default}
--: GECKO_HEAD_REF                ${GECKO_HEAD_REF:=${GECKO_HEAD_REV}}
--
--: TOOLS_REPOSITORY              ${TOOLS_REPOSITORY:=https://hg.mozilla.org/build/tools}
--: TOOLS_BASE_REPOSITORY         ${TOOLS_BASE_REPOSITORY:=${TOOLS_REPOSITORY}}
--: TOOLS_HEAD_REPOSITORY         ${TOOLS_HEAD_REPOSITORY:=${TOOLS_REPOSITORY}}
--: TOOLS_HEAD_REV                ${TOOLS_HEAD_REV:=default}
--: TOOLS_HEAD_REF                ${TOOLS_HEAD_REF:=${TOOLS_HEAD_REV}}
--: TOOLS_DISABLE                 ${TOOLS_DISABLE:=false}
--
--: WORKSPACE                     ${WORKSPACE:=/builds/worker/workspace}
--
--set -v
--
--# check out tools where mozharness expects it to be ($PWD/build/tools and $WORKSPACE/build/tools)
--if [ ! "$TOOLS_DISABLE" = true ]
--then
--    tc-vcs checkout $WORKSPACE/build/tools $TOOLS_BASE_REPOSITORY $TOOLS_HEAD_REPOSITORY $TOOLS_HEAD_REV $TOOLS_HEAD_REF
--
--    if [ ! -d build ]; then
--        mkdir -p build
--        ln -s $WORKSPACE/build/tools build/tools
--    fi
--fi
--
--# TODO - include tools repository in EXTRA_CHECKOUT_REPOSITORIES list
--for extra_repo in $EXTRA_CHECKOUT_REPOSITORIES; do
--    BASE_REPO="${extra_repo}_BASE_REPOSITORY"
--    HEAD_REPO="${extra_repo}_HEAD_REPOSITORY"
--    HEAD_REV="${extra_repo}_HEAD_REV"
--    HEAD_REF="${extra_repo}_HEAD_REF"
--    DEST_DIR="${extra_repo}_DEST_DIR"
--
--    tc-vcs checkout ${!DEST_DIR} ${!BASE_REPO} ${!HEAD_REPO} ${!HEAD_REV} ${!HEAD_REF}
--done
--
--export GECKO_DIR=$WORKSPACE/build/src
--tc-vcs checkout $GECKO_DIR $GECKO_BASE_REPOSITORY $GECKO_HEAD_REPOSITORY $GECKO_HEAD_REV $GECKO_HEAD_REF
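
Two bash idioms carry this script: `: ${VAR:=default}` assigns a default only when the variable is unset, and `${!NAME}` does an indirect lookup for the per-repository variables. The same behaviour in Python, with variable names taken from the script:

    import os

    DEFAULTS = {
        "GECKO_REPOSITORY": "https://hg.mozilla.org/mozilla-central",
        "GECKO_HEAD_REV": "default",
        "WORKSPACE": "/builds/worker/workspace",
    }
    for name, value in DEFAULTS.items():
        os.environ.setdefault(name, value)  # keep any caller-provided value

    # indirect lookup, like bash ${!VAR}: build the name, then read it
    for repo in os.environ.get("EXTRA_CHECKOUT_REPOSITORIES", "").split():
        head_rev = os.environ.get(f"{repo}_HEAD_REV", "default")
        print(repo, "->", head_rev)
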
-diff --git a/taskcluster/docker/desktop-build/buildprops.json b/taskcluster/docker/desktop-build/buildprops.json
-deleted file mode 100644
---- a/taskcluster/docker/desktop-build/buildprops.json
-+++ /dev/null
-@@ -1,9 +0,0 @@
--{
--  "properties": {
--    "buildername": ""
--  },
--  "sourcestamp": {
--    "changes": []
--  },
--  "comments": "TaskCluster Job"
--}
-diff --git a/taskcluster/docker/desktop-build/dot-config/pip/pip.conf b/taskcluster/docker/desktop-build/dot-config/pip/pip.conf
-deleted file mode 100644
---- a/taskcluster/docker/desktop-build/dot-config/pip/pip.conf
-+++ /dev/null
-@@ -1,2 +0,0 @@
--[global]
--disable-pip-version-check = true
-diff --git a/taskcluster/docker/desktop-build/oauth.txt b/taskcluster/docker/desktop-build/oauth.txt
-deleted file mode 100644
---- a/taskcluster/docker/desktop-build/oauth.txt
-+++ /dev/null
-@@ -1,2 +0,0 @@
--taskcluster_clientId = None
--taskcluster_accessToken = None
-diff --git a/taskcluster/docker/desktop-test/Dockerfile b/taskcluster/docker/desktop-test/Dockerfile
-deleted file mode 100644
---- a/taskcluster/docker/desktop-test/Dockerfile
-+++ /dev/null
-@@ -1,108 +0,0 @@
--FROM          ubuntu:12.04
--MAINTAINER    Jonas Finnemann Jensen <jopsen@gmail.com>
--
--RUN useradd -d /home/worker -s /bin/bash -m worker
--WORKDIR /home/worker
--
--# %include python/mozbuild/mozbuild/action/tooltool.py
--ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /setup/tooltool.py
--
--# %include testing/mozharness/external_tools/robustcheckout.py
--ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py
--
--# %include taskcluster/docker/recipes/install-mercurial.sh
--ADD topsrcdir/taskcluster/docker/recipes/install-mercurial.sh /tmp/install-mercurial.sh
--
--# Add wrapper scripts for xvfb allowing tasks to easily retry starting up xvfb
--# %include taskcluster/docker/recipes/xvfb.sh
--ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /home/worker/scripts/xvfb.sh
--
--# Add the tooltool manifest containing the minidump_stackwalk binary.
--# %include testing/config/tooltool-manifests/linux64/releng.manifest
--ADD topsrcdir/testing/config/tooltool-manifests/linux64/releng.manifest /tmp/minidump_stackwalk.manifest
--
--# %include taskcluster/docker/recipes/ubuntu1204-test-system-setup.sh
--ADD topsrcdir/taskcluster/docker/recipes/ubuntu1204-test-system-setup.sh /setup/system-setup.sh
--RUN bash /setup/system-setup.sh
--
--# %include taskcluster/docker/recipes/run-task
--ADD topsrcdir/taskcluster/docker/recipes/run-task /home/worker/bin/run-task
--
--# %include taskcluster/scripts/tester/test-linux.sh
--ADD topsrcdir/taskcluster/scripts/tester/test-linux.sh /home/worker/bin/test-linux.sh
--
--# This will create a host mounted filesystem when the cache is stripped
--# on Try. This cancels out some of the performance losses of aufs. See
--# bug 1291940.
--VOLUME /home/worker/checkouts
--VOLUME /home/worker/workspace
--
--# Set variables normally configured at login by the shell's parent process;
--# these are taken from the GNU su manual
--ENV           HOME          /home/worker
--ENV           SHELL         /bin/bash
--ENV           USER          worker
--ENV           LOGNAME       worker
--ENV           HOSTNAME      taskcluster-worker
--ENV           LANG          en_US.UTF-8
--ENV           LC_ALL        en_US.UTF-8
--
--# Add utilities and configuration
--COPY           dot-files/config              /home/worker/.config
--COPY           dot-files/pulse               /home/worker/.pulse
--RUN            chmod +x bin/*
--# TODO: remove this when buildbot is gone
--COPY           buildprops.json               /home/worker/buildprops.json
--COPY           tc-vcs-config.yml /etc/taskcluster-vcs.yml
--
--# TODO: remove
--ADD            https://raw.githubusercontent.com/taskcluster/buildbot-step/master/buildbot_step /home/worker/bin/buildbot_step
--RUN chmod u+x /home/worker/bin/buildbot_step
--
--# allow the worker user to access video devices
--RUN usermod -a -G video worker
--
--RUN mkdir Documents; mkdir Pictures; mkdir Music; mkdir Videos; mkdir artifacts
--
--# install tc-vcs and tc-npm-cache
--RUN npm install -g taskcluster-vcs@2.3.12 \
-- && npm install -g taskcluster-npm-cache@1.1.14 \
-- && rm -rf ~/.npm
--ENV PATH $PATH:/home/worker/bin
--
--# TODO Re-enable worker when bug 1093833 lands
--#USER          worker
--
--# clean up
--RUN rm -Rf .cache && mkdir -p .cache
--
--# Disable Ubuntu update prompt
--# http://askubuntu.com/questions/515161/ubuntu-12-04-disable-release-notification-of-14-04-in-update-manager
--ADD release-upgrades /etc/update-manager/release-upgrades
--
--# Disable tools with on-login popups that interfere with tests; see bug 1240084 and bug 984944.
--ADD jockey-gtk.desktop deja-dup-monitor.desktop /etc/xdg/autostart/
--
--# In test.sh we accept START_VNC to start a vnc daemon.
--# Exposing this port allows it to work.
--EXPOSE 5900
--
--# This helps avoid forgetting to set DISPLAY=:0 when running
--# tests outside of test.sh
--ENV DISPLAY :0
--
--# Disable apport (Ubuntu app crash reporter) to avoid stealing focus from test runs
--ADD apport /etc/default/apport
--
--# Disable font antialiasing for now to match releng's setup
--ADD fonts.conf /home/worker/.fonts.conf
--
--# Set up first-run experience for interactive mode
--ADD motd /etc/taskcluster-motd
--ADD taskcluster-interactive-shell /bin/taskcluster-interactive-shell
--RUN chmod +x /bin/taskcluster-interactive-shell
--
--RUN chown -R worker:worker /home/worker
--
--# Set a default command useful for debugging
--CMD ["/bin/bash", "--login"]
-diff --git a/taskcluster/docker/desktop-test/apport b/taskcluster/docker/desktop-test/apport
-deleted file mode 100644
---- a/taskcluster/docker/desktop-test/apport
-+++ /dev/null
-@@ -1,1 +0,0 @@
--enabled=0
-diff --git a/taskcluster/docker/desktop-test/buildprops.json b/taskcluster/docker/desktop-test/buildprops.json
-deleted file mode 100644
---- a/taskcluster/docker/desktop-test/buildprops.json
-+++ /dev/null
-@@ -1,8 +0,0 @@
--{
--  "properties": {
--    "buildername": ""
--  },
--  "sourcestamp": {
--    "changes": []
--  }
--}
-diff --git a/taskcluster/docker/desktop-test/deja-dup-monitor.desktop b/taskcluster/docker/desktop-test/deja-dup-monitor.desktop
-deleted file mode 100644
---- a/taskcluster/docker/desktop-test/deja-dup-monitor.desktop
-+++ /dev/null
-@@ -1,19 +0,0 @@
--[Desktop Entry]
--Version=1.0
--X-Ubuntu-Gettext-Domain=deja-dup
--
--Name=Backup Monitor
--Comment=Schedules backups at regular intervals
--
--Icon=deja-dup
--TryExec=/usr/lib/deja-dup/deja-dup/deja-dup-monitor
--Exec=/usr/lib/deja-dup/deja-dup/deja-dup-monitor
--
--# Bug 984944/1240084 - It prevents taking screenshots
--X-GNOME-Autostart-Delay=false
--
--StartupNotify=false
--NoDisplay=true
--
--Type=Application
--Categories=System;Utility;Archiving;
-diff --git a/taskcluster/docker/desktop-test/dot-files/config/pip/pip.conf b/taskcluster/docker/desktop-test/dot-files/config/pip/pip.conf
-deleted file mode 100644
---- a/taskcluster/docker/desktop-test/dot-files/config/pip/pip.conf
-+++ /dev/null
-@@ -1,2 +0,0 @@
--[global]
--disable-pip-version-check = true
-diff --git a/taskcluster/docker/desktop-test/dot-files/config/user-dirs.dirs b/taskcluster/docker/desktop-test/dot-files/config/user-dirs.dirs
-deleted file mode 100644
---- a/taskcluster/docker/desktop-test/dot-files/config/user-dirs.dirs
-+++ /dev/null
-@@ -1,15 +0,0 @@
--# This file is written by xdg-user-dirs-update
--# If you want to change or add directories, just edit the line you're
--# interested in. All local changes will be retained on the next run
--# Format is XDG_xxx_DIR="$HOME/yyy", where yyy is a shell-escaped
--# homedir-relative path, or XDG_xxx_DIR="/yyy", where /yyy is an
--# absolute path. No other format is supported.
--
--XDG_DESKTOP_DIR="$HOME/Desktop"
--XDG_DOWNLOAD_DIR="$HOME/Downloads"
--XDG_TEMPLATES_DIR="$HOME/Templates"
--XDG_PUBLICSHARE_DIR="$HOME/Public"
--XDG_DOCUMENTS_DIR="$HOME/Documents"
--XDG_MUSIC_DIR="$HOME/Music"
--XDG_PICTURES_DIR="$HOME/Pictures"
--XDG_VIDEOS_DIR="$HOME/Videos"
-diff --git a/taskcluster/docker/desktop-test/dot-files/config/user-dirs.locale b/taskcluster/docker/desktop-test/dot-files/config/user-dirs.locale
-deleted file mode 100644
---- a/taskcluster/docker/desktop-test/dot-files/config/user-dirs.locale
-+++ /dev/null
-@@ -1,1 +0,0 @@
--en_US
-diff --git a/taskcluster/docker/desktop-test/dot-files/pulse/default.pa b/taskcluster/docker/desktop-test/dot-files/pulse/default.pa
-deleted file mode 100644
---- a/taskcluster/docker/desktop-test/dot-files/pulse/default.pa
-+++ /dev/null
-@@ -1,164 +0,0 @@
--#!/usr/bin/pulseaudio -nF
--#
--# This file is part of PulseAudio.
--#
--# PulseAudio is free software; you can redistribute it and/or modify it
--# under the terms of the GNU Lesser General Public License as published by
--# the Free Software Foundation; either version 2 of the License, or
--# (at your option) any later version.
--#
--# PulseAudio is distributed in the hope that it will be useful, but
--# WITHOUT ANY WARRANTY; without even the implied warranty of
--# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
--# General Public License for more details.
--#
--# You should have received a copy of the GNU Lesser General Public License
--# along with PulseAudio; if not, write to the Free Software Foundation,
--# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
--
--# This startup script is used only if PulseAudio is started per-user
--# (i.e. not in system mode)
--
--.nofail
--
--### Load something into the sample cache
--#load-sample-lazy x11-bell /usr/share/sounds/gtk-events/activate.wav
--#load-sample-lazy pulse-hotplug /usr/share/sounds/startup3.wav
--#load-sample-lazy pulse-coldplug /usr/share/sounds/startup3.wav
--#load-sample-lazy pulse-access /usr/share/sounds/generic.wav
--
--.fail
--
--### Automatically restore the volume of streams and devices
--load-module module-device-restore
--load-module module-stream-restore
--load-module module-card-restore
--
--### Automatically augment property information from .desktop files
--### stored in /usr/share/application
--load-module module-augment-properties
--
--### Load audio drivers statically
--### (it's probably better to not load these drivers manually, but instead
--### use module-udev-detect -- see below -- for doing this automatically)
--#load-module module-alsa-sink
--#load-module module-alsa-source device=hw:1,0
--#load-module module-oss device="/dev/dsp" sink_name=output source_name=input
--#load-module module-oss-mmap device="/dev/dsp" sink_name=output source_name=input
--#load-module module-null-sink
--#load-module module-pipe-sink
--
--### Automatically load driver modules depending on the hardware available
--.ifexists module-udev-detect.so
--load-module module-udev-detect
--.else
--### Use the static hardware detection module (for systems that lack udev/hal support)
--load-module module-detect
--.endif
--
--### Automatically connect sink and source if JACK server is present
--.ifexists module-jackdbus-detect.so
--.nofail
--load-module module-jackdbus-detect
--.fail
--.endif
--
--### Automatically load driver modules for Bluetooth hardware
--# This module causes a pulseaudio startup failure on "gecko-tester"
--#.ifexists module-bluetooth-discover.so
--#load-module module-bluetooth-discover
--#.endif
--
--### Load several protocols
--.ifexists module-esound-protocol-unix.so
--load-module module-esound-protocol-unix
--.endif
--load-module module-native-protocol-unix
--
--### Network access (may be configured with paprefs, so leave this commented
--### here if you plan to use paprefs)
--#load-module module-esound-protocol-tcp
--#load-module module-native-protocol-tcp
--#load-module module-zeroconf-publish
--
--### Load the RTP receiver module (also configured via paprefs, see above)
--#load-module module-rtp-recv
--
--### Load the RTP sender module (also configured via paprefs, see above)
--#load-module module-null-sink sink_name=rtp format=s16be channels=2 rate=44100 sink_properties="device.description='RTP Multicast Sink'"
--#load-module module-rtp-send source=rtp.monitor
--
--### Load additional modules from GConf settings. This can be configured with the paprefs tool.
--### Please keep in mind that the modules configured by paprefs might conflict with manually
--### loaded modules.
--.ifexists module-gconf.so
--.nofail
--load-module module-gconf
--.fail
--.endif
--
--### Automatically restore the default sink/source when changed by the user
--### during runtime
--### NOTE: This should be loaded as early as possible so that subsequent modules
--### that look up the default sink/source get the right value
--load-module module-default-device-restore
--
--### Automatically move streams to the default sink if the sink they are
--### connected to dies, similar for sources
--load-module module-rescue-streams
--
--### Make sure we always have a sink around, even if it is a null sink.
--load-module module-always-sink
--
--### Honour intended role device property
--load-module module-intended-roles
--
--### Automatically suspend sinks/sources that become idle for too long
--load-module module-suspend-on-idle
--
--### If autoexit on idle is enabled we want to make sure we only quit
--### when no local session needs us anymore.
--# This module causes a pulseaudio startup failure on "gecko-tester"
--#.ifexists module-console-kit.so
--#load-module module-console-kit
--#.endif
--
--### Enable positioned event sounds
--load-module module-position-event-sounds
--
--### Cork music streams when a phone stream is active
--#load-module module-cork-music-on-phone
--
--### Modules to allow autoloading of filters (such as echo cancellation)
--### on demand. module-filter-heuristics tries to determine what filters
--### make sense, and module-filter-apply does the heavy-lifting of
--### loading modules and rerouting streams.
--load-module module-filter-heuristics
--load-module module-filter-apply
--
--### Load DBus protocol
--#.ifexists module-dbus-protocol.so
--#load-module module-dbus-protocol
--#.endif
--
--# X11 modules should not be started from default.pa so that one daemon
--# can be shared by multiple sessions.
--
--### Load X11 bell module
--#load-module module-x11-bell sample=bell-windowing-system
--
--### Register ourselves in the X11 session manager
--#load-module module-x11-xsmp
--
--### Publish connection data in the X11 root window
--#.ifexists module-x11-publish.so
--#.nofail
--#load-module module-x11-publish
--#.fail
--#.endif
--
--load-module module-switch-on-port-available
--
--### Make some devices default
--#set-default-sink output
--#set-default-source input
-diff --git a/taskcluster/docker/desktop-test/fonts.conf b/taskcluster/docker/desktop-test/fonts.conf
-deleted file mode 100644
---- a/taskcluster/docker/desktop-test/fonts.conf
-+++ /dev/null
-@@ -1,5 +0,0 @@
--<match target="font">
--  <edit name="antialias" mode="assign">
--   <bool>false</bool>
--  </edit>
--</match>
-diff --git a/taskcluster/docker/desktop-test/jockey-gtk.desktop b/taskcluster/docker/desktop-test/jockey-gtk.desktop
-deleted file mode 100644
---- a/taskcluster/docker/desktop-test/jockey-gtk.desktop
-+++ /dev/null
-@@ -1,15 +0,0 @@
--[Desktop Entry]
--Name=Check for new hardware drivers
--Comment=Notify about new hardware drivers available for the system
--Icon=jockey
--Exec=sh -c "test -e /var/cache/jockey/check || exec jockey-gtk --check"
--Terminal=false
--Type=Application
--Categories=System;Settings;GTK;HardwareSettings;
--NotShowIn=KDE;
--X-Ubuntu-Gettext-Domain=jockey
--
--# Bug 984944/1240084 - It prevents taking screenshots
--X-GNOME-Autostart-Delay=false
--
--NoDisplay=true
-diff --git a/taskcluster/docker/desktop-test/motd b/taskcluster/docker/desktop-test/motd
-deleted file mode 100644
---- a/taskcluster/docker/desktop-test/motd
-+++ /dev/null
-@@ -1,6 +0,0 @@
--Welcome to your taskcluster interactive shell! The regularly scheduled task
--has been paused to give you a chance to set up your debugging environment.
--
--For your convenience, the exact mozharness command needed for this task can
--be invoked using the 'run-mozharness' command.
--
-diff --git a/taskcluster/docker/desktop-test/release-upgrades b/taskcluster/docker/desktop-test/release-upgrades
-deleted file mode 100644
---- a/taskcluster/docker/desktop-test/release-upgrades
-+++ /dev/null
-@@ -1,17 +0,0 @@
--# Default behavior for the release upgrader.
--
--[DEFAULT]
--# Default prompting behavior, valid options:
--#
--#  never  - Never check for a new release.
--#  normal - Check to see if a new release is available.  If more than one new
--#           release is found, the release upgrader will attempt to upgrade to
--#           the release that immediately succeeds the currently-running
--#           release.
--#  lts    - Check to see if a new LTS release is available.  The upgrader
--#           will attempt to upgrade to the first LTS release available after
--#           the currently-running one.  Note that this option should not be
--#           used if the currently-running release is not itself an LTS
--#           release, since in that case the upgrader won't be able to
--#           determine if a newer release is available.
--Prompt=never
-diff --git a/taskcluster/docker/desktop-test/taskcluster-interactive-shell b/taskcluster/docker/desktop-test/taskcluster-interactive-shell
-deleted file mode 100644
---- a/taskcluster/docker/desktop-test/taskcluster-interactive-shell
-+++ /dev/null
-@@ -1,22 +0,0 @@
--#!/usr/bin/env bash
--
--download() {
--    name=`basename $1`
--    url=${GECKO_HEAD_REPOSITORY}/raw-file/${GECKO_HEAD_REV}/$1
--    if ! curl --fail --silent -o ./$name --retry 10 $url; then
--        fail "failed downloading $1 from ${GECKO_HEAD_REPOSITORY}"
--    fi
--}
--
--cd $HOME/bin;
--download taskcluster/scripts/tester/run-wizard;
--chmod +x run-wizard;
--./run-wizard;
--
--SPAWN="$SHELL";
--if [ "$SHELL" = "bash" ]; then
--  SPAWN="bash -li";
--fi;
--
--cd $HOME;
--exec $SPAWN;
-diff --git a/taskcluster/docker/desktop-test/tc-vcs-config.yml b/taskcluster/docker/desktop-test/tc-vcs-config.yml
-deleted file mode 100644
---- a/taskcluster/docker/desktop-test/tc-vcs-config.yml
-+++ /dev/null
-@@ -1,40 +0,0 @@
--# Default configuration used by the tc-vcs tools; these can be overridden by
--# passing the config you wish to use on the command line...
--git: git
--hg: hg
--
--repoCache:
--  # Repo url to clone when running repo init..
--  repoUrl: https://gerrit.googlesource.com/git-repo.git
--  # Version of repo to utilize...
--  repoRevision: master
--  # The root where all downloaded cache files are stored on the local machine...
--  cacheDir: '{{env.HOME}}/.tc-vcs-repo/'
--  # Name/prefixed used as part of the base url.
--  cacheName: sources/{{name}}.tar.gz
--  # Command used to upload the tarball
--  uploadTar: "curl --header 'Content-Type: application/x-tar' --header 'Content-Encoding: gzip' -X PUT --data-binary @'{{source}}' '{{url}}'"
--  # Large HTTP GET requests are often slower using node's built-in http layer, so
--  # we utilize a subprocess which is responsible for fetching...
--  get: curl --connect-timeout 30 --speed-limit 500000 -L -o {{dest}} {{url}}
--  # Used to create clone tarball
--  compress: tar -czf {{dest}} {{source}}
--  # All cache urls use tar + gz this is the command used to extract those files
--  # downloaded by the "get" command.
--  extract: tar -x -z -C {{dest}} -f {{source}}
--
--cloneCache:
--  # The root where all downloaded cache files are stored on the local machine...
--  cacheDir: '{{env.HOME}}/.tc-vcs/'
--  # Command used to upload the tarball
--  uploadTar: "curl --header 'Content-Type: application/x-tar' --header 'Content-Encoding: gzip' -X PUT --data-binary @'{{source}}' '{{url}}'"
--  # Large HTTP GET requests are often slower using node's built-in http layer, so
--  # we utilize a subprocess which is responsible for fetching...
--  get: curl --connect-timeout 30 --speed-limit 500000 -L -o {{dest}} {{url}}
--  # Used to create clone tarball
--  compress: tar -czf {{dest}} {{source}}
--  # All cache urls use tar + gz this is the command used to extract those files
--  # downloaded by the "get" command.
--  extract: tar -x -z --strip-components 1 -C {{dest}} -f {{source}}
--  # Name/prefixed used as part of the base url.
--  cacheName: clones/{{name}}.tar.gz
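
The {{env.HOME}} and {{name}} placeholders in this config are expanded by tc-vcs at runtime. A naive expansion sketch (the real substitution rules are tc-vcs internals; this only shows the shape):

    import os, re

    def expand(template, **names):
        def repl(m):
            key = m.group(1)
            if key.startswith("env."):
                return os.environ.get(key[4:], "")
            return str(names.get(key, m.group(0)))
        return re.sub(r"\{\{([\w.]+)\}\}", repl, template)

    print(expand("{{env.HOME}}/.tc-vcs-repo/"))
    print(expand("sources/{{name}}.tar.gz", name="mozilla-central"))
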
-diff --git a/taskcluster/docker/desktop-test/tester.env b/taskcluster/docker/desktop-test/tester.env
-deleted file mode 100644
---- a/taskcluster/docker/desktop-test/tester.env
-+++ /dev/null
-@@ -1,4 +0,0 @@
--GAIA_REV=tip
--GAIA_REF=tip
--GAIA_BASE_REPOSITORY=https://hg.mozilla.org/integration/gaia-central
--GAIA_HEAD_REPOSITORY=https://hg.mozilla.org/integration/gaia-central
-diff --git a/taskcluster/docker/desktop1604-test/Dockerfile b/taskcluster/docker/desktop1604-test/Dockerfile
-deleted file mode 100644
---- a/taskcluster/docker/desktop1604-test/Dockerfile
-+++ /dev/null
-@@ -1,117 +0,0 @@
--FROM          ubuntu:16.04
--MAINTAINER    Joel Maher <joel.maher@gmail.com>
--
--RUN useradd -d /home/worker -s /bin/bash -m worker
--WORKDIR /home/worker
--
--# %include python/mozbuild/mozbuild/action/tooltool.py
--ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /setup/tooltool.py
--
--# %include testing/mozharness/external_tools/robustcheckout.py
--ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py
--
--# %include taskcluster/docker/recipes/common.sh
--ADD topsrcdir/taskcluster/docker/recipes/common.sh /setup/common.sh
--
--# %include taskcluster/docker/recipes/install-mercurial.sh
--ADD topsrcdir/taskcluster/docker/recipes/install-mercurial.sh /setup/install-mercurial.sh
--
--# %include taskcluster/docker/recipes/install-node.sh
--ADD topsrcdir/taskcluster/docker/recipes/install-node.sh /setup/install-node.sh
--
--# Add the tooltool manifest containing the minidump_stackwalk binary.
--# %include testing/config/tooltool-manifests/linux64/releng.manifest
--ADD topsrcdir/testing/config/tooltool-manifests/linux64/releng.manifest /tmp/minidump_stackwalk.manifest
--
--# %include taskcluster/docker/recipes/ubuntu1604-test-system-setup.sh
--ADD topsrcdir/taskcluster/docker/recipes/ubuntu1604-test-system-setup.sh /setup/system-setup.sh
--RUN           bash /setup/system-setup.sh
--
--# Add wrapper scripts for xvfb allowing tasks to easily retry starting up xvfb
--# %include taskcluster/docker/recipes/xvfb.sh
--ADD topsrcdir/taskcluster/docker/recipes/xvfb.sh /home/worker/scripts/xvfb.sh
--
--# %include taskcluster/docker/recipes/run-task
--ADD topsrcdir/taskcluster/docker/recipes/run-task /home/worker/bin/run-task
--
--# %include taskcluster/scripts/tester/test-linux.sh
--ADD topsrcdir/taskcluster/scripts/tester/test-linux.sh /home/worker/bin/test-linux.sh
--
--# This will create a host mounted filesystem when the cache is stripped
--# on Try. This cancels out some of the performance losses of aufs. See
--# bug 1291940.
--VOLUME /home/worker/checkouts
--VOLUME /home/worker/workspace
--
--# Set variables normally configured at login by the shell's parent process;
--# these are taken from the GNU su manual
--ENV           HOME          /home/worker
--ENV           SHELL         /bin/bash
--ENV           USER          worker
--ENV           LOGNAME       worker
--ENV           HOSTNAME      taskcluster-worker
--ENV           LANG          en_US.UTF-8
--ENV           LC_ALL        en_US.UTF-8
--
--# Add utilities and configuration
--COPY           dot-files/config              /home/worker/.config
--COPY           dot-files/pulse               /home/worker/.pulse
--RUN            chmod +x bin/*
--# TODO: remove this when buildbot is gone
--COPY           buildprops.json               /home/worker/buildprops.json
--COPY           tc-vcs-config.yml /etc/taskcluster-vcs.yml
--
--# TODO: remove
--ADD            https://raw.githubusercontent.com/taskcluster/buildbot-step/master/buildbot_step /home/worker/bin/buildbot_step
--RUN chmod u+x /home/worker/bin/buildbot_step
--
--# allow the worker user to access video devices
--RUN usermod -a -G video worker
--
--RUN mkdir Documents; mkdir Pictures; mkdir Music; mkdir Videos; mkdir artifacts
--
--# install tc-vcs and tc-npm-cache
--RUN npm install -g taskcluster-vcs@2.3.12 \
-- && npm install -g taskcluster-npm-cache@1.1.14 \
-- && rm -rf ~/.npm
--ENV PATH $PATH:/home/worker/bin
--
--# TODO Re-enable worker when bug 1093833 lands
--#USER          worker
--
--# clean up
--RUN rm -Rf .cache && mkdir -p .cache
--
--# Disable Ubuntu update prompt
--# http://askubuntu.com/questions/515161/ubuntu-12-04-disable-release-notification-of-14-04-in-update-manager
--ADD release-upgrades /etc/update-manager/release-upgrades
--
--# Disable tools with on-login popups that interfere with tests; see bug 1240084 and bug 984944.
--ADD autostart/jockey-gtk.desktop autostart/deja-dup-monitor.desktop /etc/xdg/autostart/
--
--# Bug 1345105 - Do not run periodic update checks and downloads
--ADD autostart/gnome-software-service.desktop /etc/xdg/autostart/
--
--# In test.sh we accept START_VNC to start a vnc daemon.
--# Exposing this port allows it to work.
--EXPOSE 5900
--
--# This helps avoid forgetting to set DISPLAY=:0 when running
--# tests outside of test.sh
--ENV DISPLAY :0
--
--# Disable apport (Ubuntu app crash reporter) to avoid stealing focus from test runs
--ADD apport /etc/default/apport
--
--# Disable font antialiasing for now to match releng's setup
--ADD fonts.conf /builds/worker/.fonts.conf
--
--# Set up first-run experience for interactive mode
--ADD motd /etc/taskcluster-motd
--ADD taskcluster-interactive-shell /bin/taskcluster-interactive-shell
--RUN chmod +x /bin/taskcluster-interactive-shell
--
--RUN chown -R worker:worker /builds/worker
--
--# Set a default command useful for debugging
--CMD ["/bin/bash", "--login"]
-diff --git a/taskcluster/docker/desktop1604-test/apport b/taskcluster/docker/desktop1604-test/apport
-deleted file mode 100644
---- a/taskcluster/docker/desktop1604-test/apport
-+++ /dev/null
-@@ -1,1 +0,0 @@
--enabled=0
-diff --git a/taskcluster/docker/desktop1604-test/autostart/deja-dup-monitor.desktop b/taskcluster/docker/desktop1604-test/autostart/deja-dup-monitor.desktop
-deleted file mode 100644
---- a/taskcluster/docker/desktop1604-test/autostart/deja-dup-monitor.desktop
-+++ /dev/null
-@@ -1,19 +0,0 @@
--[Desktop Entry]
--Version=1.0
--X-Ubuntu-Gettext-Domain=deja-dup
--
--Name=Backup Monitor
--Comment=Schedules backups at regular intervals
--
--Icon=deja-dup
--TryExec=/usr/lib/deja-dup/deja-dup/deja-dup-monitor
--Exec=/usr/lib/deja-dup/deja-dup/deja-dup-monitor
--
--# Bug 984944/1240084 - It prevents taking screenshots
--X-GNOME-Autostart-Delay=false
--
--StartupNotify=false
--NoDisplay=true
--
--Type=Application
--Categories=System;Utility;Archiving;
-diff --git a/taskcluster/docker/desktop1604-test/autostart/gnome-software-service.desktop b/taskcluster/docker/desktop1604-test/autostart/gnome-software-service.desktop
-deleted file mode 100644
---- a/taskcluster/docker/desktop1604-test/autostart/gnome-software-service.desktop
-+++ /dev/null
-@@ -1,9 +0,0 @@
--[Desktop Entry]
--Type=Application
--Name=GNOME Software
--Exec=/usr/bin/gnome-software --gapplication-service
--OnlyShowIn=GNOME;Unity;
--X-Ubuntu-Gettext-Domain=gnome-software
--
--# Bug 1345105 - Do not run periodical update checks and downloads
--X-GNOME-Autostart-enabled=false
-diff --git a/taskcluster/docker/desktop1604-test/autostart/jockey-gtk.desktop b/taskcluster/docker/desktop1604-test/autostart/jockey-gtk.desktop
-deleted file mode 100644
---- a/taskcluster/docker/desktop1604-test/autostart/jockey-gtk.desktop
-+++ /dev/null
-@@ -1,15 +0,0 @@
--[Desktop Entry]
--Name=Check for new hardware drivers
--Comment=Notify about new hardware drivers available for the system
--Icon=jockey
--Exec=sh -c "test -e /var/cache/jockey/check || exec jockey-gtk --check"
--Terminal=false
--Type=Application
--Categories=System;Settings;GTK;HardwareSettings;
--NotShowIn=KDE;
--X-Ubuntu-Gettext-Domain=jockey
--
--# Bug 984944/1240084 - It prevents taking screenshots
--X-GNOME-Autostart-Delay=false
--
--NoDisplay=true
-diff --git a/taskcluster/docker/desktop1604-test/buildprops.json b/taskcluster/docker/desktop1604-test/buildprops.json
-deleted file mode 100644
---- a/taskcluster/docker/desktop1604-test/buildprops.json
-+++ /dev/null
-@@ -1,8 +0,0 @@
--{
--  "properties": {
--    "buildername": ""
--  },
--  "sourcestamp": {
--    "changes": []
--  }
--}
-diff --git a/taskcluster/docker/desktop1604-test/dot-files/config/pip/pip.conf b/taskcluster/docker/desktop1604-test/dot-files/config/pip/pip.conf
-deleted file mode 100644
---- a/taskcluster/docker/desktop1604-test/dot-files/config/pip/pip.conf
-+++ /dev/null
-@@ -1,2 +0,0 @@
--[global]
--disable-pip-version-check = true
-diff --git a/taskcluster/docker/desktop1604-test/dot-files/config/user-dirs.dirs b/taskcluster/docker/desktop1604-test/dot-files/config/user-dirs.dirs
-deleted file mode 100644
---- a/taskcluster/docker/desktop1604-test/dot-files/config/user-dirs.dirs
-+++ /dev/null
-@@ -1,15 +0,0 @@
--# This file is written by xdg-user-dirs-update
--# If you want to change or add directories, just edit the line you're
--# interested in. All local changes will be retained on the next run
--# Format is XDG_xxx_DIR="$HOME/yyy", where yyy is a shell-escaped
--# homedir-relative path, or XDG_xxx_DIR="/yyy", where /yyy is an
--# absolute path. No other format is supported.
--
--XDG_DESKTOP_DIR="$HOME/Desktop"
--XDG_DOWNLOAD_DIR="$HOME/Downloads"
--XDG_TEMPLATES_DIR="$HOME/Templates"
--XDG_PUBLICSHARE_DIR="$HOME/Public"
--XDG_DOCUMENTS_DIR="$HOME/Documents"
--XDG_MUSIC_DIR="$HOME/Music"
--XDG_PICTURES_DIR="$HOME/Pictures"
--XDG_VIDEOS_DIR="$HOME/Videos"
-diff --git a/taskcluster/docker/desktop1604-test/dot-files/config/user-dirs.locale b/taskcluster/docker/desktop1604-test/dot-files/config/user-dirs.locale
-deleted file mode 100644
---- a/taskcluster/docker/desktop1604-test/dot-files/config/user-dirs.locale
-+++ /dev/null
-@@ -1,1 +0,0 @@
--en_US
-diff --git a/taskcluster/docker/desktop1604-test/dot-files/pulse/default.pa b/taskcluster/docker/desktop1604-test/dot-files/pulse/default.pa
-deleted file mode 100644
---- a/taskcluster/docker/desktop1604-test/dot-files/pulse/default.pa
-+++ /dev/null
-@@ -1,164 +0,0 @@
--#!/usr/bin/pulseaudio -nF
--#
--# This file is part of PulseAudio.
--#
--# PulseAudio is free software; you can redistribute it and/or modify it
--# under the terms of the GNU Lesser General Public License as published by
--# the Free Software Foundation; either version 2 of the License, or
--# (at your option) any later version.
--#
--# PulseAudio is distributed in the hope that it will be useful, but
--# WITHOUT ANY WARRANTY; without even the implied warranty of
--# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
--# General Public License for more details.
--#
--# You should have received a copy of the GNU Lesser General Public License
--# along with PulseAudio; if not, write to the Free Software Foundation,
--# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
--
--# This startup script is used only if PulseAudio is started per-user
--# (i.e. not in system mode)
--
--.nofail
--
--### Load something into the sample cache
--#load-sample-lazy x11-bell /usr/share/sounds/gtk-events/activate.wav
--#load-sample-lazy pulse-hotplug /usr/share/sounds/startup3.wav
--#load-sample-lazy pulse-coldplug /usr/share/sounds/startup3.wav
--#load-sample-lazy pulse-access /usr/share/sounds/generic.wav
--
--.fail
--
--### Automatically restore the volume of streams and devices
--load-module module-device-restore
--load-module module-stream-restore
--load-module module-card-restore
--
--### Automatically augment property information from .desktop files
--### stored in /usr/share/application
--load-module module-augment-properties
--
--### Load audio drivers statically
--### (it's probably better to not load these drivers manually, but instead
--### use module-udev-detect -- see below -- for doing this automatically)
--#load-module module-alsa-sink
--#load-module module-alsa-source device=hw:1,0
--#load-module module-oss device="/dev/dsp" sink_name=output source_name=input
--#load-module module-oss-mmap device="/dev/dsp" sink_name=output source_name=input
--#load-module module-null-sink
--#load-module module-pipe-sink
--
--### Automatically load driver modules depending on the hardware available
--.ifexists module-udev-detect.so
--load-module module-udev-detect
--.else
--### Use the static hardware detection module (for systems that lack udev/hal support)
--load-module module-detect
--.endif
--
--### Automatically connect sink and source if JACK server is present
--.ifexists module-jackdbus-detect.so
--.nofail
--load-module module-jackdbus-detect
--.fail
--.endif
--
--### Automatically load driver modules for Bluetooth hardware
--# This module causes a pulseaudio startup failure on "gecko-tester"
--#.ifexists module-bluetooth-discover.so
--#load-module module-bluetooth-discover
--#.endif
--
--### Load several protocols
--.ifexists module-esound-protocol-unix.so
--load-module module-esound-protocol-unix
--.endif
--load-module module-native-protocol-unix
--
--### Network access (may be configured with paprefs, so leave this commented
--### here if you plan to use paprefs)
--#load-module module-esound-protocol-tcp
--#load-module module-native-protocol-tcp
--#load-module module-zeroconf-publish
--
--### Load the RTP receiver module (also configured via paprefs, see above)
--#load-module module-rtp-recv
--
--### Load the RTP sender module (also configured via paprefs, see above)
--#load-module module-null-sink sink_name=rtp format=s16be channels=2 rate=44100 sink_properties="device.description='RTP Multicast Sink'"
--#load-module module-rtp-send source=rtp.monitor
--
--### Load additional modules from GConf settings. This can be configured with the paprefs tool.
--### Please keep in mind that the modules configured by paprefs might conflict with manually
--### loaded modules.
--.ifexists module-gconf.so
--.nofail
--load-module module-gconf
--.fail
--.endif
--
--### Automatically restore the default sink/source when changed by the user
--### during runtime
--### NOTE: This should be loaded as early as possible so that subsequent modules
--### that look up the default sink/source get the right value
--load-module module-default-device-restore
--
--### Automatically move streams to the default sink if the sink they are
--### connected to dies, similar for sources
--load-module module-rescue-streams
--
--### Make sure we always have a sink around, even if it is a null sink.
--load-module module-always-sink
--
--### Honour intended role device property
--load-module module-intended-roles
--
--### Automatically suspend sinks/sources that become idle for too long
--load-module module-suspend-on-idle
--
--### If autoexit on idle is enabled we want to make sure we only quit
--### when no local session needs us anymore.
--# This module causes a pulseaudio startup failure on "gecko-tester"
--#.ifexists module-console-kit.so
--#load-module module-console-kit
--#.endif
--
--### Enable positioned event sounds
--load-module module-position-event-sounds
--
--### Cork music streams when a phone stream is active
--#load-module module-cork-music-on-phone
--
--### Modules to allow autoloading of filters (such as echo cancellation)
--### on demand. module-filter-heuristics tries to determine what filters
--### make sense, and module-filter-apply does the heavy-lifting of
--### loading modules and rerouting streams.
--load-module module-filter-heuristics
--load-module module-filter-apply
--
--### Load DBus protocol
--#.ifexists module-dbus-protocol.so
--#load-module module-dbus-protocol
--#.endif
--
--# X11 modules should not be started from default.pa so that one daemon
--# can be shared by multiple sessions.
--
--### Load X11 bell module
--#load-module module-x11-bell sample=bell-windowing-system
--
--### Register ourselves in the X11 session manager
--#load-module module-x11-xsmp
--
--### Publish connection data in the X11 root window
--#.ifexists module-x11-publish.so
--#.nofail
--#load-module module-x11-publish
--#.fail
--#.endif
--
--load-module module-switch-on-port-available
--
--### Make some devices default
--#set-default-sink output
--#set-default-source input
-diff --git a/taskcluster/docker/desktop1604-test/fonts.conf b/taskcluster/docker/desktop1604-test/fonts.conf
-deleted file mode 100644
---- a/taskcluster/docker/desktop1604-test/fonts.conf
-+++ /dev/null
-@@ -1,5 +0,0 @@
--<match target="font">
--  <edit name="antialias" mode="assign">
--   <bool>false</bool>
--  </edit>
--</match>
-diff --git a/taskcluster/docker/desktop1604-test/motd b/taskcluster/docker/desktop1604-test/motd
-deleted file mode 100644
---- a/taskcluster/docker/desktop1604-test/motd
-+++ /dev/null
-@@ -1,6 +0,0 @@
--Welcome to your taskcluster interactive shell! The regularly scheduled task
--has been paused to give you a chance to set up your debugging environment.
--
--For your convenience, the exact mozharness command needed for this task can
--be invoked using the 'run-mozharness' command.
--
-diff --git a/taskcluster/docker/desktop1604-test/release-upgrades b/taskcluster/docker/desktop1604-test/release-upgrades
-deleted file mode 100644
---- a/taskcluster/docker/desktop1604-test/release-upgrades
-+++ /dev/null
-@@ -1,17 +0,0 @@
--# Default behavior for the release upgrader.
--
--[DEFAULT]
--# Default prompting behavior, valid options:
--#
--#  never  - Never check for a new release.
--#  normal - Check to see if a new release is available.  If more than one new
--#           release is found, the release upgrader will attempt to upgrade to
--#           the release that immediately succeeds the currently-running
--#           release.
--#  lts    - Check to see if a new LTS release is available.  The upgrader
--#           will attempt to upgrade to the first LTS release available after
--#           the currently-running one.  Note that this option should not be
--#           used if the currently-running release is not itself an LTS
--#           release, since in that case the upgrader won't be able to
--#           determine if a newer release is available.
--Prompt=never
-diff --git a/taskcluster/docker/desktop1604-test/taskcluster-interactive-shell b/taskcluster/docker/desktop1604-test/taskcluster-interactive-shell
-deleted file mode 100644
---- a/taskcluster/docker/desktop1604-test/taskcluster-interactive-shell
-+++ /dev/null
-@@ -1,22 +0,0 @@
--#!/usr/bin/env bash
--
--download() {
--    name=`basename $1`
--    url=${GECKO_HEAD_REPOSITORY}/raw-file/${GECKO_HEAD_REV}/$1
--    if ! curl --fail --silent -o ./$name --retry 10 $url; then
--        fail "failed downloading $1 from ${GECKO_HEAD_REPOSITORY}"
--    fi
--}
--
--cd $HOME/bin;
--download taskcluster/scripts/tester/run-wizard;
--chmod +x run-wizard;
--./run-wizard;
--
--SPAWN="$SHELL";
--if [ "$SHELL" = "bash" ]; then
--  SPAWN="bash -li";
--fi;
--
--cd $HOME;
--exec $SPAWN;
-diff --git a/taskcluster/docker/desktop1604-test/tc-vcs-config.yml b/taskcluster/docker/desktop1604-test/tc-vcs-config.yml
-deleted file mode 100644
---- a/taskcluster/docker/desktop1604-test/tc-vcs-config.yml
-+++ /dev/null
-@@ -1,40 +0,0 @@
--# Default configuration used by the tc-vcs tools; these can be overridden by
--# passing the config you wish to use on the command line...
--git: git
--hg: hg
--
--repoCache:
--  # Repo url to clone when running repo init..
--  repoUrl: https://gerrit.googlesource.com/git-repo.git
--  # Version of repo to utilize...
--  repoRevision: master
--  # The root where all downloaded cache files are stored on the local machine...
--  cacheDir: '{{env.HOME}}/.tc-vcs-repo/'
--  # Name/prefixed used as part of the base url.
--  cacheName: sources/{{name}}.tar.gz
--  # Command used to upload the tarball
--  uploadTar: "curl --header 'Content-Type: application/x-tar' --header 'Content-Encoding: gzip' -X PUT --data-binary @'{{source}}' '{{url}}'"
--  # Large HTTP GET requests are often slower through node's built-in HTTP
--  # layer, so we utilize a subprocess which is responsible for fetching...
--  get: curl --connect-timeout 30 --speed-limit 500000 -L -o {{dest}} {{url}}
--  # Used to create clone tarball
--  compress: tar -czf {{dest}} {{source}}
--  # All cache urls use tar + gz this is the command used to extract those files
--  # downloaded by the "get" command.
--  extract: tar -x -z -C {{dest}} -f {{source}}
--
--cloneCache:
--  # The root where all downloaded cache files are stored on the local machine...
--  cacheDir: '{{env.HOME}}/.tc-vcs/'
--  # Command used to upload the tarball
--  uploadTar: "curl --header 'Content-Type: application/x-tar' --header 'Content-Encoding: gzip' -X PUT --data-binary @'{{source}}' '{{url}}'"
--  # Large HTTP GET requests are often slower through node's built-in HTTP
--  # layer, so we utilize a subprocess which is responsible for fetching...
--  get: curl --connect-timeout 30 --speed-limit 500000 -L -o {{dest}} {{url}}
--  # Used to create clone tarball
--  compress: tar -czf {{dest}} {{source}}
--  # All cache urls use tar + gz this is the command used to extract those files
--  # downloaded by the "get" command.
--  extract: tar -x -z --strip-components 1 -C {{dest}} -f {{source}}
--  # Name/prefixed used as part of the base url.
--  cacheName: clones/{{name}}.tar.gz
-diff --git a/taskcluster/docker/desktop1604-test/tester.env b/taskcluster/docker/desktop1604-test/tester.env
-deleted file mode 100644
---- a/taskcluster/docker/desktop1604-test/tester.env
-+++ /dev/null
-@@ -1,4 +0,0 @@
--GAIA_REV=tip
--GAIA_REF=tip
--GAIA_BASE_REPOSITORY=https://hg.mozilla.org/integration/gaia-central
--GAIA_HEAD_REPOSITORY=https://hg.mozilla.org/integration/gaia-central
-diff --git a/taskcluster/docker/firefox-snap/Dockerfile b/taskcluster/docker/firefox-snap/Dockerfile
-deleted file mode 100644
---- a/taskcluster/docker/firefox-snap/Dockerfile
-+++ /dev/null
-@@ -1,3 +0,0 @@
--FROM ubuntu:16.04
--
--RUN apt-get update && apt-get install -qy snapcraft bzip2 curl git && apt-get clean
-diff --git a/taskcluster/docker/firefox-snap/Makefile b/taskcluster/docker/firefox-snap/Makefile
-deleted file mode 100644
---- a/taskcluster/docker/firefox-snap/Makefile
-+++ /dev/null
-@@ -1,12 +0,0 @@
--DOCKERIO_USERNAME =$(error DOCKERIO_USERNAME should be set)
--IMAGE_NAME = firefox-snapcraft
--FULL_IMAGE_NAME = $(DOCKERIO_USERNAME)/$(IMAGE_NAME)
--
--build:
--	docker build -t $(FULL_IMAGE_NAME) --no-cache --rm .
--
--push:
--	docker push $(FULL_IMAGE_NAME):latest
--
--pull:
--	docker pull $(FULL_IMAGE_NAME):latest
-diff --git a/taskcluster/docker/firefox-snap/fetch_macaroons.sh b/taskcluster/docker/firefox-snap/fetch_macaroons.sh
-deleted file mode 100644
---- a/taskcluster/docker/firefox-snap/fetch_macaroons.sh
-+++ /dev/null
-@@ -1,14 +0,0 @@
--#!/bin/bash
--
--set -ex
--
--url="$1"
--
--CONFIG="$HOME/.config/snapcraft/snapcraft.cfg"
--
--mkdir -p "$( dirname "$CONFIG" )"
--curl -s "$url" | \
--    python -c 'import json, sys; a = json.load(sys.stdin); print a["secret"]["content"]' | \
--    base64 -d > \
--    "$CONFIG"
--chmod 600 "$CONFIG"
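The fetch_macaroons.sh script above is the whole credential hand-off for snap uploads: it asks the taskcluster secrets service for a JSON blob, extracts the base64-encoded snapcraft macaroon at secret.content, and installs it as ~/.config/snapcraft/snapcraft.cfg with owner-only permissions. As a rough Python sketch of the same flow — the JSON shape and the 0600 permissions come from the script, while fetch_snapcraft_config is an illustrative name, not part of the tree:

# Sketch of what fetch_macaroons.sh does, in Python. Assumes the same JSON
# shape the shell pipeline parses: {"secret": {"content": "<base64 cfg>"}}
import base64
import json
import os
import urllib.request

def fetch_snapcraft_config(url, config_path=None):
    # Hypothetical helper; mirrors:
    # curl "$url" | python -c '... a["secret"]["content"]' | base64 -d > $CONFIG
    if config_path is None:
        config_path = os.path.expanduser("~/.config/snapcraft/snapcraft.cfg")
    with urllib.request.urlopen(url) as resp:
        secret = json.load(resp)
    content = base64.b64decode(secret["secret"]["content"])
    os.makedirs(os.path.dirname(config_path), exist_ok=True)
    with open(config_path, "wb") as fh:
        fh.write(content)
    os.chmod(config_path, 0o600)  # same owner-only permissions the script sets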
-diff --git a/taskcluster/docker/firefox-snap/firefox.desktop b/taskcluster/docker/firefox-snap/firefox.desktop
-deleted file mode 100644
---- a/taskcluster/docker/firefox-snap/firefox.desktop
-+++ /dev/null
-@@ -1,219 +0,0 @@
--[Desktop Entry]
--Version=1.0
--Name=Firefox Web Browser
--Name[ar]=متصفح الويب فَيَرفُكْس
--Name[ast]=Restolador web Firefox
--Name[bn]=ফায়ারফক্স ওয়েব ব্রাউজার
--Name[ca]=Navegador web Firefox
--Name[cs]=Firefox Webový prohlížeč
--Name[da]=Firefox - internetbrowser
--Name[el]=Περιηγητής Firefox
--Name[es]=Navegador web Firefox
--Name[et]=Firefoxi veebibrauser
--Name[fa]=مرورگر اینترنتی Firefox
--Name[fi]=Firefox-selain
--Name[fr]=Navigateur Web Firefox
--Name[gl]=Navegador web Firefox
--Name[he]=דפדפן האינטרנט Firefox
--Name[hr]=Firefox web preglednik
--Name[hu]=Firefox webböngésző
--Name[it]=Firefox Browser Web
--Name[ja]=Firefox ウェブ・ブラウザ
--Name[ko]=Firefox 웹 브라우저
--Name[ku]=Geroka torê Firefox
--Name[lt]=Firefox interneto naršyklė
--Name[nb]=Firefox Nettleser
--Name[nl]=Firefox webbrowser
--Name[nn]=Firefox Nettlesar
--Name[no]=Firefox Nettleser
--Name[pl]=Przeglądarka WWW Firefox
--Name[pt]=Firefox Navegador Web
--Name[pt_BR]=Navegador Web Firefox
--Name[ro]=Firefox – Navigator Internet
--Name[ru]=Веб-браузер Firefox
--Name[sk]=Firefox - internetový prehliadač
--Name[sl]=Firefox spletni brskalnik
--Name[sv]=Firefox webbläsare
--Name[tr]=Firefox Web Tarayıcısı
--Name[ug]=Firefox توركۆرگۈ
--Name[uk]=Веб-браузер Firefox
--Name[vi]=Trình duyệt web Firefox
--Name[zh_CN]=Firefox 网络浏览器
--Name[zh_TW]=Firefox 網路瀏覽器
--Comment=Browse the World Wide Web
--Comment[ar]=تصفح الشبكة العنكبوتية العالمية
--Comment[ast]=Restola pela Rede
--Comment[bn]=ইন্টারনেট ব্রাউজ করুন
--Comment[ca]=Navegueu per la web
--Comment[cs]=Prohlížení stránek World Wide Webu
--Comment[da]=Surf på internettet
--Comment[de]=Im Internet surfen
--Comment[el]=Μπορείτε να περιηγηθείτε στο διαδίκτυο (Web)
--Comment[es]=Navegue por la web
--Comment[et]=Lehitse veebi
--Comment[fa]=صفحات شبکه جهانی اینترنت را مرور نمایید
--Comment[fi]=Selaa Internetin WWW-sivuja
--Comment[fr]=Naviguer sur le Web
--Comment[gl]=Navegar pola rede
--Comment[he]=גלישה ברחבי האינטרנט
--Comment[hr]=Pretražite web
--Comment[hu]=A világháló böngészése
--Comment[it]=Esplora il web
--Comment[ja]=ウェブを閲覧します
--Comment[ko]=웹을 돌아 다닙니다
--Comment[ku]=Li torê bigere
--Comment[lt]=Naršykite internete
--Comment[nb]=Surf på nettet
--Comment[nl]=Verken het internet
--Comment[nn]=Surf på nettet
--Comment[no]=Surf på nettet
--Comment[pl]=Przeglądanie stron WWW
--Comment[pt]=Navegue na Internet
--Comment[pt_BR]=Navegue na Internet
--Comment[ro]=Navigați pe Internet
--Comment[ru]=Доступ в Интернет
--Comment[sk]=Prehliadanie internetu
--Comment[sl]=Brskajte po spletu
--Comment[sv]=Surfa på webben
--Comment[tr]=İnternet'te Gezinin
--Comment[ug]=دۇنيادىكى توربەتلەرنى كۆرگىلى بولىدۇ
--Comment[uk]=Перегляд сторінок Інтернету
--Comment[vi]=Để duyệt các trang web
--Comment[zh_CN]=浏览互联网
--Comment[zh_TW]=瀏覽網際網路
--GenericName=Web Browser
--GenericName[ar]=متصفح ويب
--GenericName[ast]=Restolador Web
--GenericName[bn]=ওয়েব ব্রাউজার
--GenericName[ca]=Navegador web
--GenericName[cs]=Webový prohlížeč
--GenericName[da]=Webbrowser
--GenericName[el]=Περιηγητής διαδικτύου
--GenericName[es]=Navegador web
--GenericName[et]=Veebibrauser
--GenericName[fa]=مرورگر اینترنتی
--GenericName[fi]=WWW-selain
--GenericName[fr]=Navigateur Web
--GenericName[gl]=Navegador Web
--GenericName[he]=דפדפן אינטרנט
--GenericName[hr]=Web preglednik
--GenericName[hu]=Webböngésző
--GenericName[it]=Browser web
--GenericName[ja]=ウェブ・ブラウザ
--GenericName[ko]=웹 브라우저
--GenericName[ku]=Geroka torê
--GenericName[lt]=Interneto naršyklė
--GenericName[nb]=Nettleser
--GenericName[nl]=Webbrowser
--GenericName[nn]=Nettlesar
--GenericName[no]=Nettleser
--GenericName[pl]=Przeglądarka WWW
--GenericName[pt]=Navegador Web
--GenericName[pt_BR]=Navegador Web
--GenericName[ro]=Navigator Internet
--GenericName[ru]=Веб-браузер
--GenericName[sk]=Internetový prehliadač
--GenericName[sl]=Spletni brskalnik
--GenericName[sv]=Webbläsare
--GenericName[tr]=Web Tarayıcı
--GenericName[ug]=توركۆرگۈ
--GenericName[uk]=Веб-браузер
--GenericName[vi]=Trình duyệt Web
--GenericName[zh_CN]=网络浏览器
--GenericName[zh_TW]=網路瀏覽器
--Keywords=Internet;WWW;Browser;Web;Explorer
--Keywords[ar]=انترنت;إنترنت;متصفح;ويب;وب
--Keywords[ast]=Internet;WWW;Restolador;Web;Esplorador
--Keywords[ca]=Internet;WWW;Navegador;Web;Explorador;Explorer
--Keywords[cs]=Internet;WWW;Prohlížeč;Web;Explorer
--Keywords[da]=Internet;Internettet;WWW;Browser;Browse;Web;Surf;Nettet
--Keywords[de]=Internet;WWW;Browser;Web;Explorer;Webseite;Site;surfen;online;browsen
--Keywords[el]=Internet;WWW;Browser;Web;Explorer;Διαδίκτυο;Περιηγητής;Firefox;Φιρεφοχ;Ιντερνετ
--Keywords[es]=Explorador;Internet;WWW
--Keywords[fi]=Internet;WWW;Browser;Web;Explorer;selain;Internet-selain;internetselain;verkkoselain;netti;surffaa
--Keywords[fr]=Internet;WWW;Browser;Web;Explorer;Fureteur;Surfer;Navigateur
--Keywords[he]=דפדפן;אינטרנט;רשת;אתרים;אתר;פיירפוקס;מוזילה;
--Keywords[hr]=Internet;WWW;preglednik;Web
--Keywords[hu]=Internet;WWW;Böngésző;Web;Háló;Net;Explorer
--Keywords[it]=Internet;WWW;Browser;Web;Navigatore
--Keywords[is]=Internet;WWW;Vafri;Vefur;Netvafri;Flakk
--Keywords[ja]=Internet;WWW;Web;インターネット;ブラウザ;ウェブ;エクスプローラ
--Keywords[nb]=Internett;WWW;Nettleser;Explorer;Web;Browser;Nettside
--Keywords[nl]=Internet;WWW;Browser;Web;Explorer;Verkenner;Website;Surfen;Online
--Keywords[pt]=Internet;WWW;Browser;Web;Explorador;Navegador
--Keywords[pt_BR]=Internet;WWW;Browser;Web;Explorador;Navegador
--Keywords[ru]=Internet;WWW;Browser;Web;Explorer;интернет;браузер;веб;файрфокс;огнелис
--Keywords[sk]=Internet;WWW;Prehliadač;Web;Explorer
--Keywords[sl]=Internet;WWW;Browser;Web;Explorer;Brskalnik;Splet
--Keywords[tr]=İnternet;WWW;Tarayıcı;Web;Gezgin;Web sitesi;Site;sörf;çevrimiçi;tara
--Keywords[uk]=Internet;WWW;Browser;Web;Explorer;Інтернет;мережа;переглядач;оглядач;браузер;веб;файрфокс;вогнелис;перегляд
--Keywords[vi]=Internet;WWW;Browser;Web;Explorer;Trình duyệt;Trang web
--Keywords[zh_CN]=Internet;WWW;Browser;Web;Explorer;网页;浏览;上网;火狐;Firefox;ff;互联网;网站;
--Keywords[zh_TW]=Internet;WWW;Browser;Web;Explorer;網際網路;網路;瀏覽器;上網;網頁;火狐
--Exec=firefox %u
--Terminal=false
--X-MultipleArgs=false
--Type=Application
--Icon=firefox
--Categories=GNOME;GTK;Network;WebBrowser;
--MimeType=text/html;text/xml;application/xhtml+xml;application/xml;application/rss+xml;application/rdf+xml;image/gif;image/jpeg;image/png;x-scheme-handler/http;x-scheme-handler/https;x-scheme-handler/ftp;x-scheme-handler/chrome;video/webm;application/x-xpinstall;
--StartupNotify=true
--Actions=NewWindow;NewPrivateWindow;
--
--[Desktop Action NewWindow]
--Name=Open a New Window
--Name[ar]=افتح نافذة جديدة
--Name[ast]=Abrir una ventana nueva
--Name[bn]=Abrir una ventana nueva
--Name[ca]=Obre una finestra nova
--Name[cs]=Otevřít nové okno
--Name[da]=Åbn et nyt vindue
--Name[de]=Ein neues Fenster öffnen
--Name[el]=Άνοιγμα νέου παραθύρου
--Name[es]=Abrir una ventana nueva
--Name[fi]=Avaa uusi ikkuna
--Name[fr]=Ouvrir une nouvelle fenêtre
--Name[gl]=Abrir unha nova xanela
--Name[he]=פתיחת חלון חדש
--Name[hr]=Otvori novi prozor
--Name[hu]=Új ablak nyitása
--Name[it]=Apri una nuova finestra
--Name[ja]=新しいウィンドウを開く
--Name[ko]=새 창 열기
--Name[ku]=Paceyeke nû veke
--Name[lt]=Atverti naują langą
--Name[nb]=Åpne et nytt vindu
--Name[nl]=Nieuw venster openen
--Name[pt]=Abrir nova janela
--Name[pt_BR]=Abrir nova janela
--Name[ro]=Deschide o fereastră nouă
--Name[ru]=Новое окно
--Name[sk]=Otvoriť nové okno
--Name[sl]=Odpri novo okno
--Name[sv]=Öppna ett nytt fönster
--Name[tr]=Yeni pencere aç
--Name[ug]=يېڭى كۆزنەك ئېچىش
--Name[uk]=Відкрити нове вікно
--Name[vi]=Mở cửa sổ mới
--Name[zh_CN]=新建窗口
--Name[zh_TW]=開啟新視窗
--Exec=firefox -new-window
--
--[Desktop Action NewPrivateWindow]
--Name=Open a New Private Window
--Name[ar]=افتح نافذة جديدة للتصفح الخاص
--Name[ca]=Obre una finestra nova en mode d'incògnit
--Name[de]=Ein neues privates Fenster öffnen
--Name[es]=Abrir una ventana privada nueva
--Name[fi]=Avaa uusi yksityinen ikkuna
--Name[fr]=Ouvrir une nouvelle fenêtre de navigation privée
--Name[he]=פתיחת חלון גלישה פרטית חדש
--Name[hu]=Új privát ablak nyitása
--Name[it]=Apri una nuova finestra anonima
--Name[nb]=Åpne et nytt privat vindu
--Name[ru]=Новое приватное окно
--Name[sl]=Odpri novo okno zasebnega brskanja
--Name[tr]=Yeni bir pencere aç
--Name[uk]=Відкрити нове вікно у потайливому режимі
--Name[zh_TW]=開啟新隱私瀏覽視窗
--Exec=firefox -private-window
-diff --git a/taskcluster/docker/firefox-snap/runme.sh b/taskcluster/docker/firefox-snap/runme.sh
-deleted file mode 100755
---- a/taskcluster/docker/firefox-snap/runme.sh
-+++ /dev/null
-@@ -1,88 +0,0 @@
--#!/bin/bash
--
--set -xe
--
--# Required env variables
--test $VERSION
--test $BUILD_NUMBER
--test $CANDIDATES_DIR
--
--# Optional env variables
--: WORKSPACE                     ${WORKSPACE:=/home/worker/workspace}
--: ARTIFACTS_DIR                 ${ARTIFACTS_DIR:=/home/worker/artifacts}
--
--SCRIPT_DIRECTORY="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
--
--TARGET="firefox-${VERSION}.snap"
--TARGET_FULL_PATH="$ARTIFACTS_DIR/$TARGET"
--
--mkdir -p "$ARTIFACTS_DIR"
--rm -rf "${WORKSPACE}/source" && mkdir -p "${WORKSPACE}/source/opt" "${WORKSPACE}/source/usr/bin"
--
--CURL="curl --location --retry 10 --retry-delay 10"
--
--# Download and extract en-US linux64 binary
--$CURL -o "${WORKSPACE}/firefox.tar.bz2" \
--    "${CANDIDATES_DIR}/${VERSION}-candidates/build${BUILD_NUMBER}/linux-x86_64/en-US/firefox-${VERSION}.tar.bz2"
--tar -C "${WORKSPACE}/source/opt" -xf "${WORKSPACE}/firefox.tar.bz2"
--
--# Get Ubuntu configuration
--PARTNER_CONFIG_DIR="$WORKSPACE/partner_config"
--git clone https://github.com/mozilla-partners/canonical.git "$PARTNER_CONFIG_DIR"
--
--DISTRIBUTION_DIR="$WORKSPACE/source/opt/firefox/distribution"
--mv "$PARTNER_CONFIG_DIR/desktop/ubuntu/distribution" "$DISTRIBUTION_DIR"
--cp -v "$SCRIPT_DIRECTORY/firefox.desktop" "$DISTRIBUTION_DIR"
--
--# Use release-specific list of locales to fetch L10N XPIs
--$CURL -o "${WORKSPACE}/l10n_changesets.txt" "${CANDIDATES_DIR}/${VERSION}-candidates/build${BUILD_NUMBER}/l10n_changesets.txt"
--cat "${WORKSPACE}/l10n_changesets.txt"
--
--mkdir -p "$DISTRIBUTION_DIR/extensions"
--for locale in $(grep -v ja-JP-mac "${WORKSPACE}/l10n_changesets.txt" | awk '{print $1}'); do
--    $CURL -o "${WORKSPACE}/source/opt/firefox/distribution/extensions/langpack-${locale}@firefox.mozilla.org.xpi" \
--        "$CANDIDATES_DIR/${VERSION}-candidates/build${BUILD_NUMBER}/linux-x86_64/xpi/${locale}.xpi"
--done
--
--# Symlink firefox binary to /usr/bin to make it available in PATH
--ln -s ../../opt/firefox/firefox "${WORKSPACE}/source/usr/bin"
--
--# Generate snapcraft manifest
--sed -e "s/@VERSION@/${VERSION}/g" -e "s/@BUILD_NUMBER@/${BUILD_NUMBER}/g" snapcraft.yaml.in > ${WORKSPACE}/snapcraft.yaml
--cd ${WORKSPACE}
--snapcraft
--
--mv *.snap "$TARGET_FULL_PATH"
--
--cd $ARTIFACTS_DIR
--
--# Generate checksums file
--size=$(stat --printf="%s" "$TARGET_FULL_PATH")
--sha=$(sha512sum "$TARGET_FULL_PATH" | awk '{print $1}')
--echo "$sha sha512 $size $TARGET" > $TARGET.checksums
--
--echo "Generating signing manifest"
--hash=$(sha512sum $TARGET.checksums | awk '{print $1}')
--
--cat << EOF > signing_manifest.json
--[{"file_to_sign": "$TARGET.checksums", "hash": "$hash"}]
--EOF
--
--# For posterity
--find . -ls
--cat $TARGET.checksums
--cat signing_manifest.json
--
--
--# Upload Beta snaps to Ubuntu Snap Store (No channel)
--# TODO: Add a release channel once ready for broader audience
--# TODO: Don't filter out non-beta releases
--# TODO: Parametrize channel depending on beta vs release
--# TODO: Make this part an independent task
--if [[ $VERSION =~ ^[0-9]+\.0b[0-9]+$ ]]; then
--  echo "Beta version detected. Uploading to Ubuntu Store (no channel)..."
--  bash "$SCRIPT_DIRECTORY/fetch_macaroons.sh" 'http://taskcluster/secrets/v1/secret/project/releng/snapcraft/firefox/edge'
--  snapcraft push "$TARGET_FULL_PATH"
--else
--  echo "Non-beta version detected. Nothing else to do."
--fi
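Two small artifacts fall out of the end of runme.sh: a one-line checksums file of the form "<sha512> sha512 <size> <filename>", and a signing manifest recording the SHA-512 of that checksums file. A minimal Python sketch of those two steps, assuming the same formats as the script (write_checksums_and_manifest is an illustrative name):

# Sketch of the checksums/signing-manifest step from runme.sh.
import hashlib
import json
import os

def write_checksums_and_manifest(target_full_path):
    # Mirrors: echo "$sha sha512 $size $TARGET" > $TARGET.checksums
    target = os.path.basename(target_full_path)
    size = os.path.getsize(target_full_path)
    with open(target_full_path, "rb") as fh:
        sha = hashlib.sha512(fh.read()).hexdigest()
    checksums_path = target + ".checksums"
    with open(checksums_path, "w") as fh:
        fh.write("{} sha512 {} {}\n".format(sha, size, target))
    # The signing manifest wraps the SHA-512 of the checksums file itself.
    with open(checksums_path, "rb") as fh:
        manifest_hash = hashlib.sha512(fh.read()).hexdigest()
    with open("signing_manifest.json", "w") as fh:
        json.dump([{"file_to_sign": checksums_path, "hash": manifest_hash}], fh)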
-diff --git a/taskcluster/docker/firefox-snap/snapcraft.yaml.in b/taskcluster/docker/firefox-snap/snapcraft.yaml.in
-deleted file mode 100644
---- a/taskcluster/docker/firefox-snap/snapcraft.yaml.in
-+++ /dev/null
-@@ -1,39 +0,0 @@
--name: firefox
--version: @VERSION@-@BUILD_NUMBER@
--summary: Mozilla Firefox web browser
--description:  Firefox is a powerful, extensible web browser with support for modern web application technologies.
--confinement: strict
--
--apps:
--  firefox:
--    command: desktop-launch firefox
--    desktop: opt/firefox/distribution/firefox.desktop
--    plugs:
--      - unity7
--      - network
--      - home
--      - x11
--      - opengl
--      - pulseaudio
--      - gsettings
--      - camera
--      - browser-sandbox
--
--plugs:
--  browser-sandbox:
--    interface: browser-support
--    allow-sandbox: true
--
--parts:
--  firefox:
--    plugin: dump
--    source: source
--    stage-packages:
--      - libxt6
--      - libdbus-glib-1-2
--      - libasound2
--      - libpulse0
--      - libgl1-mesa-dri
--      - libgl1-mesa-glx
--      - libmirclient9
--    after: [desktop-gtk3]
-diff --git a/taskcluster/docker/funsize-balrog-submitter/Dockerfile b/taskcluster/docker/funsize-balrog-submitter/Dockerfile
-deleted file mode 100644
---- a/taskcluster/docker/funsize-balrog-submitter/Dockerfile
-+++ /dev/null
-@@ -1,35 +0,0 @@
--FROM ubuntu:vivid
--MAINTAINER Rail Aliiev <rail@mozilla.com>
--
--# Required software
--ENV DEBIAN_FRONTEND noninteractive
--# Chain apt-get commands with apt-get clean in a single docker RUN
--# to make sure that files are removed within a single docker layer
--RUN apt-get update -q && \
--    apt-get install -yyq --no-install-recommends \
--    python mercurial curl python-boto python-setuptools python-cryptography \
--    python-dev gcc liblzma-dev && \
--    apt-get clean
--
--COPY requirements.txt /tmp/
--# python-pip installs a lot of dependencies increasing the size of an image
--# drastically.
--RUN easy_install pip
--RUN pip install -r /tmp/requirements.txt
--
--RUN hg clone https://hg.mozilla.org/build/tools /home/worker/tools
--
--RUN useradd -d /home/worker -s /bin/bash -m worker
--
--RUN mkdir /home/worker/bin
--COPY scripts/* /home/worker/bin/
--RUN mkdir /home/worker/keys
--COPY *.pubkey /home/worker/keys/
--COPY runme.sh /runme.sh
--COPY submit_complete.sh /submit_complete.sh
--RUN chmod 755 /home/worker/bin/* /runme.sh /submit_complete.sh
--
--ENV           HOME          /home/worker
--ENV           SHELL         /bin/bash
--ENV           USER          worker
--ENV           LOGNAME       worker
-diff --git a/taskcluster/docker/funsize-balrog-submitter/Makefile b/taskcluster/docker/funsize-balrog-submitter/Makefile
-deleted file mode 100644
---- a/taskcluster/docker/funsize-balrog-submitter/Makefile
-+++ /dev/null
-@@ -1,17 +0,0 @@
--DOCKERIO_USERNAME =$(error DOCKERIO_USERNAME should be set)
--IMAGE_NAME = funsize-balrog-submitter
--FULL_IMAGE_NAME = $(DOCKERIO_USERNAME)/$(IMAGE_NAME)
--
--build:
--	docker build -t $(FULL_IMAGE_NAME) --no-cache --rm .
--
--push:
--	docker push $(FULL_IMAGE_NAME):latest
--
--pull:
--	docker pull $(FULL_IMAGE_NAME):latest
--
--update_pubkeys:
--	curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/nightly_aurora_level3_primary.der | openssl x509 -inform DER -pubkey -noout > nightly.pubkey
--	curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/dep1.der | openssl x509 -inform DER -pubkey -noout > dep.pubkey
--	curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/release_primary.der | openssl x509 -inform DER -pubkey -noout > release.pubkey
-diff --git a/taskcluster/docker/funsize-balrog-submitter/dep.pubkey b/taskcluster/docker/funsize-balrog-submitter/dep.pubkey
-deleted file mode 100644
---- a/taskcluster/docker/funsize-balrog-submitter/dep.pubkey
-+++ /dev/null
-@@ -1,9 +0,0 @@
-------BEGIN PUBLIC KEY-----
--MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzjHSobdeiQ3JHP/cCIOp
--WaX9y12rL5mIo9OR9bpqEZdD0yXJJJeZA887Mv8slqsM+qObMUpKvfEE6zyYPIZJ
--ANib31neI5BBYHhfhf2f5EnkilSYlmU3Gx+uRsmsdt58PpYe124tOAGgca/8bUy3
--eb6kUUTwvMI0oWQuPkGUaoHVQyj/bBMTrIkyF3UbfFtiX/SfOPvIoabNUe+pQHUe
--pqC2+RxzDGj+shTq/hYhtXlptFzsEEb2+0foLy0MY8C30dP2QqbM2iavvr/P8OcS
--Gm3H0TQcRzIEBzvPcIjiZi1nQj/r/3TlYRNCjuYT/HsNLXrB/U5Tc990jjAUJxdH
--0wIDAQAB
-------END PUBLIC KEY-----
-diff --git a/taskcluster/docker/funsize-balrog-submitter/nightly_sha1.pubkey b/taskcluster/docker/funsize-balrog-submitter/nightly_sha1.pubkey
-deleted file mode 100644
---- a/taskcluster/docker/funsize-balrog-submitter/nightly_sha1.pubkey
-+++ /dev/null
-@@ -1,9 +0,0 @@
-------BEGIN PUBLIC KEY-----
--MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4j/IS3gWbyVVnWn4ZRnC
--Fuzb6VAaHa0I+4E504ekhVAhbKlSfBstkLbXajdjUVAJpn02zWnOaTl5KAdpDpIp
--SkdA4mK20ej3/Ij7gIt8IwaX+ArXL8mP84pxDn5BgaNADm3206Z6YQzc/TDYu529
--qkDFmLqNUVRJAhPO+qqhKHIcVGh8HUHXN6XV1qOFip+UU0M474jAGgurVmAv8Rh7
--VvM0v5KmB6V6WHwM5gwjg2yRY/o+xYIsNeSes9rpp+MOs/RnUA6LI4WZGY4YahvX
--VclIXBDgbWPYtojexIJkmYj8JIIRsh3eCsrRRe14fq7cBurp3CxBYMlDHf0RUoaq
--hQIDAQAB
-------END PUBLIC KEY-----
-diff --git a/taskcluster/docker/funsize-balrog-submitter/nightly_sha384.pubkey b/taskcluster/docker/funsize-balrog-submitter/nightly_sha384.pubkey
-deleted file mode 100644
---- a/taskcluster/docker/funsize-balrog-submitter/nightly_sha384.pubkey
-+++ /dev/null
-@@ -1,14 +0,0 @@
-------BEGIN PUBLIC KEY-----
--MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAth151NGY8PBzn0bii9Yc
--AjYHZDwP9Lj1c3owG0zLqW2kPcdp86QTAcoYunHGYFFakNG3tooZhzwkMjZ1OrXc
--ERjD6AuVSGIBdsKtKP4vLtMjDUteFN4K2+rveozcnYFZuTWEajGu8uoYsv4QgdEA
--nTBC39j0J33xlfUR+XKuxzhxNrFX+fRFWuLDJrPziMcVA/mzf0gXlhtEsfV0HYyg
--yWpHdIWww+llysD1QOQAHk94Ss8c/4BFXFxlwlLeNlB1ZqLm1LsNy0jUy9EHeO3C
--H6eqmiFEbpdjlrkJdgR1NcTzeY/Qf/nhWH6BAZrSapQycF7OSLU+rFWMQUElSPLc
--NVl7oNAAfSYLTvRjPGi+mJK3wGFQw1EpwQl+elE1oj4+sHvIVpDrLb6btpxfr1cZ
--pR4Di/hkOIymxEDWvtUhOxUXnYbDKQSDcAHKM/xR3sdIAiVtVuL4hyBwlAqkQc2j
--H+SmnCbazgnq5+dN4y5DjoOgbZQ/koE3s3bUzzMeIxaul9v4gMtGROw3PQ3OZcP0
--lgjPRhY+NeTnWMo2nGb4/eS6Cn2qFLfbEQjsj6pJJBNKfvK/gm1jXb3PgXXdf8+d
--2xTPOX8QNpSK7C0w4vYlvSpYZlsx2cznEOV6LDqP0QHUnmd/k1xWRRGiQ7gtT+BV
--Fn0h7JyTGmEdFu6l4OhS8hMCAwEAAQ==
-------END PUBLIC KEY-----
-diff --git a/taskcluster/docker/funsize-balrog-submitter/release_sha1.pubkey b/taskcluster/docker/funsize-balrog-submitter/release_sha1.pubkey
-deleted file mode 100644
---- a/taskcluster/docker/funsize-balrog-submitter/release_sha1.pubkey
-+++ /dev/null
-@@ -1,9 +0,0 @@
-------BEGIN PUBLIC KEY-----
--MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvH4r94FpQ0gvr1hhTfV9
--NUeWPJ5CN6TZRq7v/Dc4nkJ1J4IP1B3UEii34tcNKpy1nKupiZuTT6T1zQYT+z5x
--3UkDF9qQboQ8RNb/BEz/cN3on/LTEnZ7YSraRL11M6cEB8mvmJxddCEquwqccRbs
--Usp8WUB7uRv1w6Anley7N9F/LE1iLPwJasZypRnzWb3aYsJy0cMFOYy+OXVdpktn
--qYqlNIjnt84u4Nil6UXnBbIJNUVOCY8wOFClNvVpubjPkWK1gtdWy3x/hJU5RpAO
--K9cnHxq4M/I4SUWTWO3r7yweQiHG4Jyoc7sP1jkwjBkSG93sDEycfwOdOoZft3wN
--sQIDAQAB
-------END PUBLIC KEY-----
-diff --git a/taskcluster/docker/funsize-balrog-submitter/release_sha384.pubkey b/taskcluster/docker/funsize-balrog-submitter/release_sha384.pubkey
-deleted file mode 100644
---- a/taskcluster/docker/funsize-balrog-submitter/release_sha384.pubkey
-+++ /dev/null
-@@ -1,14 +0,0 @@
-------BEGIN PUBLIC KEY-----
--MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxCHbY+fP3dvaP9XVbmK6
--i4rbqo72INEWgDSYbr/DIYfCSzHC9H8pU8dyjt+Nd8OtoUZtBD1N9fP7SlrvPZSI
--ZSW4k0e9Ky5aV3Uy+ivamSvYszkhqdeP2y7MBu73XHKYONR9PnKa+ovmREwSEI+h
--1e0ebm8zvF7Ndwx0mOeZkDu9SDkDGg4aj2xrJyBBOuGVjuctMZ6l1davANI5xiJ0
--GBEU3tR1gJs1T4vLBis5mEFn9y4kgyw/HrxmRYGnZL4fLb2fTI+pNW0Twu3KWwwi
--LgLkkVrNWiHSk7YWqxjcg5IA3pQETQ17paTHoB5Mnkvuh6MkDXvRG5VgAHZAigr6
--fJMsasOUaBeos/cD1LDQEIObpetlxc0Fiu/lvUts0755otkhI+yv35+wUa6GJrsE
--CsT7c/LaFtQXg06aGXbMLDn0bE/e+nw9KWT/rE1iYXMFkzrqoTeYJ+v7/fD/ywU8
--m8l4CZmXxzd/RogMrM3xl+j4ucAAltDQyL4yLySaIT05w5U8z2zJDEXFvpFDSRfF
--K3kjLwGub7wNwaQDuh/msIUdavu4g+GNikCXAJ8AssLuYatyHoltd2tf+EIIDW3U
--zzLpymnLo3cAz3IPfXyqVB+mcLcpqbHjl3hWms6l1wGtz6S4WqdrWs/KfzS5EyDK
--r63xn1Rg/XFmR57EsFEXAZ8CAwEAAQ==
-------END PUBLIC KEY-----
-diff --git a/taskcluster/docker/funsize-balrog-submitter/requirements.txt b/taskcluster/docker/funsize-balrog-submitter/requirements.txt
-deleted file mode 100644
---- a/taskcluster/docker/funsize-balrog-submitter/requirements.txt
-+++ /dev/null
-@@ -1,1 +0,0 @@
--mar==2.1.2
-diff --git a/taskcluster/docker/funsize-balrog-submitter/runme.sh b/taskcluster/docker/funsize-balrog-submitter/runme.sh
-deleted file mode 100644
---- a/taskcluster/docker/funsize-balrog-submitter/runme.sh
-+++ /dev/null
-@@ -1,25 +0,0 @@
--#!/bin/bash
--
--set -xe
--
--test $PARENT_TASK_ARTIFACTS_URL_PREFIX
--test $BALROG_API_ROOT
--test $SHA1_SIGNING_CERT
--test $SHA384_SIGNING_CERT
--
--
--ARTIFACTS_DIR="/home/worker/artifacts"
--mkdir -p "$ARTIFACTS_DIR"
--
--curl --location --retry 10 --retry-delay 10 -o "$ARTIFACTS_DIR/manifest.json" \
--    "$PARENT_TASK_ARTIFACTS_URL_PREFIX/manifest.json"
--
--cat "$ARTIFACTS_DIR/manifest.json"
--python /home/worker/bin/funsize-balrog-submitter.py \
--    --artifacts-url-prefix "$PARENT_TASK_ARTIFACTS_URL_PREFIX" \
--    --manifest "$ARTIFACTS_DIR/manifest.json" \
--    -a "$BALROG_API_ROOT" \
--    --sha1-signing-cert "/home/worker/keys/${SHA1_SIGNING_CERT}.pubkey" \
--    --sha384-signing-cert "/home/worker/keys/${SHA384_SIGNING_CERT}.pubkey" \
--    --verbose \
--    $EXTRA_BALROG_SUBMITTER_PARAMS
-diff --git a/taskcluster/docker/funsize-balrog-submitter/scripts/funsize-balrog-submitter-complete.py b/taskcluster/docker/funsize-balrog-submitter/scripts/funsize-balrog-submitter-complete.py
-deleted file mode 100644
---- a/taskcluster/docker/funsize-balrog-submitter/scripts/funsize-balrog-submitter-complete.py
-+++ /dev/null
-@@ -1,66 +0,0 @@
--#!/usr/bin/env python
--from __future__ import absolute_import, print_function
--
--import site
--import os
--import logging
--import argparse
--import json
--
--site.addsitedir("/home/worker/tools/lib/python")
--
--from balrog.submitter.cli import ReleaseSubmitterV4
--from util.retry import retry
--
--log = logging.getLogger(__name__)
--
--
--def main():
--    parser = argparse.ArgumentParser()
--    parser.add_argument("--manifest", required=True)
--    parser.add_argument("-a", "--api-root", required=True,
--                        help="Balrog API root")
--    parser.add_argument("-v", "--verbose", action="store_const",
--                        dest="loglevel", const=logging.DEBUG,
--                        default=logging.INFO)
--    parser.add_argument("--product", help="Override product name from application.ini")
--    args = parser.parse_args()
--    logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s",
--                        level=args.loglevel)
--    logging.getLogger("requests").setLevel(logging.WARNING)
--    logging.getLogger("boto").setLevel(logging.WARNING)
--
--    balrog_username = os.environ.get("BALROG_USERNAME")
--    balrog_password = os.environ.get("BALROG_PASSWORD")
--    suffix = os.environ.get("BALROG_BLOB_SUFFIX")
--    if not balrog_username or not balrog_password:
--        raise RuntimeError("BALROG_USERNAME and BALROG_PASSWORD environment "
--                           "variables should be set")
--    if not suffix:
--        raise RuntimeError("BALROG_BLOB_SUFFIX environment variable should be set")
--
--    manifest = json.load(open(args.manifest))
--    auth = (balrog_username, balrog_password)
--
--    for e in manifest:
--        complete_info = [{
--            "hash": e["hash"],
--            "size": e["size"],
--        }]
--
--        submitter = ReleaseSubmitterV4(api_root=args.api_root, auth=auth,
--                                       suffix=suffix)
--        productName = args.product or e["appName"]
--        retry(lambda: submitter.run(
--            platform=e["platform"], productName=productName,
--            version=e["toVersion"],
--            build_number=e["toBuildNumber"],
--            appVersion=e["version"], extVersion=e["version"],
--            buildID=e["to_buildid"], locale=e["locale"],
--            hashFunction='sha512', completeInfo=complete_info),
--            attempts=30, sleeptime=10, max_sleeptime=60, jitter=3,
--        )
--
--
--if __name__ == '__main__':
--    main()
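Both submitter scripts wrap every Balrog call in retry(...) from the build/tools repository (imported from util.retry above), with 30 attempts, 10–60 second sleeps and jitter. For readers without that repository handy, here is a hedged sketch of such a helper under those parameters; it is a stand-in, not the actual util.retry implementation:

# Stand-in sketch of the retry pattern used above; the real helper lives in
# build/tools (util.retry) and may differ in signature and backoff details.
import random
import time

def retry(fn, attempts=30, sleeptime=10, max_sleeptime=60, jitter=3):
    for attempt in range(1, attempts + 1):
        try:
            return fn()
        except Exception:
            if attempt == attempts:
                raise  # out of attempts, surface the last error
            # back off, capped at max_sleeptime, with +/- jitter seconds
            delay = min(sleeptime * attempt, max_sleeptime)
            time.sleep(max(0, delay + random.uniform(-jitter, jitter)))

# Usage mirrors the scripts: retry(lambda: submitter.run(...), attempts=30,
# sleeptime=10, max_sleeptime=60, jitter=3)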
-diff --git a/taskcluster/docker/funsize-balrog-submitter/scripts/funsize-balrog-submitter.py b/taskcluster/docker/funsize-balrog-submitter/scripts/funsize-balrog-submitter.py
-deleted file mode 100644
---- a/taskcluster/docker/funsize-balrog-submitter/scripts/funsize-balrog-submitter.py
-+++ /dev/null
-@@ -1,227 +0,0 @@
--#!/usr/bin/env python
--import site
--import os
--import logging
--import argparse
--import json
--import hashlib
--import requests
--import tempfile
--from boto.s3.connection import S3Connection
--from mardor.reader import MarReader
--from mardor.signing import get_keysize
--
--site.addsitedir("/home/worker/tools/lib/python")
--
--from balrog.submitter.cli import NightlySubmitterV4, ReleaseSubmitterV4
--from util.retry import retry, retriable
--
--log = logging.getLogger(__name__)
--
--
--def get_hash(content, hash_type="md5"):
--    h = hashlib.new(hash_type)
--    h.update(content)
--    return h.hexdigest()
--
--
--@retriable()
--def download(url, dest, mode=None):
--    log.debug("Downloading %s to %s", url, dest)
--    r = requests.get(url)
--    r.raise_for_status()
--
--    bytes_downloaded = 0
--    with open(dest, 'wb') as fd:
--        for chunk in r.iter_content(4096):
--            fd.write(chunk)
--            bytes_downloaded += len(chunk)
--
--    log.debug('Downloaded %s bytes', bytes_downloaded)
--    if 'content-length' in r.headers:
--        log.debug('Content-Length: %s bytes', r.headers['content-length'])
--        if bytes_downloaded != int(r.headers['content-length']):
--            raise IOError('Unexpected number of bytes downloaded')
--
--    if mode:
--        log.debug("chmod %o %s", mode, dest)
--        os.chmod(dest, mode)
--
--
--def verify_signature(mar, certs):
--    log.info("Checking %s signature", mar)
--    with open(mar, 'rb') as mar_fh:
--        m = MarReader(mar_fh)
--        m.verify(verify_key=certs.get(m.signature_type))
--
--
--def verify_copy_to_s3(bucket_name, aws_access_key_id, aws_secret_access_key,
--                      mar_url, mar_dest, signing_certs):
--    conn = S3Connection(aws_access_key_id, aws_secret_access_key)
--    bucket = conn.get_bucket(bucket_name)
--    _, dest = tempfile.mkstemp()
--    log.info("Downloading %s to %s...", mar_url, dest)
--    download(mar_url, dest)
--    log.info("Verifying the signature...")
--    if not os.getenv("MOZ_DISABLE_MAR_CERT_VERIFICATION"):
--        verify_signature(dest, signing_certs)
--    for name in possible_names(mar_dest, 10):
--        log.info("Checking if %s already exists", name)
--        key = bucket.get_key(name)
--        if not key:
--            log.info("Uploading to %s...", name)
--            key = bucket.new_key(name)
--            # There is a chance for race condition here. To avoid it we check
--            # the return value with replace=False. It should be not None.
--            length = key.set_contents_from_filename(dest, replace=False)
--            if length is None:
--                log.warn("Name race condition using %s, trying again...", name)
--                continue
--            else:
--                # key.make_public() may lead to race conditions, because
--                # it doesn't pass version_id, so it may not set permissions
--                bucket.set_canned_acl(acl_str='public-read', key_name=name,
--                                      version_id=key.version_id)
--                # Use explicit version_id to avoid using "latest" version
--                return key.generate_url(expires_in=0, query_auth=False,
--                                        version_id=key.version_id)
--        else:
--            if get_hash(retry(key.get_contents_as_string)) == \
--                    get_hash(open(dest).read()):
--                log.info("%s has the same MD5 checksum, not uploading...",
--                         name)
--                return key.generate_url(expires_in=0, query_auth=False,
--                                        version_id=key.version_id)
--            log.info("%s already exists with different checksum, "
--                     "trying another one...", name)
--
--    raise RuntimeError("Cannot generate a unique name for {}".format(mar_dest))
--
--
--def possible_names(initial_name, amount):
--    """Generate names appending counter before extension"""
--    prefix, ext = os.path.splitext(initial_name)
--    return [initial_name] + ["{}-{}{}".format(prefix, n, ext) for n in
--                             range(1, amount + 1)]
--
--
--def main():
--    parser = argparse.ArgumentParser()
--    parser.add_argument("--artifacts-url-prefix", required=True,
--                        help="URL prefix for MAR")
--    parser.add_argument("--manifest", required=True)
--    parser.add_argument("-a", "--api-root", required=True,
--                        help="Balrog API root")
--    parser.add_argument("-d", "--dummy", action="store_true",
--                        help="Add '-dummy' suffix to branch name")
--    parser.add_argument("--sha1-signing-cert", required=True)
--    parser.add_argument("--sha384-signing-cert", required=True)
--    parser.add_argument("-v", "--verbose", action="store_const",
--                        dest="loglevel", const=logging.DEBUG,
--                        default=logging.INFO)
--    parser.add_argument("--product", help="Override product name from application.ini")
--    args = parser.parse_args()
--    logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s",
--                        level=args.loglevel)
--    logging.getLogger("requests").setLevel(logging.WARNING)
--    logging.getLogger("boto").setLevel(logging.WARNING)
--
--    balrog_username = os.environ.get("BALROG_USERNAME")
--    balrog_password = os.environ.get("BALROG_PASSWORD")
--    if not balrog_username or not balrog_password:
--        raise RuntimeError("BALROG_USERNAME and BALROG_PASSWORD environment "
--                           "variables should be set")
--    # blob suffix used for releases only
--    suffix = os.environ.get("BALROG_BLOB_SUFFIX")
--
--    s3_bucket = os.environ.get("S3_BUCKET")
--    aws_access_key_id = os.environ.get("AWS_ACCESS_KEY_ID")
--    aws_secret_access_key = os.environ.get("AWS_SECRET_ACCESS_KEY")
--    if not (s3_bucket and aws_access_key_id and aws_secret_access_key):
--        log.warn("Skipping S3 uploads...")
--        uploads_enabled = False
--    else:
--        uploads_enabled = True
--
--    manifest = json.load(open(args.manifest))
--    auth = (balrog_username, balrog_password)
--
--    signing_certs = {
--        'sha1': open(args.sha1_signing_cert, 'rb').read(),
--        'sha384': open(args.sha384_signing_cert, 'rb').read(),
--    }
--
--    assert(get_keysize(signing_certs['sha1']) == 2048)
--    assert(get_keysize(signing_certs['sha384']) == 4096)
--
--    for e in manifest:
--        complete_info = [{
--            "hash": e["to_hash"],
--            "size": e["to_size"],
--        }]
--        partial_info = [{
--            "hash": e["hash"],
--            "size": e["size"],
--        }]
--
--        if "previousVersion" in e and "previousBuildNumber" in e:
--            log.info("Release style balrog submission")
--            partial_info[0]["previousVersion"] = e["previousVersion"]
--            partial_info[0]["previousBuildNumber"] = e["previousBuildNumber"]
--            submitter = ReleaseSubmitterV4(api_root=args.api_root, auth=auth,
--                                           dummy=args.dummy, suffix=suffix)
--            productName = args.product or e["appName"]
--            if suffix:
--                log.warning("Not submitting complete info")
--                complete_info = None
--            retry(lambda: submitter.run(
--                platform=e["platform"], productName=productName,
--                version=e["toVersion"],
--                build_number=e["toBuildNumber"],
--                appVersion=e["version"], extVersion=e["version"],
--                buildID=e["to_buildid"], locale=e["locale"],
--                hashFunction='sha512',
--                partialInfo=partial_info, completeInfo=complete_info),
--                attempts=30, sleeptime=10, max_sleeptime=60, jitter=3,
--            )
--        elif "from_buildid" in e and uploads_enabled:
--            log.info("Nightly style balrog submission")
--            partial_mar_url = "{}/{}".format(args.artifacts_url_prefix,
--                                             e["mar"])
--            complete_mar_url = e["to_mar"]
--            dest_prefix = "{branch}/{buildid}".format(
--                branch=e["branch"], buildid=e["to_buildid"])
--            partial_mar_dest = "{}/{}".format(dest_prefix, e["mar"])
--            complete_mar_filename = "{appName}-{branch}-{version}-" \
--                                    "{platform}-{locale}.complete.mar"
--            complete_mar_filename = complete_mar_filename.format(
--                appName=e["appName"], branch=e["branch"],
--                version=e["version"], platform=e["platform"],
--                locale=e["locale"]
--            )
--            complete_mar_dest = "{}/{}".format(dest_prefix,
--                                               complete_mar_filename)
--            partial_info[0]["url"] = verify_copy_to_s3(
--                s3_bucket, aws_access_key_id, aws_secret_access_key,
--                partial_mar_url, partial_mar_dest, signing_certs)
--            complete_info[0]["url"] = verify_copy_to_s3(
--                s3_bucket, aws_access_key_id, aws_secret_access_key,
--                complete_mar_url, complete_mar_dest, signing_certs)
--            partial_info[0]["from_buildid"] = e["from_buildid"]
--            submitter = NightlySubmitterV4(api_root=args.api_root, auth=auth,
--                                           dummy=args.dummy)
--            productName = args.product or e["appName"]
--            retry(lambda: submitter.run(
--                platform=e["platform"], buildID=e["to_buildid"],
--                productName=productName, branch=e["branch"],
--                appVersion=e["version"], locale=e["locale"],
--                hashFunction='sha512', extVersion=e["version"],
--                partialInfo=partial_info, completeInfo=complete_info),
--                attempts=30, sleeptime=10, max_sleeptime=60, jitter=3,
--            )
--        else:
--            raise RuntimeError("Cannot determine Balrog submission style")
--
--
--if __name__ == '__main__':
--    main()
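The S3 naming logic in this script is worth a second look: verify_copy_to_s3 walks the candidates from possible_names, relying on set_contents_from_filename(..., replace=False) returning None when another worker won the race for a name, in which case it moves on to the next candidate. The naming scheme itself is easy to see in isolation (this duplicates possible_names from the script above so it can run standalone; the example filename is made up):

# Quick illustration of the possible_names collision scheme.
import os

def possible_names(initial_name, amount):
    """Generate names appending a counter before the extension."""
    prefix, ext = os.path.splitext(initial_name)
    return [initial_name] + ["{}-{}{}".format(prefix, n, ext)
                             for n in range(1, amount + 1)]

print(possible_names("firefox-mozilla-central.complete.mar", 2))
# ['firefox-mozilla-central.complete.mar',
#  'firefox-mozilla-central.complete-1.mar',
#  'firefox-mozilla-central.complete-2.mar']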
-diff --git a/taskcluster/docker/funsize-balrog-submitter/submit_complete.sh b/taskcluster/docker/funsize-balrog-submitter/submit_complete.sh
-deleted file mode 100644
---- a/taskcluster/docker/funsize-balrog-submitter/submit_complete.sh
-+++ /dev/null
-@@ -1,23 +0,0 @@
--#!/bin/bash
--
--set -xe
--
--test $PARENT_TASK_ARTIFACTS_URL_PREFIX
--test $BALROG_API_ROOT
--# BALROG_BLOB_SUFFIX is used by the script implicitly to avoid possible CLI
--# issues with suffixes starting with "-"
--test $BALROG_BLOB_SUFFIX
--
--
--ARTIFACTS_DIR="/home/worker/artifacts"
--mkdir -p "$ARTIFACTS_DIR"
--
--curl --location --retry 10 --retry-delay 10 -o "$ARTIFACTS_DIR/manifest.json" \
--    "$PARENT_TASK_ARTIFACTS_URL_PREFIX/manifest.json"
--
--cat "$ARTIFACTS_DIR/manifest.json"
--python /home/worker/bin/funsize-balrog-submitter-complete.py  \
--    --manifest "$ARTIFACTS_DIR/manifest.json" \
--    -a "$BALROG_API_ROOT" \
--    --verbose \
--    $EXTRA_BALROG_SUBMITTER_PARAMS
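The comment in submit_complete.sh explains why BALROG_BLOB_SUFFIX travels through the environment rather than the command line: suffixes typically begin with "-", and option parsers read a leading dash as the start of another flag. A small illustration with Python's argparse (--suffix here is a hypothetical option, not one of the real script's flags):

# Why a leading-dash value is awkward on the CLI: argparse treats it as a flag.
import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument("--suffix")

# parser.parse_args(["--suffix", "-beta1"]) fails with "expected one argument",
# because argparse takes "-beta1" for another option. Reading the value from
# the environment, as the script does, side-steps the problem entirely:
suffix = os.environ.get("BALROG_BLOB_SUFFIX")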
-diff --git a/taskcluster/docker/funsize-update-generator/Dockerfile b/taskcluster/docker/funsize-update-generator/Dockerfile
-deleted file mode 100644
---- a/taskcluster/docker/funsize-update-generator/Dockerfile
-+++ /dev/null
-@@ -1,37 +0,0 @@
--FROM ubuntu:vivid
--MAINTAINER Rail Aliiev <rail@mozilla.com>
--
--# Required software
--ENV DEBIAN_FRONTEND noninteractive
--# Chain apt-get commands with apt-get clean in a single docker RUN
--# to make sure that files are removed within a single docker layer
--RUN apt-get update -q && \
--    apt-get install -yyq --no-install-recommends \
--    python python-setuptools python-cryptography  libgetopt-simple-perl \
--    bzip2 clamav clamav-freshclam python-requests python-sh curl \
--    python-dev gcc liblzma-dev xz-utils && \
--    apt-get clean
--RUN useradd -d /home/worker -s /bin/bash -m worker
--COPY requirements.txt /tmp/
--
--# Freshclam may be flaky, retry if it fails
--RUN for i in 1 2 3 4 5; do freshclam --verbose && break || sleep 15; done
--
--# python-pip installs a lot of dependencies increasing the size of an image
--# drastically. Using easy_install saves us almost 200M.
--RUN easy_install pip
--RUN pip install -r /tmp/requirements.txt
--
--# scripts
--RUN mkdir /home/worker/bin
--COPY scripts/* /home/worker/bin/
--COPY runme.sh /runme.sh
--COPY recompress.sh /recompress.sh
--RUN chmod 755 /home/worker/bin/* /*.sh
--RUN mkdir /home/worker/keys
--COPY *.pubkey /home/worker/keys/
--
--ENV           HOME          /home/worker
--ENV           SHELL         /bin/bash
--ENV           USER          worker
--ENV           LOGNAME       worker
-diff --git a/taskcluster/docker/funsize-update-generator/Makefile b/taskcluster/docker/funsize-update-generator/Makefile
-deleted file mode 100644
---- a/taskcluster/docker/funsize-update-generator/Makefile
-+++ /dev/null
-@@ -1,17 +0,0 @@
--DOCKERIO_USERNAME =$(error DOCKERIO_USERNAME should be set)
--IMAGE_NAME = funsize-update-generator
--FULL_IMAGE_NAME = $(DOCKERIO_USERNAME)/$(IMAGE_NAME)
--
--build:
--	docker build -t $(FULL_IMAGE_NAME) --no-cache --rm .
--
--push:
--	docker push $(FULL_IMAGE_NAME):latest
--
--pull:
--	docker pull $(FULL_IMAGE_NAME):latest
--
--update_pubkeys:
--	curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/nightly_aurora_level3_primary.der | openssl x509 -inform DER -pubkey -noout > nightly.pubkey
--	curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/dep1.der | openssl x509 -inform DER -pubkey -noout > dep.pubkey
--	curl https://hg.mozilla.org/mozilla-central/raw-file/default/toolkit/mozapps/update/updater/release_primary.der | openssl x509 -inform DER -pubkey -noout > release.pubkey
-diff --git a/taskcluster/docker/funsize-update-generator/README b/taskcluster/docker/funsize-update-generator/README
-deleted file mode 100644
---- a/taskcluster/docker/funsize-update-generator/README
-+++ /dev/null
-@@ -1,7 +0,0 @@
--
--To run this locally for testing/development purposes:
--
--1. Find a funsize generating task ID
--2. make pull DOCKERIO_USERNAME=mozillareleases
--3. docker run -t -e SHA1_SIGNING_CERT='nightly_sha1' -e SHA384_SIGNING_CERT='nightly_sha384' -e TASK_ID=LD5HUGP5QNeQdFKNTTuyCg mozillareleases/funsize-update-generator /runme.sh
--
-diff --git a/taskcluster/docker/funsize-update-generator/dep.pubkey b/taskcluster/docker/funsize-update-generator/dep.pubkey
-deleted file mode 100644
---- a/taskcluster/docker/funsize-update-generator/dep.pubkey
-+++ /dev/null
-@@ -1,9 +0,0 @@
-------BEGIN PUBLIC KEY-----
--MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzjHSobdeiQ3JHP/cCIOp
--WaX9y12rL5mIo9OR9bpqEZdD0yXJJJeZA887Mv8slqsM+qObMUpKvfEE6zyYPIZJ
--ANib31neI5BBYHhfhf2f5EnkilSYlmU3Gx+uRsmsdt58PpYe124tOAGgca/8bUy3
--eb6kUUTwvMI0oWQuPkGUaoHVQyj/bBMTrIkyF3UbfFtiX/SfOPvIoabNUe+pQHUe
--pqC2+RxzDGj+shTq/hYhtXlptFzsEEb2+0foLy0MY8C30dP2QqbM2iavvr/P8OcS
--Gm3H0TQcRzIEBzvPcIjiZi1nQj/r/3TlYRNCjuYT/HsNLXrB/U5Tc990jjAUJxdH
--0wIDAQAB
-------END PUBLIC KEY-----
-diff --git a/taskcluster/docker/funsize-update-generator/nightly_sha1.pubkey b/taskcluster/docker/funsize-update-generator/nightly_sha1.pubkey
-deleted file mode 100644
---- a/taskcluster/docker/funsize-update-generator/nightly_sha1.pubkey
-+++ /dev/null
-@@ -1,9 +0,0 @@
-------BEGIN PUBLIC KEY-----
--MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4j/IS3gWbyVVnWn4ZRnC
--Fuzb6VAaHa0I+4E504ekhVAhbKlSfBstkLbXajdjUVAJpn02zWnOaTl5KAdpDpIp
--SkdA4mK20ej3/Ij7gIt8IwaX+ArXL8mP84pxDn5BgaNADm3206Z6YQzc/TDYu529
--qkDFmLqNUVRJAhPO+qqhKHIcVGh8HUHXN6XV1qOFip+UU0M474jAGgurVmAv8Rh7
--VvM0v5KmB6V6WHwM5gwjg2yRY/o+xYIsNeSes9rpp+MOs/RnUA6LI4WZGY4YahvX
--VclIXBDgbWPYtojexIJkmYj8JIIRsh3eCsrRRe14fq7cBurp3CxBYMlDHf0RUoaq
--hQIDAQAB
-------END PUBLIC KEY-----
-diff --git a/taskcluster/docker/funsize-update-generator/nightly_sha384.pubkey b/taskcluster/docker/funsize-update-generator/nightly_sha384.pubkey
-deleted file mode 100644
---- a/taskcluster/docker/funsize-update-generator/nightly_sha384.pubkey
-+++ /dev/null
-@@ -1,14 +0,0 @@
-------BEGIN PUBLIC KEY-----
--MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAth151NGY8PBzn0bii9Yc
--AjYHZDwP9Lj1c3owG0zLqW2kPcdp86QTAcoYunHGYFFakNG3tooZhzwkMjZ1OrXc
--ERjD6AuVSGIBdsKtKP4vLtMjDUteFN4K2+rveozcnYFZuTWEajGu8uoYsv4QgdEA
--nTBC39j0J33xlfUR+XKuxzhxNrFX+fRFWuLDJrPziMcVA/mzf0gXlhtEsfV0HYyg
--yWpHdIWww+llysD1QOQAHk94Ss8c/4BFXFxlwlLeNlB1ZqLm1LsNy0jUy9EHeO3C
--H6eqmiFEbpdjlrkJdgR1NcTzeY/Qf/nhWH6BAZrSapQycF7OSLU+rFWMQUElSPLc
--NVl7oNAAfSYLTvRjPGi+mJK3wGFQw1EpwQl+elE1oj4+sHvIVpDrLb6btpxfr1cZ
--pR4Di/hkOIymxEDWvtUhOxUXnYbDKQSDcAHKM/xR3sdIAiVtVuL4hyBwlAqkQc2j
--H+SmnCbazgnq5+dN4y5DjoOgbZQ/koE3s3bUzzMeIxaul9v4gMtGROw3PQ3OZcP0
--lgjPRhY+NeTnWMo2nGb4/eS6Cn2qFLfbEQjsj6pJJBNKfvK/gm1jXb3PgXXdf8+d
--2xTPOX8QNpSK7C0w4vYlvSpYZlsx2cznEOV6LDqP0QHUnmd/k1xWRRGiQ7gtT+BV
--Fn0h7JyTGmEdFu6l4OhS8hMCAwEAAQ==
-------END PUBLIC KEY-----
-diff --git a/taskcluster/docker/funsize-update-generator/recompress.sh b/taskcluster/docker/funsize-update-generator/recompress.sh
-deleted file mode 100644
---- a/taskcluster/docker/funsize-update-generator/recompress.sh
-+++ /dev/null
-@@ -1,24 +0,0 @@
--#!/bin/sh
--
--set -xe
--
--test $TASK_ID
--test $SHA1_SIGNING_CERT
--test $SHA384_SIGNING_CERT
--
--ARTIFACTS_DIR="/home/worker/artifacts"
--mkdir -p "$ARTIFACTS_DIR"
--
--curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json \
--    "https://queue.taskcluster.net/v1/task/$TASK_ID"
--
--if [ -n "$OUTPUT_FILENAME" ]; then
--    EXTRA_PARAMS="--output-filename $OUTPUT_FILENAME $EXTRA_PARAMS"
--fi
--
--/home/worker/bin/recompress.py \
--    --artifacts-dir "$ARTIFACTS_DIR" \
--    --task-definition /home/worker/task.json \
--    --sha1-signing-cert "/home/worker/keys/${SHA1_SIGNING_CERT}.pubkey" \
--    --sha384-signing-cert "/home/worker/keys/${SHA384_SIGNING_CERT}.pubkey" \
--    $EXTRA_PARAMS
-diff --git a/taskcluster/docker/funsize-update-generator/release_sha1.pubkey b/taskcluster/docker/funsize-update-generator/release_sha1.pubkey
-deleted file mode 100644
---- a/taskcluster/docker/funsize-update-generator/release_sha1.pubkey
-+++ /dev/null
-@@ -1,9 +0,0 @@
-------BEGIN PUBLIC KEY-----
--MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvH4r94FpQ0gvr1hhTfV9
--NUeWPJ5CN6TZRq7v/Dc4nkJ1J4IP1B3UEii34tcNKpy1nKupiZuTT6T1zQYT+z5x
--3UkDF9qQboQ8RNb/BEz/cN3on/LTEnZ7YSraRL11M6cEB8mvmJxddCEquwqccRbs
--Usp8WUB7uRv1w6Anley7N9F/LE1iLPwJasZypRnzWb3aYsJy0cMFOYy+OXVdpktn
--qYqlNIjnt84u4Nil6UXnBbIJNUVOCY8wOFClNvVpubjPkWK1gtdWy3x/hJU5RpAO
--K9cnHxq4M/I4SUWTWO3r7yweQiHG4Jyoc7sP1jkwjBkSG93sDEycfwOdOoZft3wN
--sQIDAQAB
-------END PUBLIC KEY-----
-diff --git a/taskcluster/docker/funsize-update-generator/release_sha384.pubkey b/taskcluster/docker/funsize-update-generator/release_sha384.pubkey
-deleted file mode 100644
---- a/taskcluster/docker/funsize-update-generator/release_sha384.pubkey
-+++ /dev/null
-@@ -1,14 +0,0 @@
-------BEGIN PUBLIC KEY-----
--MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxCHbY+fP3dvaP9XVbmK6
--i4rbqo72INEWgDSYbr/DIYfCSzHC9H8pU8dyjt+Nd8OtoUZtBD1N9fP7SlrvPZSI
--ZSW4k0e9Ky5aV3Uy+ivamSvYszkhqdeP2y7MBu73XHKYONR9PnKa+ovmREwSEI+h
--1e0ebm8zvF7Ndwx0mOeZkDu9SDkDGg4aj2xrJyBBOuGVjuctMZ6l1davANI5xiJ0
--GBEU3tR1gJs1T4vLBis5mEFn9y4kgyw/HrxmRYGnZL4fLb2fTI+pNW0Twu3KWwwi
--LgLkkVrNWiHSk7YWqxjcg5IA3pQETQ17paTHoB5Mnkvuh6MkDXvRG5VgAHZAigr6
--fJMsasOUaBeos/cD1LDQEIObpetlxc0Fiu/lvUts0755otkhI+yv35+wUa6GJrsE
--CsT7c/LaFtQXg06aGXbMLDn0bE/e+nw9KWT/rE1iYXMFkzrqoTeYJ+v7/fD/ywU8
--m8l4CZmXxzd/RogMrM3xl+j4ucAAltDQyL4yLySaIT05w5U8z2zJDEXFvpFDSRfF
--K3kjLwGub7wNwaQDuh/msIUdavu4g+GNikCXAJ8AssLuYatyHoltd2tf+EIIDW3U
--zzLpymnLo3cAz3IPfXyqVB+mcLcpqbHjl3hWms6l1wGtz6S4WqdrWs/KfzS5EyDK
--r63xn1Rg/XFmR57EsFEXAZ8CAwEAAQ==
-------END PUBLIC KEY-----
-diff --git a/taskcluster/docker/funsize-update-generator/requirements.txt b/taskcluster/docker/funsize-update-generator/requirements.txt
-deleted file mode 100644
---- a/taskcluster/docker/funsize-update-generator/requirements.txt
-+++ /dev/null
-@@ -1,3 +0,0 @@
--mar==2.1.2
--backports.lzma==0.0.8
--redo
-diff --git a/taskcluster/docker/funsize-update-generator/runme.sh b/taskcluster/docker/funsize-update-generator/runme.sh
-deleted file mode 100644
---- a/taskcluster/docker/funsize-update-generator/runme.sh
-+++ /dev/null
-@@ -1,27 +0,0 @@
--#!/bin/sh
--
--set -xe
--
--test $TASK_ID
--test $SHA1_SIGNING_CERT
--test $SHA384_SIGNING_CERT
--
--ARTIFACTS_DIR="/home/worker/artifacts"
--mkdir -p "$ARTIFACTS_DIR"
--
--curl --location --retry 10 --retry-delay 10 -o /home/worker/task.json \
--    "https://queue.taskcluster.net/v1/task/$TASK_ID"
--
--# enable locale cache
--export MBSDIFF_HOOK="/home/worker/bin/mbsdiff_hook.sh -c /tmp/fs-cache"
--
--if [ -n "$FILENAME_TEMPLATE" ]; then
--    EXTRA_PARAMS="--filename-template $FILENAME_TEMPLATE $EXTRA_PARAMS"
--fi
--
--/home/worker/bin/funsize.py \
--    --artifacts-dir "$ARTIFACTS_DIR" \
--    --task-definition /home/worker/task.json \
--    --sha1-signing-cert "/home/worker/keys/${SHA1_SIGNING_CERT}.pubkey" \
--    --sha384-signing-cert "/home/worker/keys/${SHA384_SIGNING_CERT}.pubkey" \
--    $EXTRA_PARAMS
-diff --git a/taskcluster/docker/funsize-update-generator/scripts/funsize.py b/taskcluster/docker/funsize-update-generator/scripts/funsize.py
-deleted file mode 100755
---- a/taskcluster/docker/funsize-update-generator/scripts/funsize.py
-+++ /dev/null
-@@ -1,317 +0,0 @@
--#!/usr/bin/env python
--
--import ConfigParser
--import argparse
--import functools
--import hashlib
--import json
--import logging
--import os
--import shutil
--import tempfile
--import requests
--import sh
--
--import redo
--from mardor.reader import MarReader
--from mardor.signing import get_keysize
--
--
--log = logging.getLogger(__name__)
--ALLOWED_URL_PREFIXES = [
--    "http://download.cdn.mozilla.net/pub/mozilla.org/firefox/nightly/",
--    "http://download.cdn.mozilla.net/pub/firefox/nightly/",
--    "https://mozilla-nightly-updates.s3.amazonaws.com",
--    "https://queue.taskcluster.net/",
--    "http://ftp.mozilla.org/",
--    "http://download.mozilla.org/",
--    "https://archive.mozilla.org/",
--]
--
--DEFAULT_FILENAME_TEMPLATE = "{appName}-{branch}-{version}-{platform}-" \
--                            "{locale}-{from_buildid}-{to_buildid}.partial.mar"
--
--
--def verify_signature(mar, certs):
--    log.info("Checking %s signature", mar)
--    with open(mar, 'rb') as mar_fh:
--        m = MarReader(mar_fh)
--        m.verify(verify_key=certs.get(m.signature_type))
--
--
--def is_lzma_compressed_mar(mar):
--    log.info("Checking %s for lzma compression", mar)
--    result = MarReader(open(mar, 'rb')).compression_type == 'xz'
--    if result:
--        log.info("%s is lzma compressed", mar)
--    else:
--        log.info("%s is not lzma compressed", mar)
--    return result
--
--
--@redo.retriable()
--def download(url, dest, mode=None):
--    log.debug("Downloading %s to %s", url, dest)
--    r = requests.get(url)
--    r.raise_for_status()
--
--    bytes_downloaded = 0
--    with open(dest, 'wb') as fd:
--        for chunk in r.iter_content(4096):
--            fd.write(chunk)
--            bytes_downloaded += len(chunk)
--
--    log.debug('Downloaded %s bytes', bytes_downloaded)
--    if 'content-length' in r.headers:
--        log.debug('Content-Length: %s bytes', r.headers['content-length'])
--        if bytes_downloaded != int(r.headers['content-length']):
--            raise IOError('Unexpected number of bytes downloaded')
--
--    if mode:
--        log.debug("chmod %o %s", mode, dest)
--        os.chmod(dest, mode)
--
--
--def unpack(work_env, mar, dest_dir):
--    os.mkdir(dest_dir)
--    unwrap_cmd = sh.Command(os.path.join(work_env.workdir,
--                                         "unwrap_full_update.pl"))
--    log.debug("Unwrapping %s", mar)
--    env = work_env.env
--    if not is_lzma_compressed_mar(mar):
--        env['MAR_OLD_FORMAT'] = '1'
--    elif 'MAR_OLD_FORMAT' in env:
--        del env['MAR_OLD_FORMAT']
--    out = unwrap_cmd(mar, _cwd=dest_dir, _env=env, _timeout=240,
--                     _err_to_out=True)
--    if out:
--        log.debug(out)
--
--
--def find_file(directory, filename):
--    log.debug("Searching for %s in %s", filename, directory)
--    for root, dirs, files in os.walk(directory):
--        if filename in files:
--            f = os.path.join(root, filename)
--            log.debug("Found %s", f)
--            return f
--
--
--def get_option(directory, filename, section, option):
--    log.debug("Exctracting [%s]: %s from %s/**/%s", section, option, directory,
--              filename)
--    f = find_file(directory, filename)
--    config = ConfigParser.ConfigParser()
--    config.read(f)
--    rv = config.get(section, option)
--    log.debug("Found %s", rv)
--    return rv
--
--
--def generate_partial(work_env, from_dir, to_dir, dest_mar, channel_ids,
--                     version, use_old_format):
--    log.debug("Generating partial %s", dest_mar)
--    env = work_env.env
--    env["MOZ_PRODUCT_VERSION"] = version
--    env["MOZ_CHANNEL_ID"] = channel_ids
--    if use_old_format:
--        env['MAR_OLD_FORMAT'] = '1'
--    elif 'MAR_OLD_FORMAT' in env:
--        del env['MAR_OLD_FORMAT']
--    make_incremental_update = os.path.join(work_env.workdir,
--                                           "make_incremental_update.sh")
--    out = sh.bash(make_incremental_update, dest_mar, from_dir, to_dir,
--                  _cwd=work_env.workdir, _env=env, _timeout=900,
--                  _err_to_out=True)
--    if out:
--        log.debug(out)
--
--
--def get_hash(path, hash_type="sha512"):
--    h = hashlib.new(hash_type)
--    with open(path, "rb") as f:
--        for chunk in iter(functools.partial(f.read, 4096), ''):
--            h.update(chunk)
--    return h.hexdigest()
--
--
--class WorkEnv(object):
--
--    def __init__(self):
--        self.workdir = tempfile.mkdtemp()
--
--    def setup(self):
--        self.download_unwrap()
--        self.download_martools()
--
--    def download_unwrap(self):
--        # unwrap_full_update.pl is not too sensitive to the revision
--        url = "https://hg.mozilla.org/mozilla-central/raw-file/default/" \
--            "tools/update-packaging/unwrap_full_update.pl"
--        download(url, dest=os.path.join(self.workdir, "unwrap_full_update.pl"),
--                 mode=0o755)
--
--    def download_buildsystem_bits(self, repo, revision):
--        prefix = "{repo}/raw-file/{revision}/tools/update-packaging"
--        prefix = prefix.format(repo=repo, revision=revision)
--        for f in ("make_incremental_update.sh", "common.sh"):
--            url = "{prefix}/{f}".format(prefix=prefix, f=f)
--            download(url, dest=os.path.join(self.workdir, f), mode=0o755)
--
--    def download_martools(self):
--        # TODO: check if the tools have to be branch specific
--        prefix = "https://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/" \
--            "latest-mozilla-central/mar-tools/linux64"
--        for f in ("mar", "mbsdiff"):
--            url = "{prefix}/{f}".format(prefix=prefix, f=f)
--            download(url, dest=os.path.join(self.workdir, f), mode=0o755)
--
--    def cleanup(self):
--        shutil.rmtree(self.workdir)
--
--    @property
--    def env(self):
--        my_env = os.environ.copy()
--        my_env['LC_ALL'] = 'C'
--        my_env['MAR'] = os.path.join(self.workdir, "mar")
--        my_env['MBSDIFF'] = os.path.join(self.workdir, "mbsdiff")
--        return my_env
--
--
--def verify_allowed_url(mar):
--    if not any(mar.startswith(prefix) for prefix in ALLOWED_URL_PREFIXES):
--        raise ValueError("{mar} is not in allowed URL prefixes: {p}".format(
--            mar=mar, p=ALLOWED_URL_PREFIXES
--        ))
--
--
--def main():
--    parser = argparse.ArgumentParser()
--    parser.add_argument("--artifacts-dir", required=True)
--    parser.add_argument("--sha1-signing-cert", required=True)
--    parser.add_argument("--sha384-signing-cert", required=True)
--    parser.add_argument("--task-definition", required=True,
--                        type=argparse.FileType('r'))
--    parser.add_argument("--filename-template",
--                        default=DEFAULT_FILENAME_TEMPLATE)
--    parser.add_argument("--no-freshclam", action="store_true", default=False,
--                        help="Do not refresh ClamAV DB")
--    parser.add_argument("-q", "--quiet", dest="log_level",
--                        action="store_const", const=logging.WARNING,
--                        default=logging.DEBUG)
--    args = parser.parse_args()
--
--    logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s",
--                        level=args.log_level)
--    task = json.load(args.task_definition)
--    # TODO: verify task["extra"]["funsize"]["partials"] with jsonschema
--
--    signing_certs = {
--        'sha1': open(args.sha1_signing_cert, 'rb').read(),
--        'sha384': open(args.sha384_signing_cert, 'rb').read(),
--    }
--
--    assert(get_keysize(signing_certs['sha1']) == 2048)
--    assert(get_keysize(signing_certs['sha384']) == 4096)
--
--    if args.no_freshclam:
--        log.info("Skipping freshclam")
--    else:
--        log.info("Refreshing clamav db...")
--        try:
--            redo.retry(lambda: sh.freshclam("--stdout", "--verbose",
--                                            _timeout=300, _err_to_out=True))
--            log.info("Done.")
--        except sh.ErrorReturnCode:
--            log.warning("Freshclam failed, skipping DB update")
--    manifest = []
--    for e in task["extra"]["funsize"]["partials"]:
--        for mar in (e["from_mar"], e["to_mar"]):
--            verify_allowed_url(mar)
--
--        work_env = WorkEnv()
--        # TODO: run setup once
--        work_env.setup()
--        complete_mars = {}
--        use_old_format = False
--        for mar_type, f in (("from", e["from_mar"]), ("to", e["to_mar"])):
--            dest = os.path.join(work_env.workdir, "{}.mar".format(mar_type))
--            unpack_dir = os.path.join(work_env.workdir, mar_type)
--            download(f, dest)
--            if not os.getenv("MOZ_DISABLE_MAR_CERT_VERIFICATION"):
--                verify_signature(dest, signing_certs)
--            complete_mars["%s_size" % mar_type] = os.path.getsize(dest)
--            complete_mars["%s_hash" % mar_type] = get_hash(dest)
--            unpack(work_env, dest, unpack_dir)
--            if mar_type == 'from':
--                version = get_option(unpack_dir, filename="application.ini",
--                                     section="App", option="Version")
--                major = int(version.split(".")[0])
--                # The updater for versions less than 56.0 requires BZ2
--                # compressed MAR files
--                if major < 56:
--                    use_old_format = True
--                    log.info("Forcing BZ2 compression for %s", f)
--            log.info("AV-scanning %s ...", unpack_dir)
--            sh.clamscan("-r", unpack_dir, _timeout=600, _err_to_out=True)
--            log.info("Done.")
--
--        path = os.path.join(work_env.workdir, "to")
--        from_path = os.path.join(work_env.workdir, "from")
--        mar_data = {
--            "ACCEPTED_MAR_CHANNEL_IDS": get_option(
--                path, filename="update-settings.ini", section="Settings",
--                option="ACCEPTED_MAR_CHANNEL_IDS"),
--            "version": get_option(path, filename="application.ini",
--                                  section="App", option="Version"),
--            "to_buildid": get_option(path, filename="application.ini",
--                                     section="App", option="BuildID"),
--            "from_buildid": get_option(from_path, filename="application.ini",
--                                       section="App", option="BuildID"),
--            "appName": get_option(from_path, filename="application.ini",
--                                  section="App", option="Name"),
--            # Use Gecko repo and rev from platform.ini, not application.ini
--            "repo": get_option(path, filename="platform.ini", section="Build",
--                               option="SourceRepository"),
--            "revision": get_option(path, filename="platform.ini",
--                                   section="Build", option="SourceStamp"),
--            "from_mar": e["from_mar"],
--            "to_mar": e["to_mar"],
--            "platform": e["platform"],
--            "locale": e["locale"],
--        }
--        # Override ACCEPTED_MAR_CHANNEL_IDS if needed
--        if "ACCEPTED_MAR_CHANNEL_IDS" in os.environ:
--            mar_data["ACCEPTED_MAR_CHANNEL_IDS"] = os.environ["ACCEPTED_MAR_CHANNEL_IDS"]
--        for field in ("update_number", "previousVersion",
--                      "previousBuildNumber", "toVersion",
--                      "toBuildNumber"):
--            if field in e:
--                mar_data[field] = e[field]
--        mar_data.update(complete_mars)
--        # If branch is not set explicitly, use the repo name
--        mar_data["branch"] = e.get("branch",
--                                   mar_data["repo"].rstrip("/").split("/")[-1])
--        mar_name = args.filename_template.format(**mar_data)
--        mar_data["mar"] = mar_name
--        dest_mar = os.path.join(work_env.workdir, mar_name)
--        # TODO: download these once
--        work_env.download_buildsystem_bits(repo=mar_data["repo"],
--                                           revision=mar_data["revision"])
--        generate_partial(work_env, from_path, path, dest_mar,
--                         mar_data["ACCEPTED_MAR_CHANNEL_IDS"],
--                         mar_data["version"],
--                         use_old_format)
--        mar_data["size"] = os.path.getsize(dest_mar)
--        mar_data["hash"] = get_hash(dest_mar)
--
--        shutil.copy(dest_mar, args.artifacts_dir)
--        work_env.cleanup()
--        manifest.append(mar_data)
--    manifest_file = os.path.join(args.artifacts_dir, "manifest.json")
--    with open(manifest_file, "w") as fp:
--        json.dump(manifest, fp, indent=2, sort_keys=True)
--
--
--if __name__ == '__main__':
--    main()
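
funsize.py above consumes work items from task["extra"]["funsize"]["partials"].
A minimal sketch of one such entry, assuming hypothetical URLs and values (only
the field names come from the script; the URL prefixes must match
ALLOWED_URL_PREFIXES):

    # Hypothetical entry of task["extra"]["funsize"]["partials"] as read by
    # funsize.py; field names are from the script, values are examples only.
    partial_entry = {
        "from_mar": "https://archive.mozilla.org/pub/firefox/example/old.complete.mar",
        "to_mar": "https://archive.mozilla.org/pub/firefox/example/new.complete.mar",
        "platform": "linux64",
        "locale": "en-US",
        # Optional keys copied into the output manifest when present:
        # "update_number", "previousVersion", "previousBuildNumber",
        # "toVersion", "toBuildNumber", "branch"
    }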
-diff --git a/taskcluster/docker/funsize-update-generator/scripts/mbsdiff_hook.sh b/taskcluster/docker/funsize-update-generator/scripts/mbsdiff_hook.sh
-deleted file mode 100755
---- a/taskcluster/docker/funsize-update-generator/scripts/mbsdiff_hook.sh
-+++ /dev/null
-@@ -1,135 +0,0 @@
--#!/bin/bash
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--#
--# This tool contains functions used to handle/enable funsize
--# Author: Mihai Tabara
--#
--
--HOOK=
--SERVER_URL=
--LOCAL_CACHE_DIR=
--
--getsha512(){
--    echo "$(openssl sha512 "${1}" | awk '{print $2}')"
--}
--
--print_usage(){
--    echo "$(basename $0) -A SERVER-URL [-c LOCAL-CACHE-DIR-PATH] [-g] [-u] PATH-FROM-URL PATH-TO-URL PATH-PATCH"
--    echo "Script that saves/retrieves from cache presumptive patches as args"
--    echo ""
--    echo "-A SERVER-URL - host where to send the files"
--    echo "-c LOCAL-CACHE-DIR-PATH local path to which patches are cached"
--    echo "-g pre hook - tests whether patch already in cache"
--    echo "-u post hook - upload patch to cache for future use"
--    echo ""
--    echo "PATH-FROM-URL     : path on disk for source file"
--    echo "PATH-TO-URL       : path on disk for destination file"
--    echo "PATH-PATCH        : path on disk for patch between source and destination"
--}
--
--upload_patch(){
--    sha_from=`getsha512 "$1"`
--    sha_to=`getsha512 "$2"`
--    patch_path="$3"
--
--    # save to local cache first
--    if [ -n "$LOCAL_CACHE_DIR" ]; then
--        if mkdir -p "$LOCAL_CACHE_DIR/$sha_from" >&2; then
--            cp -avf "$patch_path" "$LOCAL_CACHE_DIR/$sha_from/$sha_to"
--            echo "$patch_path saved on local cache!"
--        fi
--    fi
--    # The remote cache implementation is not used. The code below is kept
--    # for reference only.
--    return 0
--
--    # send it over to funsize
--    cmd="curl -sSw %{http_code} -o /dev/null -X POST $SERVER_URL -F sha_from="$sha_from" -F sha_to="$sha_to" -F patch_file="@$patch_path""
--    ret_code=`$cmd`
--
--    if [ $ret_code -eq 200 ]; then
--        echo "$patch_path Successful uploaded to funsize!"
--        return 0
--    fi
--
--    echo "$patch_path Failed to be uploaded to funsize!"
--    return 1
--}
--
--get_patch(){
--    sha_from=`getsha512 "$1"`
--    sha_to=`getsha512 "$2"`
--    destination_file="$3"
--    tmp_file="$destination_file.tmp"
--
--    # try to retrieve from local cache first
--    if [ -r "$LOCAL_CACHE_DIR/$sha_from/$sha_to" ]; then
--        cp -avf "$LOCAL_CACHE_DIR/$sha_from/$sha_to" "$destination_file"
--        echo "Successful retrieved $destination_file from local cache!"
--        return 0
--    else
--        echo "File is not in the locale cache"
--        return 1
--    fi
--    # The remote cache implementation is not used. The code below is kept
--    # for reference only.
--
--    # if unsuccessful, try to retrieve from funsize
--    cmd="curl -LsSGw %{http_code} $SERVER_URL/$sha_from/$sha_to -o $tmp_file"
--    ret_code=`$cmd`
--
--    if [ $ret_code -eq 200 ]; then
--        mv "$tmp_file" "$destination_file"
--        echo "Successful retrieved $destination_file from funsize!"
--        return 0
--    fi
--
--    rm  -f "$tmp_file"
--    echo "Failed to retrieve $destination_file from funsize!"
--    return 1
--}
--
--OPTIND=1
--
--while getopts ":A:c:gu" option; do
--    case $option in
--        A)
--            SERVER_URL="$OPTARG"
--            ;;
--        c)
--            LOCAL_CACHE_DIR="$OPTARG"
--            ;;
--        g)
--            HOOK="PRE"
--            ;;
--        u)
--            HOOK="POST"
--            ;;
--        \?)
--            echo "Invalid option: -$OPTARG" >&2
--            print_usage
--            exit 1
--            ;;
--        :)
--            echo "Option -$OPTARG requires an argument." >&2
--            print_usage
--            exit 1
--            ;;
--        *)
--            echo "Unimplemented option: -$OPTARG" >&2
--            print_usage
--            exit 1
--            ;;
--    esac
--done
--shift $((OPTIND-1))
--
--if [ "$HOOK" == "PRE" ]; then
--    get_patch "$1" "$2" "$3"
--elif [ "$HOOK" == "POST" ]; then
--    upload_patch "$1" "$2" "$3"
--fi
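
mbsdiff_hook.sh keys cached patches on the SHA-512 digests of the source and
destination files, storing each patch at LOCAL-CACHE-DIR/<sha_from>/<sha_to>.
A minimal Python sketch of the same lookup, with hashlib standing in for the
script's openssl call:

    import hashlib
    import os

    def sha512_of(path):
        # Equivalent of the script's getsha512() helper (openssl sha512).
        h = hashlib.sha512()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(4096), b""):
                h.update(chunk)
        return h.hexdigest()

    def cached_patch_path(cache_dir, path_from, path_to):
        # Patches are cached as <cache-dir>/<sha512(from)>/<sha512(to)>.
        return os.path.join(cache_dir, sha512_of(path_from),
                            sha512_of(path_to))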
-diff --git a/taskcluster/docker/funsize-update-generator/scripts/recompress.py b/taskcluster/docker/funsize-update-generator/scripts/recompress.py
-deleted file mode 100755
---- a/taskcluster/docker/funsize-update-generator/scripts/recompress.py
-+++ /dev/null
-@@ -1,239 +0,0 @@
--#!/usr/bin/env python
--from __future__ import absolute_import, print_function
--
--import ConfigParser
--import argparse
--import functools
--import hashlib
--import json
--import logging
--import os
--import shutil
--import tempfile
--import requests
--import sh
--
--import redo
--from mardor.reader import MarReader
--from mardor.signing import get_keysize
--
--
--log = logging.getLogger(__name__)
--ALLOWED_URL_PREFIXES = [
--    "http://download.cdn.mozilla.net/pub/mozilla.org/firefox/nightly/",
--    "http://download.cdn.mozilla.net/pub/firefox/nightly/",
--    "https://mozilla-nightly-updates.s3.amazonaws.com",
--    "https://queue.taskcluster.net/",
--    "http://ftp.mozilla.org/",
--    "http://download.mozilla.org/",
--    "https://archive.mozilla.org/",
--]
--
--
--def find_file(directory, filename):
--    log.debug("Searching for %s in %s", filename, directory)
--    for root, dirs, files in os.walk(directory):
--        if filename in files:
--            f = os.path.join(root, filename)
--            log.debug("Found %s", f)
--            return f
--
--
--def get_option(directory, filename, section, option):
--    log.debug("Exctracting [%s]: %s from %s/**/%s", section, option, directory,
--              filename)
--    f = find_file(directory, filename)
--    config = ConfigParser.ConfigParser()
--    config.read(f)
--    rv = config.get(section, option)
--    log.debug("Found %s", rv)
--    return rv
--
--
--def verify_signature(mar, certs):
--    log.info("Checking %s signature", mar)
--    with open(mar, 'rb') as mar_fh:
--        m = MarReader(mar_fh)
--        m.verify(verify_key=certs.get(m.signature_type))
--
--
--def is_lzma_compressed_mar(mar):
--    log.info("Checking %s for lzma compression", mar)
--    result = MarReader(open(mar, 'rb')).compression_type == 'xz'
--    if result:
--        log.info("%s is lzma compressed", mar)
--    else:
--        log.info("%s is not lzma compressed", mar)
--    return result
--
--
--@redo.retriable()
--def download(url, dest, mode=None):
--    log.debug("Downloading %s to %s", url, dest)
--    r = requests.get(url)
--    r.raise_for_status()
--
--    bytes_downloaded = 0
--    with open(dest, 'wb') as fd:
--        for chunk in r.iter_content(4096):
--            fd.write(chunk)
--            bytes_downloaded += len(chunk)
--
--    log.debug('Downloaded %s bytes', bytes_downloaded)
--    if 'content-length' in r.headers:
--        log.debug('Content-Length: %s bytes', r.headers['content-length'])
--        if bytes_downloaded != int(r.headers['content-length']):
--            raise IOError('Unexpected number of bytes downloaded')
--
--    if mode:
--        log.debug("chmod %o %s", mode, dest)
--        os.chmod(dest, mode)
--
--
--def change_mar_compression(work_env, file_path):
--    """Toggles MAR compression format between BZ2 and LZMA"""
--    cmd = sh.Command(os.path.join(work_env.workdir,
--                                  "change_mar_compression.pl"))
--    log.debug("Changing MAR compression of %s", file_path)
--    out = cmd("-r", file_path, _env=work_env.env, _timeout=240,
--              _err_to_out=True)
--    if out:
--        log.debug(out)
--
--
--def unpack(work_env, mar, dest_dir):
--    os.mkdir(dest_dir)
--    unwrap_cmd = sh.Command(os.path.join(work_env.workdir,
--                                         "unwrap_full_update.pl"))
--    log.debug("Unwrapping %s", mar)
--    env = work_env.env
--    if not is_lzma_compressed_mar(mar):
--        env['MAR_OLD_FORMAT'] = '1'
--    elif 'MAR_OLD_FORMAT' in env:
--        del env['MAR_OLD_FORMAT']
--    out = unwrap_cmd(mar, _cwd=dest_dir, _env=env, _timeout=240,
--                     _err_to_out=True)
--    if out:
--        log.debug(out)
--
--
--def get_hash(path, hash_type="sha512"):
--    h = hashlib.new(hash_type)
--    with open(path, "rb") as f:
--        for chunk in iter(functools.partial(f.read, 4096), ''):
--            h.update(chunk)
--    return h.hexdigest()
--
--
--class WorkEnv(object):
--
--    def __init__(self):
--        self.workdir = tempfile.mkdtemp()
--
--    def setup(self):
--        self.download_scripts()
--        self.download_martools()
--
--    def download_scripts(self):
--        # unwrap_full_update.pl is not too sensitive to the revision
--        prefix = "https://hg.mozilla.org/mozilla-central/raw-file/default/" \
--            "tools/update-packaging"
--        for f in ("unwrap_full_update.pl", "change_mar_compression.pl"):
--            url = "{prefix}/{f}".format(prefix=prefix, f=f)
--            download(url, dest=os.path.join(self.workdir, f), mode=0o755)
--
--    def download_martools(self):
--        url = "https://ftp.mozilla.org/pub/mozilla.org/firefox/nightly/" \
--            "latest-mozilla-central/mar-tools/linux64/mar"
--        download(url, dest=os.path.join(self.workdir, "mar"), mode=0o755)
--
--    def cleanup(self):
--        shutil.rmtree(self.workdir)
--
--    @property
--    def env(self):
--        my_env = os.environ.copy()
--        my_env['LC_ALL'] = 'C'
--        my_env['MAR'] = os.path.join(self.workdir, "mar")
--        return my_env
--
--
--def verify_allowed_url(mar):
--    if not any(mar.startswith(prefix) for prefix in ALLOWED_URL_PREFIXES):
--        raise ValueError("{mar} is not in allowed URL prefixes: {p}".format(
--            mar=mar, p=ALLOWED_URL_PREFIXES
--        ))
--
--
--def main():
--    parser = argparse.ArgumentParser()
--    parser.add_argument("--artifacts-dir", required=True)
--    parser.add_argument("--sha1-signing-cert", required=True)
--    parser.add_argument("--sha384-signing-cert", required=True)
--    parser.add_argument("--task-definition", required=True,
--                        type=argparse.FileType('r'))
--    parser.add_argument("--output-filename", required=True)
--    parser.add_argument("-q", "--quiet", dest="log_level",
--                        action="store_const", const=logging.WARNING,
--                        default=logging.DEBUG)
--    args = parser.parse_args()
--
--    logging.basicConfig(format="%(asctime)s - %(levelname)s - %(message)s",
--                        level=args.log_level)
--    task = json.load(args.task_definition)
--
--    signing_certs = {
--        'sha1': open(args.sha1_signing_cert, 'rb').read(),
--        'sha384': open(args.sha384_signing_cert, 'rb').read(),
--    }
--
--    assert(get_keysize(signing_certs['sha1']) == 2048)
--    assert(get_keysize(signing_certs['sha384']) == 4096)
--
--    manifest = []
--    for e in task["extra"]["funsize"]["completes"]:
--        to_mar = e["to_mar"]
--        locale = e["locale"]
--        output_filename = args.output_filename.format(locale=locale)
--        verify_allowed_url(to_mar)
--
--        work_env = WorkEnv()
--        work_env.setup()
--        complete_mars = {}
--        dest = os.path.join(work_env.workdir, "to.mar")
--        unpack_dir = os.path.join(work_env.workdir, "to")
--        download(to_mar, dest)
--        if not os.getenv("MOZ_DISABLE_MAR_CERT_VERIFICATION"):
--            verify_signature(dest, signing_certs)
--        # Changing the compression strips the signature
--        change_mar_compression(work_env, dest)
--        complete_mars["hash"] = get_hash(dest)
--        unpack(work_env, dest, unpack_dir)
--        log.info("AV-scanning %s ...", unpack_dir)
--        sh.clamscan("-r", unpack_dir, _timeout=600, _err_to_out=True)
--        log.info("Done.")
--
--        mar_data = {
--            "file_to_sign": output_filename,
--            "hash": get_hash(dest),
--            "appName": get_option(unpack_dir, filename="application.ini",
--                                  section="App", option="Name"),
--            "version": get_option(unpack_dir, filename="application.ini",
--                                  section="App", option="Version"),
--            "to_buildid": get_option(unpack_dir, filename="application.ini",
--                                     section="App", option="BuildID"),
--            "toVersion": e["toVersion"],
--            "toBuildNumber": e["toBuildNumber"],
--            "platform": e["platform"],
--            "locale": e["locale"],
--        }
--        shutil.copy(dest, os.path.join(args.artifacts_dir, output_filename))
--        work_env.cleanup()
--        manifest.append(mar_data)
--    manifest_file = os.path.join(args.artifacts_dir, "manifest.json")
--    with open(manifest_file, "w") as fp:
--        json.dump(manifest, fp, indent=2, sort_keys=True)
--
--
--if __name__ == '__main__':
--    main()
-diff --git a/taskcluster/docker/image_builder/Dockerfile b/taskcluster/docker/image_builder/Dockerfile
-deleted file mode 100644
---- a/taskcluster/docker/image_builder/Dockerfile
-+++ /dev/null
-@@ -1,41 +0,0 @@
--FROM ubuntu:16.04
--
--# %include python/mozbuild/mozbuild/action/tooltool.py
--ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /setup/tooltool.py
--
--# %include taskcluster/docker/recipes/common.sh
--ADD topsrcdir/taskcluster/docker/recipes/common.sh /setup/common.sh
--
--# %include taskcluster/docker/recipes/install-mercurial.sh
--ADD topsrcdir/taskcluster/docker/recipes/install-mercurial.sh /setup/install-mercurial.sh
--
--# %include testing/mozharness/external_tools/robustcheckout.py
--ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py
--
--# %include taskcluster/docker/recipes/run-task
--ADD topsrcdir/taskcluster/docker/recipes/run-task /usr/local/bin/run-task
--
--# Add and run setup script
--ADD build-image.sh      /usr/local/bin/build-image.sh
--ADD download-and-compress /usr/local/bin/download-and-compress
--ADD setup.sh            /setup/setup.sh
--RUN bash /setup/setup.sh
--
--# Setup a workspace that won't use AUFS
--VOLUME /home/worker/workspace
--
--# Set variables normally configured at login by the shell's parent process;
--# these are taken from the GNU su manual
--ENV           HOME          /home/worker
--ENV           SHELL         /bin/bash
--ENV           USER          worker
--ENV           LOGNAME       worker
--ENV           HOSTNAME      taskcluster-worker
--ENV           LC_ALL        C
--
--# Create worker user
--RUN useradd -d /home/worker -s /bin/bash -m worker
--
--# Set some sane defaults
--WORKDIR /home/worker/
--CMD     build-image.sh
-diff --git a/taskcluster/docker/image_builder/HASH b/taskcluster/docker/image_builder/HASH
-deleted file mode 100644
---- a/taskcluster/docker/image_builder/HASH
-+++ /dev/null
-@@ -1,1 +0,0 @@
--sha256:ceaaf92511cfbff711598005585127953873332c62f245dcf1892510c4eb371f
-diff --git a/taskcluster/docker/image_builder/REGISTRY b/taskcluster/docker/image_builder/REGISTRY
-deleted file mode 100644
---- a/taskcluster/docker/image_builder/REGISTRY
-+++ /dev/null
-@@ -1,1 +0,0 @@
--taskcluster
-diff --git a/taskcluster/docker/image_builder/VERSION b/taskcluster/docker/image_builder/VERSION
-deleted file mode 100644
---- a/taskcluster/docker/image_builder/VERSION
-+++ /dev/null
-@@ -1,1 +0,0 @@
--1.3.0
-diff --git a/taskcluster/docker/image_builder/build-image.sh b/taskcluster/docker/image_builder/build-image.sh
-deleted file mode 100755
---- a/taskcluster/docker/image_builder/build-image.sh
-+++ /dev/null
-@@ -1,68 +0,0 @@
--#!/bin/bash -vex
--
--# Set bash options to exit immediately if a pipeline exits non-zero, print a
--# trace of commands, and make output verbose (print shell input as it's read).
--# See https://www.gnu.org/software/bash/manual/html_node/The-Set-Builtin.html
--set -x -e -v -o pipefail
--
--# Prefix errors with taskcluster error prefix so that they are parsed by Treeherder
--raise_error() {
--  echo
--  echo "[taskcluster-image-build:error] $1"
--  exit 1
--}
--
--# Ensure that the PROJECT is specified so the image can be indexed
--test -n "$PROJECT"    || raise_error "PROJECT must be provided."
--test -n "$HASH"       || raise_error "Context HASH must be provided."
--test -n "$IMAGE_NAME" || raise_error "IMAGE_NAME must be provided."
--
--# Create artifact folder
--mkdir -p /home/worker/workspace/artifacts
--
--# Construct a CONTEXT_FILE
--CONTEXT_FILE=/home/worker/workspace/context.tar
--
--# Run ./mach taskcluster-build-image with --context-only to build context
--run-task \
--  --chown-recursive "/home/worker/workspace" \
--  --vcs-checkout "/home/worker/checkouts/gecko" \
--  -- \
--  /home/worker/checkouts/gecko/mach taskcluster-build-image \
--  --context-only "$CONTEXT_FILE" \
--  "$IMAGE_NAME"
--test -f "$CONTEXT_FILE" || raise_error "Context file wasn't created"
--
--# Post context tar-ball to docker daemon
--# This interacts directly with the docker remote API, see:
--# https://docs.docker.com/engine/reference/api/docker_remote_api_v1.18/
--curl -s --fail \
--  -X POST \
--  --header 'Content-Type: application/tar' \
--  --data-binary "@$CONTEXT_FILE" \
--  --unix-socket /var/run/docker.sock "http:/build?t=$IMAGE_NAME:$HASH" \
--  | tee /tmp/docker-build.log \
--  | jq -jr '(.status + .progress, .error | select(. != null) + "\n"), .stream | select(. != null)'
--
--# Exit non-zero if there are error entries in the log
--if cat /tmp/docker-build.log | jq -se 'add | .error' > /dev/null; then
--  raise_error "Image build failed: `cat /tmp/docker-build.log | jq -rse 'add | .error'`";
--fi
--
--# Sanity check that image was built successfully
--if ! cat /tmp/docker-build.log | tail -n 1 | jq -r '.stream' | grep '^Successfully built' > /dev/null; then
--  echo 'docker-build.log for debugging:';
--  cat /tmp/docker-build.log | tail -n 50;
--  raise_error "Image build log didn't with 'Successfully built'";
--fi
--
--# Get image from docker daemon.
--# This interacts directly with the docker remote API, see:
--# https://docs.docker.com/engine/reference/api/docker_remote_api_v1.18/
--#
--# The script will retry up to 10 times.
--/usr/local/bin/download-and-compress \
--    http+unix://%2Fvar%2Frun%2Fdocker.sock/images/${IMAGE_NAME}:${HASH}/get \
--    /home/worker/workspace/image.tar.zst.tmp \
--    /home/worker/workspace/artifacts/image.tar.zst
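
build-image.sh drives the Docker remote API directly over the daemon's UNIX
socket. A minimal Python sketch of the same build request, assuming requests
plus requests_unixsocket (the library download-and-compress below already
uses) and a hypothetical image name and tag:

    import requests
    import requests_unixsocket

    requests_unixsocket.monkeypatch()  # let requests handle http+unix:// URLs

    def build_image(context_tar, name="example-image", tag="latest"):
        # POST the context tarball to the daemon, mirroring the
        # curl --unix-socket call in build-image.sh above.
        url = ("http+unix://%2Fvar%2Frun%2Fdocker.sock/build?t={}:{}"
               .format(name, tag))
        with open(context_tar, "rb") as fh:
            r = requests.post(url, data=fh, stream=True,
                              headers={"Content-Type": "application/tar"})
        r.raise_for_status()
        for line in r.iter_lines():
            if line:
                # One JSON object per line: status/progress, stream, or error.
                print(line)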
-diff --git a/taskcluster/docker/image_builder/download-and-compress b/taskcluster/docker/image_builder/download-and-compress
-deleted file mode 100755
---- a/taskcluster/docker/image_builder/download-and-compress
-+++ /dev/null
-@@ -1,85 +0,0 @@
--#!/usr/bin/python2.7
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--import os
--import sys
--import time
--
--import requests
--import requests_unixsocket
--import zstd
--
--# Allow requests to fetch from UNIX domain sockets.
--requests_unixsocket.monkeypatch()
--
--
--def download_and_compress(url, path, level):
--    r = requests.get(url, stream=True)
--
--    if r.status_code != 200:
--        raise Exception('non-200 response: %d' % r.status_code)
--
--    in_size = 0
--    out_size = 0
--    last_progress = time.time()
--
--    # Use all available CPU cores for multi-threaded compression.
--    cctx = zstd.ZstdCompressor(threads=-1, level=level, write_checksum=True)
--    cobj = cctx.compressobj()
--    with open(path, 'wb') as fh:
--        for raw in r.iter_content(zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE):
--            # Print output periodically, for humans.
--            now = time.time()
--            if now - last_progress > 5.0:
--                print('%d -> %d' % (in_size, out_size))
--                last_progress = now
--
--            in_size += len(raw)
--            chunk = cobj.compress(raw)
--            if not chunk:
--                continue
--
--            out_size += len(chunk)
--            fh.write(chunk)
--
--        chunk = cobj.flush()
--        out_size += len(chunk)
--        fh.write(chunk)
--
--    return in_size, out_size
--
--
--if __name__ == '__main__':
--    url, temp_path, final_path = sys.argv[1:]
--
--    # Default zstd level is 3. We default to 10 because multi-threaded
--    # compression allows us to burn lots of CPU for significant image
--    # size reductions without a major wall time penalty.
--    level = int(os.environ.get('DOCKER_IMAGE_ZSTD_LEVEL', '10'))
--    print('using zstandard compression level %d' % level)
--
--    count = 0
--    while count < 10:
--        count += 1
--
--        try:
--            t_start = time.time()
--            raw_size, compress_size = download_and_compress(url, temp_path,
--                                                            level)
--            elapsed = time.time() - t_start
--            # Move to final path at end so partial image isn't uploaded as
--            # an artifact.
--            os.rename(temp_path, final_path)
--            speed = int(raw_size / elapsed) / 1000000
--            print('compression ratio: %.2f (%d -> %d) @ %d MB/s' % (
--                float(compress_size) / float(raw_size),
--                raw_size, compress_size, speed))
--            sys.exit(0)
--        except Exception as e:
--            print('exception: %s' % e)
--            time.sleep(5)
--
--    print('reached maximum retry attempts; giving up')
--    sys.exit(1)
-diff --git a/taskcluster/docker/image_builder/setup.sh b/taskcluster/docker/image_builder/setup.sh
-deleted file mode 100644
---- a/taskcluster/docker/image_builder/setup.sh
-+++ /dev/null
-@@ -1,61 +0,0 @@
--#!/bin/bash -vex
--set -v -e -x
--
--export DEBIAN_FRONTEND=noninteractive
--
--# Update apt-get lists
--apt-get update -y
--
--# Install dependencies
--apt-get install -y --no-install-recommends \
--    curl \
--    tar \
--    jq \
--    python \
--    python-requests \
--    python-requests-unixsocket
--
--# Extra dependencies only needed for image building. Will be removed at
--# end of script.
--apt-get install -y python-pip
--
--# Install mercurial
--. /setup/common.sh
--. /setup/install-mercurial.sh
--
--# Install build-image.sh script
--chmod +x /usr/local/bin/build-image.sh
--chmod +x /usr/local/bin/run-task
--chmod +x /usr/local/bin/download-and-compress
--
--# Create workspace
--mkdir -p /home/worker/workspace
--
--# Install python-zstandard.
--cd /setup
--tooltool_fetch <<EOF
--[
--  {
--    "size": 463794,
--    "visibility": "public",
--    "digest": "c6ba906403e5c18b374faf9f676b10f0988b9f4067bd6c52c548d7dee58fac79974babfd5c438aef8da0a5260158116db69b11f2a52a775772d9904b9d86fdbc",
--    "algorithm": "sha512",
--    "filename": "zstandard-0.8.0.tar.gz"
--  }
--]
--EOF
--cd -
--
--/usr/bin/pip -v install /setup/zstandard-0.8.0.tar.gz
--
--# python-pip only needed to install python-zstandard. Removing it removes
--# several hundred MB of dependencies from the image.
--apt-get purge -y python-pip
--
--# Purge apt-get caches to minimize image size
--apt-get auto-remove -y
--apt-get clean -y
--rm -rf /var/lib/apt/lists/
--
--# Remove this script
--rm -rf /setup/
-diff --git a/taskcluster/docker/index-task/.eslintrc.js b/taskcluster/docker/index-task/.eslintrc.js
-deleted file mode 100644
---- a/taskcluster/docker/index-task/.eslintrc.js
-+++ /dev/null
-@@ -1,7 +0,0 @@
--"use strict";
--
--module.exports = {
--  "env": {
--    "node": true
--  }
--};
-diff --git a/taskcluster/docker/index-task/Dockerfile b/taskcluster/docker/index-task/Dockerfile
-deleted file mode 100644
---- a/taskcluster/docker/index-task/Dockerfile
-+++ /dev/null
-@@ -1,11 +0,0 @@
--FROM node:6-alpine
--
--ENV       NODE_ENV        production
--RUN       mkdir /app
--ADD       insert-indexes.js   /app/
--ADD       package.json        /app/
--ADD       npm-shrinkwrap.json /app/
--WORKDIR   /app
--RUN       npm install && npm cache clean
--
--ENTRYPOINT ["node"]
-diff --git a/taskcluster/docker/index-task/README b/taskcluster/docker/index-task/README
-deleted file mode 100644
---- a/taskcluster/docker/index-task/README
-+++ /dev/null
-@@ -1,36 +0,0 @@
--Index-Image
--===========
--
--This image is designed to be used for indexing other tasks. It takes a task
--definition as follows:
--```js
--{
--  ...,
--  scopes: [
--    'index:insert-task:my-index.namespace',
--    'index:insert-task:...',
--  ],
--  payload: {
--    image: '...',
--    env: {
--      TARGET_TASKID: '<taskId-to-be-indexed>',
--    },
--    command: [
--      'insert-indexes.js',
--      'my-index.namespace.one',
--      'my-index.namespace.two',
--      '....',
--    ],
--    features: {
--      taskclusterProxy: true,
--    },
--    maxRunTime: 600,
--  },
--}
--```
--
--As can be seen, the `taskId` to be indexed is given by the environment variable
--`TARGET_TASKID`, and the `command` arguments specify the namespaces under which
--it must be indexed. It is **important** to also include scopes of the form
--`index:insert-task:<...>` for all namespaces `<...>` given as `command`
--arguments.
-diff --git a/taskcluster/docker/index-task/insert-indexes.js b/taskcluster/docker/index-task/insert-indexes.js
-deleted file mode 100644
---- a/taskcluster/docker/index-task/insert-indexes.js
-+++ /dev/null
-@@ -1,47 +0,0 @@
--let taskcluster = require("taskcluster-client");
--
--// Create instance of index client
--let index = new taskcluster.Index({
--  delayFactor:    750,  // Good solid delay for background process
--  retries:        8,    // A few extra retries for robustness
--  baseUrl:        "taskcluster/index/v1",
--});
--
--// Create queue instance for fetching taskId
--let queue = new taskcluster.Queue({
--    delayFactor:    750,  // Good solid delay for background process
--    retries:        8,    // A few extra retries for robustness
--});
--
--// Load input
--let taskId = process.env.TARGET_TASKID;
--let namespaces = process.argv.slice(2);
--
--// Validate input
--if (!taskId) {
--  console.log("Expected target task as environment variable: TARGET_TASKID");
--  process.exit(1);
--}
--
--// Fetch task definition to get expiration and then insert into index
--queue.task(taskId).then(task => task.expires).then(expires => {
--  return Promise.all(namespaces.map(namespace => {
--    console.log("Inserting %s into index under: %s", taskId, namespace);
--    return index.insertTask(namespace, {
--      taskId,
--      rank: 0,
--      data: {},
--      expires,
--    });
--  }));
--}).then(() => {
--  console.log("indexing successfully completed.");
--  process.exit(0);
--}).catch(err => {
--  console.log("Error:\n%s", err);
--  if (err.stack) {
--    console.log("Stack:\n%s", err.stack);
--  }
--  console.log("Properties:\n%j", err);
--  throw err;
--}).catch(() => process.exit(1));
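
insert-indexes.js reaches the index service through the taskclusterProxy
feature (note the relative baseUrl "taskcluster/index/v1"). A minimal Python
sketch of the same insertion, assuming the proxy is available and that the
insert-task route is PUT task/<namespace> with a taskId/rank/data/expires
payload, as in the client call above:

    import requests

    def insert_task(namespace, task_id, expires):
        # Mirror of index.insertTask() in insert-indexes.js, sent through
        # the taskcluster proxy; all arguments are caller-supplied.
        url = "http://taskcluster/index/v1/task/{}".format(namespace)
        payload = {"taskId": task_id, "rank": 0, "data": {}, "expires": expires}
        r = requests.put(url, json=payload)
        r.raise_for_status()
        return r.json()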
-diff --git a/taskcluster/docker/index-task/npm-shrinkwrap.json b/taskcluster/docker/index-task/npm-shrinkwrap.json
-deleted file mode 100644
---- a/taskcluster/docker/index-task/npm-shrinkwrap.json
-+++ /dev/null
-@@ -1,309 +0,0 @@
--{
--  "dependencies": {
--    "amqplib": {
--      "version": "0.5.1",
--      "from": "amqplib@>=0.5.1 <0.6.0",
--      "resolved": "https://registry.npmjs.org/amqplib/-/amqplib-0.5.1.tgz"
--    },
--    "asap": {
--      "version": "1.0.0",
--      "from": "asap@>=1.0.0 <1.1.0",
--      "resolved": "https://registry.npmjs.org/asap/-/asap-1.0.0.tgz"
--    },
--    "async": {
--      "version": "0.9.2",
--      "from": "async@>=0.9.0 <0.10.0",
--      "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz"
--    },
--    "bitsyntax": {
--      "version": "0.0.4",
--      "from": "bitsyntax@>=0.0.4 <0.1.0",
--      "resolved": "https://registry.npmjs.org/bitsyntax/-/bitsyntax-0.0.4.tgz"
--    },
--    "bluebird": {
--      "version": "3.4.7",
--      "from": "bluebird@>=3.4.6 <4.0.0",
--      "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.4.7.tgz"
--    },
--    "boom": {
--      "version": "2.10.1",
--      "from": "boom@>=2.0.0 <3.0.0",
--      "resolved": "https://registry.npmjs.org/boom/-/boom-2.10.1.tgz"
--    },
--    "buffer-more-ints": {
--      "version": "0.0.2",
--      "from": "buffer-more-ints@0.0.2",
--      "resolved": "https://registry.npmjs.org/buffer-more-ints/-/buffer-more-ints-0.0.2.tgz"
--    },
--    "combined-stream": {
--      "version": "0.0.7",
--      "from": "combined-stream@>=0.0.4 <0.1.0",
--      "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-0.0.7.tgz"
--    },
--    "component-emitter": {
--      "version": "1.2.1",
--      "from": "component-emitter@>=1.2.0 <1.3.0",
--      "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.2.1.tgz"
--    },
--    "cookiejar": {
--      "version": "2.0.6",
--      "from": "cookiejar@2.0.6",
--      "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.0.6.tgz"
--    },
--    "core-util-is": {
--      "version": "1.0.2",
--      "from": "core-util-is@>=1.0.0 <1.1.0",
--      "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz"
--    },
--    "cryptiles": {
--      "version": "2.0.5",
--      "from": "cryptiles@>=2.0.0 <3.0.0",
--      "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-2.0.5.tgz"
--    },
--    "debug": {
--      "version": "2.6.0",
--      "from": "debug@>=2.1.3 <3.0.0",
--      "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.0.tgz"
--    },
--    "delayed-stream": {
--      "version": "0.0.5",
--      "from": "delayed-stream@0.0.5",
--      "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-0.0.5.tgz"
--    },
--    "eventsource": {
--      "version": "0.1.6",
--      "from": "eventsource@0.1.6",
--      "resolved": "https://registry.npmjs.org/eventsource/-/eventsource-0.1.6.tgz",
--      "optional": true
--    },
--    "extend": {
--      "version": "3.0.0",
--      "from": "extend@3.0.0",
--      "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.0.tgz"
--    },
--    "faye-websocket": {
--      "version": "0.11.1",
--      "from": "faye-websocket@>=0.11.0 <0.12.0",
--      "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.1.tgz",
--      "optional": true
--    },
--    "form-data": {
--      "version": "0.2.0",
--      "from": "form-data@0.2.0",
--      "resolved": "https://registry.npmjs.org/form-data/-/form-data-0.2.0.tgz"
--    },
--    "formidable": {
--      "version": "1.0.17",
--      "from": "formidable@>=1.0.14 <1.1.0",
--      "resolved": "https://registry.npmjs.org/formidable/-/formidable-1.0.17.tgz"
--    },
--    "hawk": {
--      "version": "2.3.1",
--      "from": "hawk@>=2.3.1 <3.0.0",
--      "resolved": "https://registry.npmjs.org/hawk/-/hawk-2.3.1.tgz"
--    },
--    "hoek": {
--      "version": "2.16.3",
--      "from": "hoek@>=2.0.0 <3.0.0",
--      "resolved": "https://registry.npmjs.org/hoek/-/hoek-2.16.3.tgz"
--    },
--    "inherits": {
--      "version": "2.0.3",
--      "from": "inherits@>=2.0.1 <2.1.0",
--      "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz"
--    },
--    "isarray": {
--      "version": "0.0.1",
--      "from": "isarray@0.0.1",
--      "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz"
--    },
--    "json3": {
--      "version": "3.3.2",
--      "from": "json3@>=3.3.2 <4.0.0",
--      "resolved": "https://registry.npmjs.org/json3/-/json3-3.3.2.tgz",
--      "optional": true
--    },
--    "lodash": {
--      "version": "3.10.1",
--      "from": "lodash@>=3.6.0 <4.0.0",
--      "resolved": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz"
--    },
--    "methods": {
--      "version": "1.1.2",
--      "from": "methods@>=1.1.1 <1.2.0",
--      "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz"
--    },
--    "mime": {
--      "version": "1.3.4",
--      "from": "mime@1.3.4",
--      "resolved": "https://registry.npmjs.org/mime/-/mime-1.3.4.tgz"
--    },
--    "mime-db": {
--      "version": "1.12.0",
--      "from": "mime-db@>=1.12.0 <1.13.0",
--      "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.12.0.tgz"
--    },
--    "mime-types": {
--      "version": "2.0.14",
--      "from": "mime-types@>=2.0.3 <2.1.0",
--      "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.0.14.tgz"
--    },
--    "ms": {
--      "version": "0.7.2",
--      "from": "ms@0.7.2",
--      "resolved": "https://registry.npmjs.org/ms/-/ms-0.7.2.tgz"
--    },
--    "original": {
--      "version": "1.0.0",
--      "from": "original@>=0.0.5",
--      "resolved": "https://registry.npmjs.org/original/-/original-1.0.0.tgz",
--      "optional": true,
--      "dependencies": {
--        "url-parse": {
--          "version": "1.0.5",
--          "from": "url-parse@>=1.0.0 <1.1.0",
--          "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.0.5.tgz",
--          "optional": true
--        }
--      }
--    },
--    "promise": {
--      "version": "6.1.0",
--      "from": "promise@>=6.1.0 <7.0.0",
--      "resolved": "https://registry.npmjs.org/promise/-/promise-6.1.0.tgz"
--    },
--    "qs": {
--      "version": "2.3.3",
--      "from": "qs@2.3.3",
--      "resolved": "https://registry.npmjs.org/qs/-/qs-2.3.3.tgz"
--    },
--    "querystringify": {
--      "version": "0.0.4",
--      "from": "querystringify@>=0.0.0 <0.1.0",
--      "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-0.0.4.tgz"
--    },
--    "readable-stream": {
--      "version": "1.1.14",
--      "from": "readable-stream@>=1.0.0 <2.0.0 >=1.1.9",
--      "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz"
--    },
--    "reduce-component": {
--      "version": "1.0.1",
--      "from": "reduce-component@1.0.1",
--      "resolved": "https://registry.npmjs.org/reduce-component/-/reduce-component-1.0.1.tgz"
--    },
--    "requires-port": {
--      "version": "1.0.0",
--      "from": "requires-port@>=1.0.0 <1.1.0",
--      "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz"
--    },
--    "slugid": {
--      "version": "1.1.0",
--      "from": "slugid@>=1.1.0 <2.0.0",
--      "resolved": "https://registry.npmjs.org/slugid/-/slugid-1.1.0.tgz"
--    },
--    "sntp": {
--      "version": "1.0.9",
--      "from": "sntp@>=1.0.0 <2.0.0",
--      "resolved": "https://registry.npmjs.org/sntp/-/sntp-1.0.9.tgz"
--    },
--    "sockjs-client": {
--      "version": "1.1.2",
--      "from": "sockjs-client@>=1.0.3 <2.0.0",
--      "resolved": "https://registry.npmjs.org/sockjs-client/-/sockjs-client-1.1.2.tgz",
--      "optional": true
--    },
--    "string_decoder": {
--      "version": "0.10.31",
--      "from": "string_decoder@>=0.10.0 <0.11.0",
--      "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz"
--    },
--    "superagent": {
--      "version": "1.7.2",
--      "from": "superagent@>=1.7.0 <1.8.0",
--      "resolved": "https://registry.npmjs.org/superagent/-/superagent-1.7.2.tgz",
--      "dependencies": {
--        "readable-stream": {
--          "version": "1.0.27-1",
--          "from": "readable-stream@1.0.27-1",
--          "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.27-1.tgz"
--        }
--      }
--    },
--    "superagent-hawk": {
--      "version": "0.0.6",
--      "from": "superagent-hawk@>=0.0.6 <0.0.7",
--      "resolved": "https://registry.npmjs.org/superagent-hawk/-/superagent-hawk-0.0.6.tgz",
--      "dependencies": {
--        "boom": {
--          "version": "0.4.2",
--          "from": "boom@>=0.4.0 <0.5.0",
--          "resolved": "https://registry.npmjs.org/boom/-/boom-0.4.2.tgz"
--        },
--        "cryptiles": {
--          "version": "0.2.2",
--          "from": "cryptiles@>=0.2.0 <0.3.0",
--          "resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-0.2.2.tgz"
--        },
--        "hawk": {
--          "version": "1.0.0",
--          "from": "hawk@>=1.0.0 <1.1.0",
--          "resolved": "https://registry.npmjs.org/hawk/-/hawk-1.0.0.tgz"
--        },
--        "hoek": {
--          "version": "0.9.1",
--          "from": "hoek@>=0.9.0 <0.10.0",
--          "resolved": "https://registry.npmjs.org/hoek/-/hoek-0.9.1.tgz"
--        },
--        "qs": {
--          "version": "0.6.6",
--          "from": "qs@>=0.6.6 <0.7.0",
--          "resolved": "https://registry.npmjs.org/qs/-/qs-0.6.6.tgz"
--        },
--        "sntp": {
--          "version": "0.2.4",
--          "from": "sntp@>=0.2.0 <0.3.0",
--          "resolved": "https://registry.npmjs.org/sntp/-/sntp-0.2.4.tgz"
--        }
--      }
--    },
--    "superagent-promise": {
--      "version": "0.2.0",
--      "from": "superagent-promise@>=0.2.0 <0.3.0",
--      "resolved": "https://registry.npmjs.org/superagent-promise/-/superagent-promise-0.2.0.tgz"
--    },
--    "taskcluster-client": {
--      "version": "1.6.3",
--      "from": "taskcluster-client@>=1.6.2 <2.0.0",
--      "resolved": "https://registry.npmjs.org/taskcluster-client/-/taskcluster-client-1.6.3.tgz"
--    },
--    "url-join": {
--      "version": "0.0.1",
--      "from": "url-join@>=0.0.1 <0.0.2",
--      "resolved": "https://registry.npmjs.org/url-join/-/url-join-0.0.1.tgz"
--    },
--    "url-parse": {
--      "version": "1.1.7",
--      "from": "url-parse@>=1.1.1 <2.0.0",
--      "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.1.7.tgz",
--      "optional": true
--    },
--    "uuid": {
--      "version": "2.0.3",
--      "from": "uuid@>=2.0.1 <3.0.0",
--      "resolved": "https://registry.npmjs.org/uuid/-/uuid-2.0.3.tgz"
--    },
--    "websocket-driver": {
--      "version": "0.6.5",
--      "from": "websocket-driver@>=0.5.1",
--      "resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.6.5.tgz",
--      "optional": true
--    },
--    "websocket-extensions": {
--      "version": "0.1.1",
--      "from": "websocket-extensions@>=0.1.1",
--      "resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.1.tgz",
--      "optional": true
--    }
--  }
--}
-diff --git a/taskcluster/docker/index-task/package.json b/taskcluster/docker/index-task/package.json
-deleted file mode 100644
---- a/taskcluster/docker/index-task/package.json
-+++ /dev/null
-@@ -1,9 +0,0 @@
--{
--  "private": true,
--  "scripts":{
--    "start": "node index.js"
--  },
--  "dependencies": {
--    "taskcluster-client": "^1.6.2"
--  }
--}
-diff --git a/taskcluster/docker/lint/Dockerfile b/taskcluster/docker/lint/Dockerfile
-deleted file mode 100644
---- a/taskcluster/docker/lint/Dockerfile
-+++ /dev/null
-@@ -1,45 +0,0 @@
--FROM          ubuntu:16.04
--MAINTAINER    Andrew Halberstadt <ahalberstadt@mozilla.com>
--
--RUN useradd -d /home/worker -s /bin/bash -m worker
--WORKDIR /home/worker
--
--RUN mkdir /build
--# %include python/mozbuild/mozbuild/action/tooltool.py
--ADD topsrcdir/python/mozbuild/mozbuild/action/tooltool.py /build/tooltool.py
--
--# %include testing/mozharness/external_tools/robustcheckout.py
--ADD topsrcdir/testing/mozharness/external_tools/robustcheckout.py /usr/local/mercurial/robustcheckout.py
--
--# %include taskcluster/docker/recipes/install-node.sh
--ADD topsrcdir/taskcluster/docker/recipes/install-node.sh /build/install-node.sh
--
--# %include taskcluster/docker/recipes/install-mercurial.sh
--ADD topsrcdir/taskcluster/docker/recipes/install-mercurial.sh /build/install-mercurial.sh
--ADD system-setup.sh /tmp/system-setup.sh
--# %include tools/lint/eslint/manifest.tt
--ADD topsrcdir/tools/lint/eslint/manifest.tt /tmp/eslint.tt
--# %include tools/lint/eslint/eslint-plugin-mozilla/manifest.tt
--ADD topsrcdir/tools/lint/eslint/eslint-plugin-mozilla/manifest.tt /tmp/eslint-plugin-mozilla.tt
--# %include tools/lint/flake8_/flake8_requirements.txt
--ADD topsrcdir/tools/lint/flake8_/flake8_requirements.txt /tmp/flake8_requirements.txt
--# %include tools/lint/tox/tox_requirements.txt
--ADD topsrcdir/tools/lint/tox/tox_requirements.txt /tmp/tox_requirements.txt
--RUN bash /tmp/system-setup.sh
--
--# %include taskcluster/docker/recipes/run-task
--ADD topsrcdir/taskcluster/docker/recipes/run-task /builds/worker/bin/run-task
--RUN chown -R worker:worker /builds/worker/bin && chmod 755 /builds/worker/bin/*
--
--# Set variables normally configured at login by the shell's parent process;
--# these are taken from the GNU su manual.
--ENV           HOME          /builds/worker
--ENV           SHELL         /bin/bash
--ENV           USER          worker
--ENV           LOGNAME       worker
--ENV           HOSTNAME      taskcluster-worker
--ENV           LANG          en_US.UTF-8
--ENV           LC_ALL        en_US.UTF-8
--
--# Set a default command useful for debugging
--CMD ["/bin/bash", "--login"]
-diff --git a/taskcluster/docker/lint/system-setup.sh b/taskcluster/docker/lint/system-setup.sh
-deleted file mode 100644
---- a/taskcluster/docker/lint/system-setup.sh
-+++ /dev/null
-@@ -1,72 +0,0 @@
--#!/usr/bin/env bash
--# This allows ubuntu-desktop to be installed without human interaction
--export DEBIAN_FRONTEND=noninteractive
--
--set -ve
--
--test `whoami` == 'root'
--
--mkdir -p /setup
--cd /setup
--
--apt_packages=()
--apt_packages+=('curl')
--apt_packages+=('locales')
--apt_packages+=('git')
--apt_packages+=('python')
--apt_packages+=('python-pip')
--apt_packages+=('sudo')
--apt_packages+=('wget')
--apt_packages+=('xz-utils')
--
--apt-get update
--apt-get install -y ${apt_packages[@]}
--
--# Without this we get spurious "LC_ALL: cannot change locale (en_US.UTF-8)" errors,
--# and python scripts raise UnicodeEncodeError when trying to print unicode characters.
--locale-gen en_US.UTF-8
--dpkg-reconfigure locales
--
--su -c 'git config --global user.email "worker@mozilla.test"' worker
--su -c 'git config --global user.name "worker"' worker
--
--tooltool_fetch() {
--    cat >manifest.tt
--    /build/tooltool.py fetch
--    rm manifest.tt
--}
--
--cd /build
--. install-mercurial.sh
--
--###
--# ESLint Setup
--###
--
--# install node
--
--. install-node.sh
--
--/build/tooltool.py fetch -m /tmp/eslint.tt
--mv /build/node_modules /build/node_modules_eslint
--/build/tooltool.py fetch -m /tmp/eslint-plugin-mozilla.tt
--mv /build/node_modules /build/node_modules_eslint-plugin-mozilla
--
--###
--# Flake8 Setup
--###
--
--cd /setup
--
--pip install --require-hashes -r /tmp/flake8_requirements.txt
--
--###
--# tox Setup
--###
--
--cd /setup
--
--pip install --require-hashes -r /tmp/tox_requirements.txt
--
--cd /
--rm -rf /setup
-diff --git a/taskcluster/docker/recipes/centos-install-debug-symbols.sh b/taskcluster/docker/recipes/centos-install-debug-symbols.sh
-deleted file mode 100644
---- a/taskcluster/docker/recipes/centos-install-debug-symbols.sh
-+++ /dev/null
-@@ -1,95 +0,0 @@
--#!/bin/bash
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--# Install debuginfo packages
--
--# enable debug info repository
--yum-config-manager --enable debuginfo
--
--install_options="-q -t -y --skip-broken"
--debuginfo_install="debuginfo-install $install_options"
--
--install_debuginfo_for_installed_packages() {
--    yum list installed \
--        | awk 'NF >= 3 && $1 !~ /debuginfo/ {
--                # Remove arch suffix
--                print gensub(/\.(i.86|x86_64|noarch)/, "", "", $1)
--            }' \
--        | xargs $debuginfo_install \
--        || : # ignore errors
--}
--
--echo "Installing debuginfo packages..."
--install_debuginfo_for_installed_packages > /dev/null
--
--# Now search for debuginfo for individual libraries in the system
--
--# Get the length of a string in bytes.
--# We have to set LANG=C to get the length in bytes, not chars.
--strlen() {
--    local old_lang byteslen
--    old_lang=$LANG
--    LANG=C
--    byteslen=${#1}
--    LANG=$old_lang
--    echo $byteslen
--}
--
--echo "Searching for additional debuginfo packages..."
--
--# libraries contains the list of libraries found in the system
--libraries=""
--
--# As we accumulate libraries in the $libraries variable, we have
--# to keep checking that we have not exceeded the command-line
--# argument length limit. arg_max stores that limit in bytes,
--# discounting the $debuginfo_install command plus one space.
--arg_max=$(( $(getconf ARG_MAX)-$(strlen $debuginfo_install)-$(strlen " ") ))
--
--to_debuginfo() {
--    # extracted from debuginfo-install script
--    if [[ $1 == *-rpms ]]; then
--        echo ${1%*-rpms}-debug-rpms
--    else
--        echo $1-debuginfo
--    fi
--}
--
--get_debuginfo_package() {
--    local package=${1%.so*}
--
--    # Remove version suffix because some libraries have their debuginfo
--    # package without it in the name.
--    local unversioned_package=${package%-*}
--    if [ "$unversioned_package" != "$package" ]; then
--        package="$package $unversioned_package"
--    fi
--
--    echo $package
--}
--
--walk_dir() {
--    local lib
--    for i in $1/*; do
--        # if we found a library...
--        if [[ $i == *.so ]]; then
--            lib="$(get_debuginfo_package $(basename $i))"
--            if [ $(strlen "$debuginfo_install $libraries $lib") -ge $arg_max ]; then
--                $debuginfo_install $libraries > /dev/null
--                libraries=""
--            fi
--            libraries="$libraries $lib"
--        fi
--    done
--}
--
--for i in /usr/lib /usr/lib64 /lib /lib64; do
--    walk_dir $i
--done
--
--if [ ${#libraries} -gt 0 ]; then
--    $debuginfo_install $libraries > /dev/null
--fi
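The batching above exists because a single debuginfo-install invocation cannot exceed the kernel's argument-length limit. As a rough sketch of the same accumulate-then-flush idea (Python; names hypothetical, not part of the patch):

    import os

    def batched(items, base_cmd):
        # Flush whenever adding the next item would exceed ARG_MAX,
        # mirroring the shell script's arg_max bookkeeping (the fixed
        # command plus one separating space is discounted up front).
        limit = os.sysconf('SC_ARG_MAX') - len(base_cmd.encode()) - 1
        batch, size = [], 0
        for item in items:
            cost = len(item.encode()) + 1  # +1 for the separating space
            if batch and size + cost > limit:
                yield batch
                batch, size = [], 0
            batch.append(item)
            size += cost
        if batch:
            yield batch

    # e.g.: for chunk in batched(libs, 'debuginfo-install -q -t -y --skip-broken'): ...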
-diff --git a/taskcluster/docker/recipes/centos6-build-system-setup.sh b/taskcluster/docker/recipes/centos6-build-system-setup.sh
-deleted file mode 100644
---- a/taskcluster/docker/recipes/centos6-build-system-setup.sh
-+++ /dev/null
-@@ -1,14 +0,0 @@
--#!/bin/bash
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--cd /setup
--
--. /setup/common.sh
--. /setup/install-mercurial.sh
--. /setup/install-make.sh
--. /setup/install-cmake.sh
--. /setup/install-debug-symbols.sh
--
--rm -rf /setup
-diff --git a/taskcluster/docker/recipes/common.sh b/taskcluster/docker/recipes/common.sh
-deleted file mode 100644
---- a/taskcluster/docker/recipes/common.sh
-+++ /dev/null
-@@ -1,10 +0,0 @@
--#!/bin/bash
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--tooltool_fetch() {
--    cat >manifest.tt
--    python /setup/tooltool.py fetch
--    rm manifest.tt
--}
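tooltool_fetch copies whatever manifest arrives on stdin to manifest.tt, fetches against it, and deletes it, which is what lets the later scripts feed it heredocs. A rough Python equivalent using the explicit `fetch -m` form that appears elsewhere in this patch (tooltool path assumed):

    import os
    import subprocess
    import tempfile

    def tooltool_fetch(manifest_text, tooltool='/setup/tooltool.py'):
        # Same lifecycle as the shell helper: write the caller-supplied
        # manifest, fetch against it, then remove it.
        with tempfile.NamedTemporaryFile('w', suffix='.tt', delete=False) as f:
            f.write(manifest_text)
        try:
            subprocess.check_call(['python', tooltool, 'fetch', '-m', f.name])
        finally:
            os.unlink(f.name)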
-diff --git a/taskcluster/docker/recipes/install-cmake.sh b/taskcluster/docker/recipes/install-cmake.sh
-deleted file mode 100644
---- a/taskcluster/docker/recipes/install-cmake.sh
-+++ /dev/null
-@@ -1,21 +0,0 @@
--#!/bin/bash
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--# This script installs CMake 3.7.1.
--tooltool_fetch <<'EOF'
--[
--  {
--    "size": 7361172,
--    "digest": "0539d70ce3ac77042a45d638443b09fbf368e253622db980bc6fb15988743eacd031ab850a45c821ec3e9f0f5f886b9c9cb0668aeda184cd457b78abbfe7b629",
--    "algorithm": "sha512",
--    "filename": "cmake-3.7.1.tar.gz",
--    "unpack": true
--  }
--]
--EOF
--cd cmake-3.7.1
--./bootstrap && make install
--cd ..
--rm -rf cmake-3.7.1
-diff --git a/taskcluster/docker/recipes/install-make.sh b/taskcluster/docker/recipes/install-make.sh
-deleted file mode 100644
---- a/taskcluster/docker/recipes/install-make.sh
-+++ /dev/null
-@@ -1,27 +0,0 @@
--#!/bin/bash
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--# This script installs Make 4.0.
--tooltool_fetch <<'EOF'
--[
--  {
--    "size": 1872991,
--    "visibility": "public",
--    "digest": "bc5083937a6cf473be12c0105b2064e546e1765cfc8d3882346cd50e2f3e967acbc5a679b861da07d32dce833d6b55e9c812fe3216cf6db7c4b1f3c232339c88",
--    "algorithm": "sha512",
--    "filename": "make-4.0.tar.gz",
--    "unpack": true
--  }
--]
--EOF
--cd make-4.0
--./configure
--make
--make install
--# The build system will find `gmake` ahead of `make`, so make sure it finds
--# the version we just installed.
--ln -s /usr/local/bin/make /usr/local/bin/gmake
--cd ..
--rm -rf make-4.0
-diff --git a/taskcluster/docker/recipes/install-mercurial.sh b/taskcluster/docker/recipes/install-mercurial.sh
-deleted file mode 100644
---- a/taskcluster/docker/recipes/install-mercurial.sh
-+++ /dev/null
-@@ -1,153 +0,0 @@
--#!/bin/bash
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--# This script installs and configures Mercurial.
--
--set -e
--
--# Detect OS.
--if [ -f /etc/lsb-release ]; then
--    . /etc/lsb-release
--
--    if [ "${DISTRIB_ID}" = "Ubuntu" -a "${DISTRIB_RELEASE}" = "16.04" ]; then
--        HG_DEB=1
--        HG_DIGEST=dd4dd7759fe73985b6a0424b34a3036d130c26defdd866a9fdd7302e40c7417433b93f020497ceb40593eaead8e86be55e48340887015645202b47ff7b0d7ac6
--        HG_SIZE=181722
--        HG_FILENAME=mercurial_4.3.1_amd64.deb
--
--        HG_COMMON_DIGEST=045f7e07f1e2e0fef767b2f50a7e9ab37d5da0bfead5ddf473ae044b61a4566aed2d6f2706f52d227947d713ef8e89eb9a269288f08e52924e4de88a39cd7ac0
--        HG_COMMON_SIZE=2017628
--        HG_COMMON_FILENAME=mercurial-common_4.3.1_all.deb
--    elif [ "${DISTRIB_ID}" = "Ubuntu" -a "${DISTRIB_RELEASE}" = "12.04" ]; then
--        HG_DEB=1
--        HG_DIGEST=67823aa455c59dbdc24ec1f044b0afdb5c03520ef3601509cb5466dc0ac332846caf96176f07de501c568236f6909e55dfc8f4b02f8c69fa593a4abca9abfeb8
--        HG_SIZE=167880
--        HG_FILENAME=mercurial_4.1.2_amd64.deb
--
--        HG_COMMON_DIGEST=5e1c462a9b699d2068f7a0c14589f347ca719c216181ef7a625033df757185eeb3a8fed57986829a7943f16af5a8d66ddf457cc7fc4af557be88eb09486fe665
--        HG_COMMON_SIZE=3091596
--        HG_COMMON_FILENAME=mercurial-common_4.1.2_all.deb
--    fi
--
--    CERT_PATH=/etc/ssl/certs/ca-certificates.crt
--
--elif [ -f /etc/centos-release ]; then
--    CENTOS_VERSION=`rpm -q --queryformat '%{VERSION}' centos-release`
--    if [ "${CENTOS_VERSION}" = "6" ]; then
--        if [ -f /usr/bin/pip2.7 ]; then
--            PIP_PATH=/usr/bin/pip2.7
--        else
--            # The following RPM is "linked" against Python 2.6, which doesn't
--            # support TLS 1.2. Given the security implications of an insecure
--            # version control tool, we choose to prefer a Mercurial built using
--            # Python 2.7 that supports TLS 1.2. Before you uncomment the code
--            # below, think long and hard about the implications of limiting
--            # Mercurial to TLS 1.0.
--            #HG_RPM=1
--            #HG_DIGEST=c64e00c74402cd9c4ef9792177354fa6ff9c8103f41358f0eab2b15dba900d47d04ea582c6c6ebb80cf52495a28433987ffb67a5f39cd843b6638e3fa46921c8
--            #HG_SIZE=4437360
--            #HG_FILENAME=mercurial-4.1.2.x86_64.rpm
--            echo "We currently require Python 2.7 and /usr/bin/pip2.7 to run Mercurial"
--            exit 1
--        fi
--    else
--        echo "Unsupported CentOS version: ${CENTOS_VERSION}"
--        exit 1
--    fi
--
--    CERT_PATH=/etc/ssl/certs/ca-bundle.crt
--fi
--
--if [ -n "${HG_DEB}" ]; then
--tooltool_fetch <<EOF
--[
--  {
--    "size": ${HG_SIZE},
--    "digest": "${HG_DIGEST}",
--    "algorithm": "sha512",
--    "filename": "${HG_FILENAME}"
--  },
--  {
--    "size": ${HG_COMMON_SIZE},
--    "digest": "${HG_COMMON_DIGEST}",
--    "algorithm": "sha512",
--    "filename": "${HG_COMMON_FILENAME}"
--  }
--]
--EOF
--
--    dpkg -i ${HG_COMMON_FILENAME} ${HG_FILENAME}
--elif [ -n "${HG_RPM}" ]; then
--tooltool_fetch <<EOF
--[
--  {
--    "size": ${HG_SIZE},
--    "digest": "${HG_DIGEST}",
--    "algorithm": "sha512",
--    "filename": "${HG_FILENAME}"
--  }
--]
--EOF
--
--    rpm -i ${HG_FILENAME}
--elif [ -n "${PIP_PATH}" ]; then
--tooltool_fetch <<EOF
--[
--  {
--    "size": 5475042,
--    "digest": "4c42d06b7f111a3e825dd927704a30f88f0b2225cf87ab8954bf53a7fbc0edf561374dd49b13d9c10140d98ff5853a64acb5a744349727abae81d32da401922b",
--    "algorithm": "sha512",
--    "filename": "mercurial-4.3.1.tar.gz"
--  }
--]
--EOF
--
--   ${PIP_PATH} install mercurial-4.3.1.tar.gz
--else
--    echo "Do not know how to install Mercurial on this OS"
--    exit 1
--fi
--
--chmod 644 /usr/local/mercurial/robustcheckout.py
--
--mkdir -p /etc/mercurial
--cat >/etc/mercurial/hgrc <<EOF
--# By default the progress bar starts after 3s and updates every 0.1s. We
--# change this so it shows and updates every 1.0s.
--# We also tell progress to assume a TTY is present so updates are printed
--# even if there is no known TTY.
--[progress]
--delay = 1.0
--refresh = 1.0
--assume-tty = true
--
--[web]
--cacerts = ${CERT_PATH}
--
--[extensions]
--robustcheckout = /usr/local/mercurial/robustcheckout.py
--
--[hostsecurity]
--# When running a modern Python, Mercurial will default to TLS 1.1+.
--# When running on a legacy Python, Mercurial will default to TLS 1.0+.
--# There is no good reason we shouldn't be running a modern Python
--# capable of speaking TLS 1.2. And the only Mercurial servers we care
--# about should be running TLS 1.2. So make TLS 1.2 the minimum.
--minimumprotocol = tls1.2
--
--# Settings to make 1-click loaners more useful.
--[extensions]
--histedit =
--rebase =
--
--[diff]
--git = 1
--showfunc = 1
--
--[pager]
--pager = LESS=FRSXQ less
--EOF
--
--chmod 644 /etc/mercurial/hgrc
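The minimumprotocol = tls1.2 pin only holds if the Python underneath Mercurial can actually negotiate TLS 1.2, which is why the CentOS branch above refuses the Python 2.6-linked RPM. A quick capability probe (assumes Python 2.7.9+ or 3.x built against OpenSSL 1.0.1 or newer):

    import ssl

    # PROTOCOL_TLSv1_2 is only exposed when the interpreter's OpenSSL
    # supports TLS 1.2; if this prints False, the hostsecurity pin in
    # /etc/mercurial/hgrc would make every pull fail.
    print(hasattr(ssl, 'PROTOCOL_TLSv1_2'), ssl.OPENSSL_VERSION)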
-diff --git a/taskcluster/docker/recipes/install-node.sh b/taskcluster/docker/recipes/install-node.sh
-deleted file mode 100644
---- a/taskcluster/docker/recipes/install-node.sh
-+++ /dev/null
-@@ -1,12 +0,0 @@
--#!/bin/bash
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--# This script installs Node v6.
--
--wget https://nodejs.org/dist/v6.9.1/node-v6.9.1-linux-x64.tar.gz
--echo 'a9d9e6308931fa2a2b0cada070516d45b76d752430c31c9198933c78f8d54b17  node-v6.9.1-linux-x64.tar.gz' | sha256sum -c
--tar -C /usr/local -xz --strip-components 1 < node-v6.9.1-linux-x64.tar.gz
--node -v  # verify
--npm -v
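install-node.sh pins the tarball to a known SHA-256 before unpacking it into /usr/local. The same verify-before-trust step sketched in Python 3, with the URL and digest taken from the script above:

    import hashlib
    import urllib.request

    URL = 'https://nodejs.org/dist/v6.9.1/node-v6.9.1-linux-x64.tar.gz'
    SHA256 = 'a9d9e6308931fa2a2b0cada070516d45b76d752430c31c9198933c78f8d54b17'

    # Download, then refuse to proceed unless the digest matches the pin.
    data = urllib.request.urlopen(URL).read()
    if hashlib.sha256(data).hexdigest() != SHA256:
        raise SystemExit('checksum mismatch')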
-diff --git a/taskcluster/docker/recipes/run-task b/taskcluster/docker/recipes/run-task
-deleted file mode 100755
---- a/taskcluster/docker/recipes/run-task
-+++ /dev/null
-@@ -1,356 +0,0 @@
--#!/usr/bin/python2.7 -u
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--"""Run a task after performing common actions.
--
--This script is meant to be the "driver" for TaskCluster based tasks.
--It receives some common arguments to control the run-time environment.
--
--It performs actions as requested from the arguments. Then it executes
--the requested process and prints its output, prefixing it with the
--current time to improve log usefulness.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import argparse
--import datetime
--import errno
--import grp
--import json
--import os
--import pwd
--import re
--import socket
--import stat
--import subprocess
--import sys
--import urllib2
--
--
--FINGERPRINT_URL = 'http://taskcluster/secrets/v1/secret/project/taskcluster/gecko/hgfingerprint'
--FALLBACK_FINGERPRINT = {
--    'fingerprints':
--        "sha256:8e:ad:f7:6a:eb:44:06:15:ed:f3:e4:69:a6:64:60:37:2d:ff:98:88:37"
--        ":bf:d7:b8:40:84:01:48:9c:26:ce:d9"}
--
--
--def print_line(prefix, m):
--    now = datetime.datetime.utcnow()
--    print(b'[%s %sZ] %s' % (prefix, now.isoformat(), m), end=b'')
--
--
--def run_and_prefix_output(prefix, args, extra_env=None):
--    """Runs a process and prefixes its output with the time.
--
--    Returns the process exit code.
--    """
--    print_line(prefix, b'executing %s\n' % args)
--
--    env = dict(os.environ)
--    env.update(extra_env or {})
--
--    # Note: TaskCluster's stdin is a TTY. This attribute is lost
--    # when we pass sys.stdin to the invoked process. If we cared
--    # to preserve stdin as a TTY, we could make this work. But until
--    # someone needs it, don't bother.
--    p = subprocess.Popen(args,
--                         # Disable buffering because we want to receive output
--                         # as it is generated so timestamps in logs are
--                         # accurate.
--                         bufsize=0,
--                         stdout=subprocess.PIPE,
--                         stderr=subprocess.STDOUT,
--                         stdin=sys.stdin.fileno(),
--                         cwd='/',
--                         env=env,
--                         # So \r in progress bars are rendered as multiple
--                         # lines, preserving progress indicators.
--                         universal_newlines=True)
--
--    while True:
--        data = p.stdout.readline()
--        if data == b'':
--            break
--
--        print_line(prefix, data)
--
--    return p.wait()
--
--
--def vcs_checkout(source_repo, dest, store_path,
--                 base_repo=None, revision=None, branch=None, fetch_hgfingerprint=False):
--    # Specify method to checkout a revision. This defaults to revisions as
--    # SHA-1 strings, but also supports symbolic revisions like `tip` via the
--    # branch flag.
--    if revision:
--        revision_flag = b'--revision'
--        revision_value = revision
--    elif branch:
--        revision_flag = b'--branch'
--        revision_value = branch
--    else:
--        print('revision is not specified for checkout')
--        sys.exit(1)
--
--    args = [
--        b'hg',
--        b'robustcheckout',
--        b'--sharebase', store_path,
--        b'--purge',
--    ]
--
--    # Obtain certificate fingerprints.  Without this, the checkout will use the fingerprint
--    # on the system, which is managed some other way (such as puppet).
--    if fetch_hgfingerprint:
--        try:
--            print_line(b'vcs', 'fetching hg.mozilla.org fingerprint from %s\n' %
--                       FINGERPRINT_URL)
--            res = urllib2.urlopen(FINGERPRINT_URL, timeout=10)
--            secret = res.read()
--            try:
--                secret = json.loads(secret, encoding='utf-8')
--            except ValueError:
--                print_line(b'vcs', 'invalid JSON in hg fingerprint secret')
--                sys.exit(1)
--        except (urllib2.URLError, socket.timeout):
--            print_line(b'vcs', 'Unable to retrieve current hg.mozilla.org fingerprint '
--                               'using the secret service, using fallback instead.')
--            # XXX This fingerprint will not be accurate if running on an old
--            #     revision after the server fingerprint has changed.
--            secret = {'secret': FALLBACK_FINGERPRINT}
--
--        hgmo_fingerprint = secret['secret']['fingerprints'].encode('ascii')
--        args.extend([
--            b'--config', b'hostsecurity.hg.mozilla.org:fingerprints=%s' % hgmo_fingerprint,
--        ])
--
--    if base_repo:
--        args.extend([b'--upstream', base_repo])
--
--    args.extend([
--        revision_flag, revision_value,
--        source_repo, dest,
--    ])
--
--    res = run_and_prefix_output(b'vcs', args,
--                                extra_env={b'PYTHONUNBUFFERED': b'1'})
--    if res:
--        sys.exit(res)
--
--    # Update the current revision hash and ensure that it is well formed.
--    revision = subprocess.check_output(
--        [b'hg', b'log',
--         b'--rev', b'.',
--         b'--template', b'{node}'],
--        cwd=dest)
--
--    assert re.match('^[a-f0-9]{40}$', revision)
--    return revision
--
--
--def main(args):
--    print_line(b'setup', b'run-task started\n')
--    running_as_root = os.getuid() == 0
--
--    # Arguments up to '--' are ours. After are for the main task
--    # to be executed.
--    try:
--        i = args.index('--')
--        our_args = args[0:i]
--        task_args = args[i + 1:]
--    except ValueError:
--        our_args = args
--        task_args = []
--
--    parser = argparse.ArgumentParser()
--    parser.add_argument('--user', default='worker', help='user to run as')
--    parser.add_argument('--group', default='worker', help='group to run as')
--    # We allow paths to be chowned by the --user:--group before permissions are
--    # dropped. This is often necessary for caches/volumes, since they default
--    # to root:root ownership.
--    parser.add_argument('--chown', action='append',
--                        help='Directory to chown to --user:--group')
--    parser.add_argument('--chown-recursive', action='append',
--                        help='Directory to recursively chown to --user:--group')
--    parser.add_argument('--vcs-checkout',
--                        help='Directory where Gecko checkout should be created')
--    parser.add_argument('--tools-checkout',
--                        help='Directory where build/tools checkout should be created')
--    parser.add_argument('--fetch-hgfingerprint', action='store_true',
--                        help='Fetch the latest hgfingerprint from the secrets store, '
--                        'using the taskclusterProxy')
--
--    args = parser.parse_args(our_args)
--
--    # expand ~ in some paths
--    if args.vcs_checkout:
--        args.vcs_checkout = os.path.expanduser(args.vcs_checkout)
--    if args.tools_checkout:
--        args.tools_checkout = os.path.expanduser(args.tools_checkout)
--    if args.chown:
--        args.chown = [os.path.expanduser(p) for p in args.chown]
--    if args.chown_recursive:
--        args.chown_recursive = [os.path.expanduser(p) for p in args.chown_recursive]
--    if 'HG_STORE_PATH' in os.environ:
--        os.environ['HG_STORE_PATH'] = os.path.expanduser(os.environ['HG_STORE_PATH'])
--
--    if running_as_root:
--        try:
--            user = pwd.getpwnam(args.user)
--        except KeyError:
--            print('could not find user %s; specify --user to a known user' %
--                  args.user)
--            return 1
--        try:
--            group = grp.getgrnam(args.group)
--        except KeyError:
--            print('could not find group %s; specify --group to a known group' %
--                  args.group)
--            return 1
--
--        # Find all groups to which this user is a member.
--        gids = [g.gr_gid for g in grp.getgrall() if args.group in g.gr_mem]
--
--        uid = user.pw_uid
--        gid = group.gr_gid
--    else:
--        uid = gid = gids = None
--
--    wanted_dir_mode = stat.S_IXUSR | stat.S_IRUSR | stat.S_IWUSR
--
--    def set_dir_permissions(path, uid, gid):
--        st = os.lstat(path)
--
--        if st.st_uid != uid or st.st_gid != gid:
--            os.chown(path, uid, gid)
--
--        # Also make sure dirs are writable in case we need to delete
--        # them.
--        if st.st_mode & wanted_dir_mode != wanted_dir_mode:
--            os.chmod(path, st.st_mode | wanted_dir_mode)
--
--    # Change ownership of requested paths.
--    # FUTURE: parse argument values for user/group if we don't want to
--    # use --user/--group.
--    for path in args.chown or []:
--        if not running_as_root:
--            print_line(b'set_dir_permissions', b'--chown not allowed when not running as root')
--            return 1
--
--        print_line(b'chown', b'changing ownership of %s to %s:%s\n' % (
--                   path, user.pw_name, group.gr_name))
--        set_dir_permissions(path, uid, gid)
--
--    for path in args.chown_recursive or []:
--        if not running_as_root:
--            print_line(b'set_dir_permissions', b'--chown-recursive not allowed when not running as root')
--            return 1
--
--        print_line(b'chown', b'recursively changing ownership of %s to %s:%s\n' %
--                   (path, user.pw_name, group.gr_name))
--
--        set_dir_permissions(path, uid, gid)
--
--        for root, dirs, files in os.walk(path):
--            for d in dirs:
--                set_dir_permissions(os.path.join(root, d), uid, gid)
--
--            for f in files:
--                # The file may be a symlink that points to nowhere, in which
--                # case os.chown() would fail because it attempts to follow
--                # the symlink. We only care about directory entries, not
--                # what they point to, so setting the owner of the symlink
--                # should be sufficient.
--                os.lchown(os.path.join(root, f), uid, gid)
--
--    def prepare_checkout_dir(checkout):
--        if not checkout:
--            return
--
--        # Ensure the directory for the source checkout exists.
--        try:
--            os.makedirs(os.path.dirname(checkout))
--        except OSError as e:
--            if e.errno != errno.EEXIST:
--                raise
--
--        # And that it is owned by the appropriate user/group.
--        if running_as_root:
--            os.chown(os.path.dirname(checkout), uid, gid)
--
--    def prepare_hg_store_path():
--        # And ensure the shared store path exists and has proper permissions.
--        if 'HG_STORE_PATH' not in os.environ:
--            print('error: HG_STORE_PATH environment variable not set')
--            sys.exit(1)
--
--        store_path = os.environ['HG_STORE_PATH']
--        try:
--            os.makedirs(store_path)
--        except OSError as e:
--            if e.errno != errno.EEXIST:
--                raise
--
--        if running_as_root:
--            os.chown(store_path, uid, gid)
--
--    prepare_checkout_dir(args.vcs_checkout)
--    prepare_checkout_dir(args.tools_checkout)
--    prepare_hg_store_path()
--
--    if running_as_root:
--        # Drop permissions to requested user.
--        # This code is modeled after what `sudo` was observed to do in a Docker
--        # container. We do not bother calling setrlimit() because containers have
--        # their own limits.
--        print_line(b'setup', b'running as %s:%s\n' % (args.user, args.group))
--        os.setgroups(gids)
--        os.umask(022)
--        os.setresgid(gid, gid, gid)
--        os.setresuid(uid, uid, uid)
--
--    # Checkout the repository, setting the GECKO_HEAD_REV to the current
--    # revision hash. Revision hashes have priority over symbolic revisions. We
--    # disallow running tasks with symbolic revisions unless they have been
--    # resolved by a checkout.
--    if args.vcs_checkout:
--        base_repo = os.environ.get('GECKO_BASE_REPOSITORY')
--        # Some callers set the base repository to mozilla-central for historical
--        # reasons. Switch to mozilla-unified because robustcheckout works best
--        # with it.
--        if base_repo == 'https://hg.mozilla.org/mozilla-central':
--            base_repo = b'https://hg.mozilla.org/mozilla-unified'
--
--        os.environ['GECKO_HEAD_REV'] = vcs_checkout(
--            os.environ['GECKO_HEAD_REPOSITORY'],
--            args.vcs_checkout,
--            os.environ['HG_STORE_PATH'],
--            base_repo=base_repo,
--            revision=os.environ.get('GECKO_HEAD_REV'),
--            branch=os.environ.get('GECKO_HEAD_REF'))
--
--    elif not os.environ.get('GECKO_HEAD_REV') and \
--            os.environ.get('GECKO_HEAD_REF'):
--        print('task should be defined in terms of non-symbolic revision')
--        return 1
--
--    if args.tools_checkout:
--        vcs_checkout(b'https://hg.mozilla.org/build/tools',
--                     args.tools_checkout,
--                     os.environ['HG_STORE_PATH'],
--                     # Always check out the latest commit on default branch.
--                     # This is non-deterministic!
--                     branch=b'default')
--
--    return run_and_prefix_output(b'task', task_args)
--
--
--if __name__ == '__main__':
--    # Unbuffer stdio.
--    sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
--    sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 0)
--
--    sys.exit(main(sys.argv[1:]))
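The heart of run-task is run_and_prefix_output(): the child's output is read unbuffered so each log line is stamped with the UTC time at which it actually appeared. A minimal Python 3 sketch of that pattern (the script itself is Python 2):

    import datetime
    import subprocess
    import sys

    def run_and_prefix(prefix, args):
        # bufsize=0 so output is received as it is generated and the
        # timestamps stay accurate, as in the original.
        p = subprocess.Popen(args, bufsize=0, stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT)
        for line in iter(p.stdout.readline, b''):
            now = datetime.datetime.utcnow().isoformat()
            sys.stdout.write('[%s %sZ] %s' % (prefix, now, line.decode()))
        return p.wait()

    # e.g.: sys.exit(run_and_prefix('task', ['/bin/ls', '/']))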
-diff --git a/taskcluster/docker/recipes/ubuntu1204-test-system-setup.sh b/taskcluster/docker/recipes/ubuntu1204-test-system-setup.sh
-deleted file mode 100644
---- a/taskcluster/docker/recipes/ubuntu1204-test-system-setup.sh
-+++ /dev/null
-@@ -1,287 +0,0 @@
--#!/usr/bin/env bash
--
--set -ve
--
--test `whoami` == 'root'
--
--mkdir -p /setup
--cd /setup
--
--apt_packages=()
--
--apt_packages+=('alsa-base')
--apt_packages+=('alsa-utils')
--apt_packages+=('autoconf2.13')
--apt_packages+=('bluez-alsa')
--apt_packages+=('bluez-alsa:i386')
--apt_packages+=('bluez-cups')
--apt_packages+=('bluez-gstreamer')
--apt_packages+=('build-essential')
--apt_packages+=('ca-certificates')
--apt_packages+=('ccache')
--apt_packages+=('curl')
--apt_packages+=('fonts-kacst')
--apt_packages+=('fonts-kacst-one')
--apt_packages+=('fonts-liberation')
--apt_packages+=('fonts-stix')
--apt_packages+=('fonts-unfonts-core')
--apt_packages+=('fonts-unfonts-extra')
--apt_packages+=('fonts-vlgothic')
--apt_packages+=('g++-multilib')
--apt_packages+=('gcc-multilib')
--apt_packages+=('gir1.2-gnomebluetooth-1.0')
--apt_packages+=('git')
--apt_packages+=('gstreamer0.10-alsa')
--apt_packages+=('gstreamer0.10-ffmpeg')
--apt_packages+=('gstreamer0.10-plugins-bad')
--apt_packages+=('gstreamer0.10-plugins-base')
--apt_packages+=('gstreamer0.10-plugins-good')
--apt_packages+=('gstreamer0.10-plugins-ugly')
--apt_packages+=('gstreamer0.10-tools')
--apt_packages+=('language-pack-en-base')
--apt_packages+=('libasound2-dev')
--apt_packages+=('libasound2-plugins:i386')
--apt_packages+=('libcanberra-pulse')
--apt_packages+=('libcurl4-openssl-dev')
--apt_packages+=('libdbus-1-dev')
--apt_packages+=('libdbus-glib-1-dev')
--apt_packages+=('libdrm-intel1:i386')
--apt_packages+=('libdrm-nouveau1a:i386')
--apt_packages+=('libdrm-radeon1:i386')
--apt_packages+=('libdrm2:i386')
--apt_packages+=('libexpat1:i386')
--apt_packages+=('libgconf2-dev')
--apt_packages+=('libgnome-bluetooth8')
--apt_packages+=('libgstreamer-plugins-base0.10-dev')
--apt_packages+=('libgstreamer0.10-dev')
--apt_packages+=('libgtk2.0-dev')
--apt_packages+=('libiw-dev')
--apt_packages+=('libllvm2.9')
--apt_packages+=('libllvm3.0:i386')
--apt_packages+=('libncurses5:i386')
--apt_packages+=('libnotify-dev')
--apt_packages+=('libpulse-dev')
--apt_packages+=('libpulse-mainloop-glib0:i386')
--apt_packages+=('libpulsedsp:i386')
--apt_packages+=('libsdl1.2debian:i386')
--apt_packages+=('libsox-fmt-alsa')
--apt_packages+=('libx11-xcb1:i386')
--apt_packages+=('libxdamage1:i386')
--apt_packages+=('libxfixes3:i386')
--apt_packages+=('libxt-dev')
--apt_packages+=('libxxf86vm1')
--apt_packages+=('libxxf86vm1:i386')
--apt_packages+=('llvm')
--apt_packages+=('llvm-2.9')
--apt_packages+=('llvm-2.9-dev')
--apt_packages+=('llvm-2.9-runtime')
--apt_packages+=('llvm-dev')
--apt_packages+=('llvm-runtime')
--apt_packages+=('nano')
--apt_packages+=('pulseaudio')
--apt_packages+=('pulseaudio-module-X11')
--apt_packages+=('pulseaudio-module-bluetooth')
--apt_packages+=('pulseaudio-module-gconf')
--apt_packages+=('rlwrap')
--apt_packages+=('screen')
--apt_packages+=('software-properties-common')
--apt_packages+=('sudo')
--apt_packages+=('tar')
--apt_packages+=('ttf-arphic-uming')
--apt_packages+=('ttf-dejavu')
--apt_packages+=('ttf-indic-fonts-core')
--apt_packages+=('ttf-kannada-fonts')
--apt_packages+=('ttf-oriya-fonts')
--apt_packages+=('ttf-paktype')
--apt_packages+=('ttf-punjabi-fonts')
--apt_packages+=('ttf-sazanami-mincho')
--apt_packages+=('ubuntu-desktop')
--apt_packages+=('unzip')
--apt_packages+=('uuid')
--apt_packages+=('vim')
--apt_packages+=('wget')
--apt_packages+=('xvfb')
--apt_packages+=('yasm')
--apt_packages+=('zip')
--
--# get xvinfo for test-linux.sh to monitor Xvfb startup
--apt_packages+=('x11-utils')
--
--# Bug 1232407 - this allows the user to start vnc
--apt_packages+=('x11vnc')
--
--# Bug 1176031: need `xset` to disable screensavers
--apt_packages+=('x11-xserver-utils')
--
--# use Ubuntu's Python-2.7 (2.7.3 on Precise)
--apt_packages+=('python-dev')
--apt_packages+=('python-pip')
--
--apt-get update
--# This allows ubuntu-desktop to be installed without human interaction
--export DEBIAN_FRONTEND=noninteractive
--apt-get install -y --force-yes ${apt_packages[@]}
--
--dpkg-reconfigure locales
--
--tooltool_fetch() {
--    cat >manifest.tt
--    python /setup/tooltool.py fetch
--    rm manifest.tt
--}
--
--. /tmp/install-mercurial.sh
--
--# install peep
--tooltool_fetch <<'EOF'
--[
--{
--    "size": 26912,
--    "digest": "9d730ed7852d4d217aaddda959cd5f871ef1b26dd6c513a3780bbb04a5a93a49d6b78e95c2274451a1311c10cc0a72755b269dc9af62640474e6e73a1abec370",
--    "algorithm": "sha512",
--    "filename": "peep-2.4.1.tar.gz",
--    "unpack": false
--}
--]
--EOF
--pip install peep-2.4.1.tar.gz
--
--# remaining Python utilities are installed with `peep` from upstream
--# repositories; peep verifies file integrity for us
--cat >requirements.txt <<'EOF'
--# wheel
--# sha256: 90pZQ6kAXB6Je8-H9-ivfgDAb6l3e5rWkfafn6VKh9g
--# tarball:
--# sha256: qryO8YzdvYoqnH-SvEPi_qVLEUczDWXbkg7zzpgS49w
--virtualenv==13.1.2
--EOF
--peep install -r requirements.txt
--
--# Install node
--wget https://nodejs.org/dist/v5.0.0/node-v5.0.0-linux-x64.tar.gz
--echo 'ef73b59048a0ed11d01633f0061627b7a9879257deb9add2255e4d0808f8b671  node-v5.0.0-linux-x64.tar.gz' | sha256sum -c
--tar -C /usr/local -xz --strip-components 1 < node-v5.0.0-linux-x64.tar.gz
--node -v  # verify
--
--# Install custom-built Debian packages.  These come from a set of repositories
--# packaged in tarballs on tooltool to make them replicable.  Because they have
--# inter-dependencies, we install all repositories first, then perform the
--# installation.
--cp /etc/apt/sources.list sources.list.orig
--
--# Install a slightly newer version of libxcb
--# See bugs 975216 and 1334641 for the original build of these packages
--# NOTE: if you're re-creating this, the tarball contains an `update.sh` which will rebuild the repository.
--tooltool_fetch <<'EOF'
--[
--  {
--    "size": 9711517,
--    "visibility": "public",
--    "digest": "ecbcebfb409ad9f7f2a9b6b058e20d49e45b3fd5d94dac59e94ff9a54844611f715230468af506a10a5cd62df6df74fdf0e126d43f6bec743eb803ded0740da7",
--    "algorithm": "sha512",
--    "filename": "xcb-repo-1.8.1-2ubuntu2.1mozilla2.tgz"
--  }
--]
--EOF
--tar -zxf xcb-repo-*.tgz
--echo "deb file://$PWD/xcb precise all" >> /etc/apt/sources.list
--
--# Install a patched version of mesa, per bug 1227637.  Origin of the packages themselves is unknown, as
--# these binaries were copied from the apt repositories used by puppet.  Ask rail for more information.
--# NOTE: if you're re-creating this, the tarball contains an `update.sh` which will rebuild the repository.
--tooltool_fetch <<'EOF'
--[
--{
--    "size": 590643702, 
--    "visibility": "public", 
--    "digest": "f03b11987c218e57073d1b7eec6cc0a753d48f600df8dde0a35fa7c4d4d30b3891c9cbcaee38ade23f038e72951cb15f0dca3f7c76cbf5bad5526baf13e91929", 
--    "algorithm": "sha512", 
--    "filename": "mesa-repo-9.2.1-1ubuntu3~precise1mozilla2.tgz"
--}
--]
--EOF
--tar -zxf mesa-repo-*.tgz
--echo "deb file://$PWD/mesa precise all" >> /etc/apt/sources.list
--
--# Install Valgrind (trunk, late Jan 2016) and do some crude sanity
--# checks.  It has to go in /usr/local, otherwise it won't work.  Copy
--# the launcher binary to /usr/bin, though, so that direct invocations
--# of /usr/bin/valgrind also work.  Also install libc6-dbg since
--# Valgrind won't work at all without the debug symbols for libc.so and
--# ld.so being available.
--tooltool_fetch <<'EOF'
--[
--{
--    "size": 41331092,
--    "visibility": "public",
--    "digest": "a89393c39171b8304fc262094a650df9a756543ffe9fbec935911e7b86842c4828b9b831698f97612abb0eca95cf7f7b3ff33ea7a9b0313b30c9be413a5efffc",
--    "algorithm": "sha512",
--    "filename": "valgrind-15775-3206-ubuntu1204.tgz"
--}
--]
--EOF
--cp valgrind-15775-3206-ubuntu1204.tgz /tmp
--(cd / && tar xzf /tmp/valgrind-15775-3206-ubuntu1204.tgz)
--rm /tmp/valgrind-15775-3206-ubuntu1204.tgz
--cp /usr/local/bin/valgrind /usr/bin/valgrind
--apt-get install -y libc6-dbg
--valgrind --version
--valgrind date
--
--# Fetch the minidump_stackwalk binary specified by the in-tree tooltool manifest.
--python /setup/tooltool.py fetch -m /tmp/minidump_stackwalk.manifest
--rm /tmp/minidump_stackwalk.manifest
--mv linux64-minidump_stackwalk /usr/local/bin/
--chmod +x /usr/local/bin/linux64-minidump_stackwalk
--
--apt-get update
--
--apt-get -q -y --force-yes install \
--    libxcb1 \
--    libxcb-render0 \
--    libxcb-shm0 \
--    libxcb-glx0 \
--    libxcb-shape0 libxcb-glx0:i386
--libxcb1_version=$(dpkg-query -s libxcb1 | grep ^Version | awk '{ print $2 }')
--[ "$libxcb1_version" = "1.8.1-2ubuntu2.1mozilla2" ] || exit 1
--
--apt-get -q -y --force-yes install \
--    libgl1-mesa-dev-lts-saucy:i386 \
--    libgl1-mesa-dri-lts-saucy \
--    libgl1-mesa-dri-lts-saucy:i386 \
--    libgl1-mesa-glx-lts-saucy \
--    libgl1-mesa-glx-lts-saucy:i386 \
--    libglapi-mesa-lts-saucy \
--    libglapi-mesa-lts-saucy:i386 \
--    libxatracker1-lts-saucy \
--    mesa-common-dev-lts-saucy:i386
--mesa_version=$(dpkg-query -s libgl1-mesa-dri-lts-saucy | grep ^Version | awk '{ print $2 }')
--[ "$mesa_version" = "9.2.1-1ubuntu3~precise1mozilla2" ] || exit 1
--
--# additional packages for linux32 tests
--apt-get -q -y --force-yes install \
--    libcanberra-gtk3-module:i386 \
--    libcanberra-gtk-module:i386 \
--    libdbus-glib-1-2:i386 \
--    libgtk-3-0:i386 \
--    openjdk-7-jdk:i386
--
--# revert the list of repos
--cp sources.list.orig /etc/apt/sources.list
--apt-get update
--
--# node 5 requires a C++11 compiler.
--add-apt-repository ppa:ubuntu-toolchain-r/test
--apt-get update
--apt-get -y install gcc-4.8 g++-4.8
--update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-4.8 20 --slave /usr/bin/g++ g++ /usr/bin/g++-4.8
--update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-4.6 10 --slave /usr/bin/g++ g++ /usr/bin/g++-4.6
--
--# clean up
--apt_packages+=('mesa-common-dev')
--
--cd /
--rm -rf /setup ~/.ccache ~/.cache ~/.npm
--apt-get clean
--apt-get autoclean
--rm -f $0
-diff --git a/taskcluster/docker/recipes/ubuntu1604-test-system-setup.sh b/taskcluster/docker/recipes/ubuntu1604-test-system-setup.sh
-deleted file mode 100644
---- a/taskcluster/docker/recipes/ubuntu1604-test-system-setup.sh
-+++ /dev/null
-@@ -1,179 +0,0 @@
--#!/usr/bin/env bash
--
--set -ve
--
--test `whoami` == 'root'
--
--mkdir -p /setup
--cd /setup
--
--apt_packages=()
--
--apt_packages+=('alsa-base')
--apt_packages+=('alsa-utils')
--apt_packages+=('autoconf2.13')
--apt_packages+=('bluez-cups')
--apt_packages+=('build-essential')
--apt_packages+=('ca-certificates')
--apt_packages+=('ccache')
--apt_packages+=('curl')
--apt_packages+=('fonts-kacst')
--apt_packages+=('fonts-kacst-one')
--apt_packages+=('fonts-liberation')
--apt_packages+=('fonts-stix')
--apt_packages+=('fonts-unfonts-core')
--apt_packages+=('fonts-unfonts-extra')
--apt_packages+=('fonts-vlgothic')
--apt_packages+=('g++-multilib')
--apt_packages+=('gcc-multilib')
--apt_packages+=('gir1.2-gnomebluetooth-1.0')
--apt_packages+=('git')
--apt_packages+=('gstreamer0.10-alsa')
--apt_packages+=('gstreamer0.10-plugins-base')
--apt_packages+=('gstreamer0.10-plugins-good')
--apt_packages+=('gstreamer0.10-tools')
--apt_packages+=('language-pack-en-base')
--apt_packages+=('libasound2-dev')
--apt_packages+=('libcanberra-pulse')
--apt_packages+=('libcurl4-openssl-dev')
--apt_packages+=('libdbus-1-dev')
--apt_packages+=('libdbus-glib-1-dev')
--apt_packages+=('libgconf2-dev')
--apt_packages+=('libgstreamer-plugins-base0.10-dev')
--apt_packages+=('libgstreamer0.10-dev')
--apt_packages+=('libgtk2.0-dev')
--apt_packages+=('libiw-dev')
--apt_packages+=('libnotify-dev')
--apt_packages+=('libpulse-dev')
--apt_packages+=('libsox-fmt-alsa')
--apt_packages+=('libxt-dev')
--apt_packages+=('libxxf86vm1')
--apt_packages+=('llvm')
--apt_packages+=('llvm-dev')
--apt_packages+=('llvm-runtime')
--apt_packages+=('nano')
--apt_packages+=('net-tools')
--apt_packages+=('pulseaudio')
--apt_packages+=('pulseaudio-module-bluetooth')
--apt_packages+=('pulseaudio-module-gconf')
--apt_packages+=('rlwrap')
--apt_packages+=('screen')
--apt_packages+=('software-properties-common')
--apt_packages+=('sudo')
--apt_packages+=('tar')
--apt_packages+=('ttf-dejavu')
--apt_packages+=('ubuntu-desktop')
--apt_packages+=('unzip')
--apt_packages+=('uuid')
--apt_packages+=('vim')
--apt_packages+=('wget')
--apt_packages+=('xvfb')
--apt_packages+=('yasm')
--apt_packages+=('zip')
--
--# get xvinfo for test-linux.sh to monitor Xvfb startup
--apt_packages+=('x11-utils')
--
--# Bug 1232407 - this allows the user to start vnc
--apt_packages+=('x11vnc')
--
--# Bug 1176031: need `xset` to disable screensavers
--apt_packages+=('x11-xserver-utils')
--
--# use Ubuntu's Python-2.7
--apt_packages+=('python-dev')
--apt_packages+=('python-pip')
--
--apt-get update
--# This allows ubuntu-desktop to be installed without human interaction
--export DEBIAN_FRONTEND=noninteractive
--apt-get install -y -f ${apt_packages[@]}
--
--dpkg-reconfigure locales
--
--. /setup/common.sh
--. /setup/install-mercurial.sh
--
--pip install --upgrade pip
--
--pip install virtualenv
--
--. /setup/install-node.sh
--
--# Install custom-built Debian packages.  These come from a set of repositories
--# packaged in tarballs on tooltool to make them replicable.  Because they have
--# inter-dependencies, we install all repositories first, then perform the
--# installation.
--cp /etc/apt/sources.list sources.list.orig
--
--# Install Valgrind (trunk, late Jan 2016) and do some crude sanity
--# checks.  It has to go in /usr/local, otherwise it won't work.  Copy
--# the launcher binary to /usr/bin, though, so that direct invocations
--# of /usr/bin/valgrind also work.  Also install libc6-dbg since
--# Valgrind won't work at all without the debug symbols for libc.so and
--# ld.so being available.
--tooltool_fetch <<'EOF'
--[
--{
--    "size": 41331092,
--    "visibility": "public",
--    "digest": "a89393c39171b8304fc262094a650df9a756543ffe9fbec935911e7b86842c4828b9b831698f97612abb0eca95cf7f7b3ff33ea7a9b0313b30c9be413a5efffc",
--    "algorithm": "sha512",
--    "filename": "valgrind-15775-3206-ubuntu1204.tgz"
--}
--]
--EOF
--cp valgrind-15775-3206-ubuntu1204.tgz /tmp
--(cd / && tar xzf /tmp/valgrind-15775-3206-ubuntu1204.tgz)
--rm /tmp/valgrind-15775-3206-ubuntu1204.tgz
--cp /usr/local/bin/valgrind /usr/bin/valgrind
--apt-get install -y libc6-dbg
--valgrind --version
--valgrind date
--
--# Fetch the minidump_stackwalk binary specified by the in-tree tooltool manifest.
--python /setup/tooltool.py fetch -m /tmp/minidump_stackwalk.manifest
--rm /tmp/minidump_stackwalk.manifest
--mv linux64-minidump_stackwalk /usr/local/bin/
--chmod +x /usr/local/bin/linux64-minidump_stackwalk
--
--# adding multiverse to get 'ubuntu-restricted-extras' below
--apt-add-repository multiverse
--apt-get update
--
--# for mp4 codec (used in MSE tests)
--apt-get -q -y -f install ubuntu-restricted-extras
--# TEMPORARY: we do not want flash installed, but the above pulls it in (bug 1349208)
--rm -f /usr/lib/flashplugin-installer/libflashplayer.so
--
--apt-get -q -y -f install \
--    libxcb1 \
--    libxcb-render0 \
--    libxcb-shm0 \
--    libxcb-glx0 \
--    libxcb-shape0
--
--apt-get -q -y -f install \
--    libgl1-mesa-dri \
--    libgl1-mesa-glx \
--    mesa-common-dev
--
--# additional packages for linux32 tests
--sudo dpkg --add-architecture i386
--apt-get update
--apt-get -q -y -f install \
--    libavcodec-ffmpeg-extra56:i386 \
--    libgtk-3-0:i386 \
--    libdbus-glib-1-2:i386 \
--    openjdk-8-jdk:i386
--
--# revert the list of repos
--cp sources.list.orig /etc/apt/sources.list
--apt-get update
--
--# clean up
--cd /
--rm -rf /setup ~/.ccache ~/.cache ~/.npm
--apt-get clean
--apt-get autoclean
--rm -f $0
-diff --git a/taskcluster/docker/recipes/xvfb.sh b/taskcluster/docker/recipes/xvfb.sh
-deleted file mode 100644
---- a/taskcluster/docker/recipes/xvfb.sh
-+++ /dev/null
-@@ -1,75 +0,0 @@
--#! /bin/bash -x
--
--set -x
--
--fail() {
--    echo # make sure error message is on a new line
--    echo "[xvfb.sh:error]" "${@}"
--    exit 1
--}
--
--cleanup_xvfb() {
--    # When this script is called with START_VNC or TASKCLUSTER_INTERACTIVE
--    # set, we make sure not to kill Xvfb so you do not lose your connection.
--    local xvfb_pid=`pidof Xvfb`
--    local vnc=${START_VNC:-false}
--    local interactive=${TASKCLUSTER_INTERACTIVE:-false}
--    if [ -n "$xvfb_pid" ] && [[ $vnc == false ]] && [[ $interactive == false ]] ; then
--        kill $xvfb_pid || true
--        screen -XS xvfb quit || true
--    fi
--}
--
--# Attempt to start xvfb in a screen session with the given resolution and display
--# number.  Up to 5 attempts will be made to start xvfb with a short delay
--# between retries.
--try_xvfb() {
--    screen -dmS xvfb Xvfb :$2 -nolisten tcp -screen 0 $1 \
--       > ~/artifacts/xvfb/xvfb.log 2>&1
--    export DISPLAY=:$2
--
--    # Only error code 255 matters, because it signifies that no
--    # display could be opened. As long as we can open the display
--    # tests should work. We'll retry a few times with a sleep before
--    # failing.
--    local retry_count=0
--    local max_retries=5
--    xvfb_test=0
--    until [ $retry_count -gt $max_retries ]; do
--        xvinfo || xvfb_test=$?
--        if [ $xvfb_test != 255 ]; then
--            retry_count=$(($max_retries + 1))
--        else
--            retry_count=$(($retry_count + 1))
--            echo "Failed to start Xvfb, retry: $retry_count"
--            sleep 2
--        fi
--    done
--    if [ $xvfb_test == 255 ]; then
--        return 1
--    else
--        return 0
--    fi
--}
--
--start_xvfb() {
--    set +e
--    mkdir -p ~/artifacts/xvfb
--    local retry_count=0
--    local max_retries=2
--    local success=1
--    until [ $retry_count -gt $max_retries ]; do
--        try_xvfb $1 $2
--        success=$?
--        if [ $success -eq 0 ]; then
--            retry_count=$(($max_retries + 1))
--        else
--            retry_count=$(($retry_count + 1))
--            sleep 10
--        fi
--    done
--    set -e
--    if [ $success -eq 1 ]; then
--        fail "Could not start xvfb after ${max_retries} attempts"
--    fi
--}
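try_xvfb() treats exit status 255 from xvinfo as the only fatal signal (no display could be opened) and polls a few times before giving up. The same readiness loop sketched in Python (display string illustrative):

    import subprocess
    import time

    def wait_for_display(display=':0', attempts=5, delay=2):
        # xvinfo exits 255 only when the display cannot be opened; any
        # other status means the X server is answering.
        for _ in range(attempts):
            if subprocess.call(['xvinfo', '-display', display]) != 255:
                return True
            time.sleep(delay)
        return False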
-diff --git a/taskcluster/docker/rust-build/Dockerfile b/taskcluster/docker/rust-build/Dockerfile
-deleted file mode 100644
---- a/taskcluster/docker/rust-build/Dockerfile
-+++ /dev/null
-@@ -1,32 +0,0 @@
--FROM          quay.io/rust/rust-buildbot
--MAINTAINER    Ralph Giles <giles@mozilla.com>
--
--# Reset user/workdir from parent image so we can install software.
--WORKDIR /
--USER root
--
--# Install tooltool directly from github.
--ADD https://raw.githubusercontent.com/mozilla/build-tooltool/master/tooltool.py /build/tooltool.py
--RUN chmod +rx /build/tooltool.py
--
--# Add build scripts.
--ADD             fetch_rust.sh build_rust.sh /build/
--ADD             fetch_cargo.sh build_cargo.sh /build/
--ADD             package_rust.sh upload_rust.sh /build/
--ADD             repack_rust.py splat_rust.py /build/
--RUN             chmod +x /build/*
--
--# Create user for doing the build.
--ENV USER worker
--ENV HOME /home/${USER}
--
--RUN useradd -d ${HOME} -m ${USER}
--
--# Set up the user's tree
--WORKDIR ${HOME}
--
--# Invoke our build scripts by default, but allow other commands.
--USER ${USER}
--ENTRYPOINT /build/fetch_rust.sh && /build/build_rust.sh && \
--  /build/fetch_cargo.sh && /build/build_cargo.sh && \
--  /build/package_rust.sh && /build/upload_rust.sh
-diff --git a/taskcluster/docker/rust-build/README.md b/taskcluster/docker/rust-build/README.md
-deleted file mode 100644
---- a/taskcluster/docker/rust-build/README.md
-+++ /dev/null
-@@ -1,2 +0,0 @@
--This is a docker script for building rust toolchains for
--use in Mozilla's build clusters.
-diff --git a/taskcluster/docker/rust-build/REGISTRY b/taskcluster/docker/rust-build/REGISTRY
-deleted file mode 100644
---- a/taskcluster/docker/rust-build/REGISTRY
-+++ /dev/null
-@@ -1,1 +0,0 @@
--quay.io/rust
-diff --git a/taskcluster/docker/rust-build/VERSION b/taskcluster/docker/rust-build/VERSION
-deleted file mode 100644
---- a/taskcluster/docker/rust-build/VERSION
-+++ /dev/null
-@@ -1,1 +0,0 @@
--0.4.8
-diff --git a/taskcluster/docker/rust-build/build_cargo.sh b/taskcluster/docker/rust-build/build_cargo.sh
-deleted file mode 100644
---- a/taskcluster/docker/rust-build/build_cargo.sh
-+++ /dev/null
-@@ -1,30 +0,0 @@
--#!/bin/bash -vex
--
--set -x -e
--
--: WORKSPACE ${WORKSPACE:=/home/worker}
--: BRANCH ${BRANCH:=0.15.0}
--
--set -v
--
--# Configure and build cargo.
--
--if test $(uname -s) = "Darwin"; then
--  export MACOSX_DEPLOYMENT_TARGET=10.7
--fi
--
--# Build the initial cargo checkout, which can download a snapshot.
--pushd ${WORKSPACE}/cargo
--./configure --prefix=${WORKSPACE}/rustc --local-rust-root=${WORKSPACE}/rustc
--make
--make dist
--make install
--popd
--
--# Build the version we want.
--export PATH=$PATH:${WORKSPACE}/rustc/bin
--pushd ${WORKSPACE}/cargo
--make clean
--git checkout ${BRANCH}
--OPENSSL_DIR=/rustroot/cargo64 cargo install --root=${WORKSPACE}/rustc --force
--popd
-diff --git a/taskcluster/docker/rust-build/build_rust.sh b/taskcluster/docker/rust-build/build_rust.sh
-deleted file mode 100644
---- a/taskcluster/docker/rust-build/build_rust.sh
-+++ /dev/null
-@@ -1,28 +0,0 @@
--#!/bin/bash -vex
--
--set -x -e
--
--: WORKSPACE ${WORKSPACE:=/home/worker}
--
--CORES=$(nproc || grep -c ^processor /proc/cpuinfo || sysctl -n hw.ncpu)
--
--set -v
--
--# Configure and build rust.
--OPTIONS="--enable-llvm-static-stdcpp --disable-docs"
--OPTIONS+="--enable-debuginfo"
--OPTIONS+="--release-channel=stable"
--i586="i586-unknown-linux-gnu"
--i686="i686-unknown-linux-gnu"
--x64="x86_64-unknown-linux-gnu"
--arm_android="arm-linux-androideabi"
--x86_android="i686-linux-android"
--
--mkdir -p ${WORKSPACE}/rust-build
--pushd ${WORKSPACE}/rust-build
--${WORKSPACE}/rust/configure --prefix=${WORKSPACE}/rustc \
--  --target=${x64},${i686} ${OPTIONS}
--make -j ${CORES}
--make dist
--make install
--popd
-diff --git a/taskcluster/docker/rust-build/build_rust_mac.sh b/taskcluster/docker/rust-build/build_rust_mac.sh
-deleted file mode 100644
---- a/taskcluster/docker/rust-build/build_rust_mac.sh
-+++ /dev/null
-@@ -1,36 +0,0 @@
--#!/bin/bash -vex
--
--set -e
--
--: WORKSPACE ${WORKSPACE:=$PWD}
--: TOOLTOOL ${TOOLTOOL:=python $WORKSPACE/tooltool.py}
--
--CORES=$(nproc || grep -c ^processor /proc/cpuinfo || sysctl -n hw.ncpu)
--echo Building on $CORES cpus...
--
--OPTIONS="--enable-debuginfo --disable-docs"
--TARGETS="x86_64-apple-darwin,i686-apple-darwin"
--
--PREFIX=${WORKSPACE}/rustc
--
--set -v
--
--mkdir -p ${WORKSPACE}/gecko-rust-mac
--pushd ${WORKSPACE}/gecko-rust-mac
--
--export MACOSX_DEPLOYMENT_TARGET=10.7
--${WORKSPACE}/rust/configure --prefix=${PREFIX} --target=${TARGETS} ${OPTIONS}
--make -j ${CORES}
--
--rm -rf ${PREFIX}
--mkdir ${PREFIX}
--make dist
--make install
--popd
--
--# Package the toolchain for upload.
--pushd ${WORKSPACE}
--rustc/bin/rustc --version
--tar cvjf rustc.tar.bz2 rustc/*
--${TOOLTOOL} add --visibility=public --unpack rustc.tar.bz2
--popd
-diff --git a/taskcluster/docker/rust-build/fetch_cargo.sh b/taskcluster/docker/rust-build/fetch_cargo.sh
-deleted file mode 100644
---- a/taskcluster/docker/rust-build/fetch_cargo.sh
-+++ /dev/null
-@@ -1,21 +0,0 @@
--#!/bin/bash -vex
--
--set -x -e
--
--# Inputs, with defaults
--
--: REPOSITORY   ${REPOSITORY:=https://github.com/rust-lang/cargo}
--: BRANCH       ${BRANCH:=0.14.0}
--
--: WORKSPACE    ${WORKSPACE:=/home/worker}
--
--set -v
--
--# Check out rust sources
--SRCDIR=${WORKSPACE}/cargo
--git clone --recursive $REPOSITORY -b $BRANCH ${SRCDIR}
--
--# Report version
--VERSION=$(git -C ${SRCDIR} describe --tags --dirty)
--COMMIT=$(git -C ${SRCDIR} rev-parse HEAD)
--echo "cargo ${VERSION} (commit ${COMMIT})" | tee cargo-version
-diff --git a/taskcluster/docker/rust-build/fetch_rust.sh b/taskcluster/docker/rust-build/fetch_rust.sh
-deleted file mode 100644
---- a/taskcluster/docker/rust-build/fetch_rust.sh
-+++ /dev/null
-@@ -1,20 +0,0 @@
--#!/bin/bash -vex
--
--set -x -e
--
--# Inputs, with defaults
--
--: RUST_REPOSITORY ${RUST_REPOSITORY:=https://github.com/rust-lang/rust}
--: RUST_BRANCH     ${RUST_BRANCH:=stable}
--
--: WORKSPACE       ${WORKSPACE:=/home/worker}
--
--set -v
--
--# Check out rust sources
--git clone $RUST_REPOSITORY -b $RUST_BRANCH ${WORKSPACE}/rust
--
--# Report version
--VERSION=$(git -C ${WORKSPACE}/rust describe --tags --dirty)
--COMMIT=$(git -C ${WORKSPACE}/rust rev-parse HEAD)
--echo "rust ${VERSION} (commit ${COMMIT})" | tee rust-version
-diff --git a/taskcluster/docker/rust-build/package_rust.sh b/taskcluster/docker/rust-build/package_rust.sh
-deleted file mode 100644
---- a/taskcluster/docker/rust-build/package_rust.sh
-+++ /dev/null
-@@ -1,13 +0,0 @@
--#!/bin/bash -vex
--
--set -x -e
--
--: WORKSPACE ${WORKSPACE:=/home/worker}
--
--set -v
--
--# Package the toolchain for upload.
--pushd ${WORKSPACE}
--tar cvJf rustc.tar.xz rustc/*
--/build/tooltool.py add --visibility=public --unpack rustc.tar.xz
--popd
-diff --git a/taskcluster/docker/rust-build/repack_rust.py b/taskcluster/docker/rust-build/repack_rust.py
-deleted file mode 100644
---- a/taskcluster/docker/rust-build/repack_rust.py
-+++ /dev/null
-@@ -1,238 +0,0 @@
--#!/bin/env python
--'''
--This script downloads and repacks official rust language builds
--with the necessary tool and target support for the Firefox
--build environment.
--'''
--
--import argparse
--import os.path
--import sys
--
--import requests
--import subprocess
--import toml
--
--
--def log(msg):
--    print('repack: %s' % msg)
--
--
--def fetch_file(url):
--    '''Download a file from the given url if it's not already present.'''
--    filename = os.path.basename(url)
--    if os.path.exists(filename):
--        return
--    r = requests.get(url, stream=True)
--    r.raise_for_status()
--    with open(filename, 'wb') as fd:
--        for chunk in r.iter_content(4096):
--            fd.write(chunk)
--
--
--def sha256sum():
--    '''Return the command for verifying SHA-2 256-bit checksums.'''
--    if sys.platform.startswith('darwin'):
--        return 'shasum'
--    else:
--        return 'sha256sum'
--
--
--def fetch(url):
--    '''Download and verify a package url.'''
--    base = os.path.basename(url)
--    log('Fetching %s...' % base)
--    fetch_file(url + '.asc')
--    fetch_file(url)
--    fetch_file(url + '.sha256')
--    log('Verifying %s...' % base)
--    shasum = sha256sum()
--    subprocess.check_call([shasum, '-c', base + '.sha256'])
--    subprocess.check_call(['gpg', '--verify', base + '.asc', base])
--    subprocess.check_call([
--        'keybase', 'pgp', 'verify', '-d', base + '.asc', '-i', base,
--    ])
--
--
--def install(filename, target):
--    '''Run a package's installer script against the given target directory.'''
--    log('Unpacking %s...' % filename)
--    subprocess.check_call(['tar', 'xf', filename])
--    basename = filename.split('.tar')[0]
--    log('Installing %s...' % basename)
--    install_cmd = [os.path.join(basename, 'install.sh')]
--    install_cmd += ['--prefix=' + os.path.abspath(target)]
--    install_cmd += ['--disable-ldconfig']
--    subprocess.check_call(install_cmd)
--    log('Cleaning %s...' % basename)
--    subprocess.check_call(['rm', '-rf', basename])
--
--
--def package(manifest, pkg, target):
--    '''Pull out the package dict for a particular package and target
--    from the given manifest.'''
--    version = manifest['pkg'][pkg]['version']
--    info = manifest['pkg'][pkg]['target'][target]
--    return (version, info)
--
--
--def fetch_package(manifest, pkg, host):
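--    '''Fetch and verify pkg for host, returning its manifest info dict.'''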
--    version, info = package(manifest, pkg, host)
--    log('%s %s\n  %s\n  %s' % (pkg, version, info['url'], info['hash']))
--    if not info['available']:
--        log('%s marked unavailable for %s' % (pkg, host))
--        raise AssertionError
--    fetch(info['url'])
--    return info
--
--
--def fetch_std(manifest, targets):
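--    '''Fetch the rust-std package for each of the given targets.'''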
--    stds = []
--    for target in targets:
--        info = fetch_package(manifest, 'rust-std', target)
--        stds.append(info)
--    return stds
--
--
--def tar_for_host(host):
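--    '''Return tar options and archive extension suited to the host platform.'''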
--    if 'linux' in host:
--        tar_options = 'cJf'
--        tar_ext = '.tar.xz'
--    else:
--        tar_options = 'cjf'
--        tar_ext = '.tar.bz2'
--    return tar_options, tar_ext
--
--
--def fetch_manifest(channel='stable'):
--    url = 'https://static.rust-lang.org/dist/channel-rust-' + channel + '.toml'
--    req = requests.get(url)
--    req.raise_for_status()
--    manifest = toml.loads(req.content)
--    if manifest['manifest-version'] != '2':
--        raise NotImplementedError('Unrecognized manifest version %s.' %
--                                  manifest['manifest-version'])
--    return manifest
--
--
--def repack(host, targets, channel='stable', suffix='', cargo_channel=None):
--    log("Repacking rust for %s..." % host)
--
--    manifest = fetch_manifest(channel)
--    log('Using manifest for rust %s as of %s.' % (channel, manifest['date']))
--    if cargo_channel == channel:
--        cargo_manifest = manifest
--    else:
--        cargo_manifest = fetch_manifest(cargo_channel)
--        log('Using manifest for cargo %s as of %s.' %
--            (cargo_channel, cargo_manifest['date']))
--
--    log('Fetching packages...')
--    rustc = fetch_package(manifest, 'rustc', host)
--    cargo = fetch_package(cargo_manifest, 'cargo', host)
--    stds = fetch_std(manifest, targets)
--
--    log('Installing packages...')
--    tar_basename = 'rustc-' + host
--    if suffix:
--        tar_basename += '-' + suffix
--    tar_basename += '-repack'
--    install_dir = 'rustc'
--    subprocess.check_call(['rm', '-rf', install_dir])
--    install(os.path.basename(rustc['url']), install_dir)
--    install(os.path.basename(cargo['url']), install_dir)
--    for std in stds:
--        install(os.path.basename(std['url']), install_dir)
--
--    log('Tarring %s...' % tar_basename)
--    tar_options, tar_ext = tar_for_host(host)
--    subprocess.check_call(
--        ['tar', tar_options, tar_basename + tar_ext, install_dir])
--    subprocess.check_call(['rm', '-rf', install_dir])
--
--
--def repack_cargo(host, channel='nightly'):
--    log('Repacking cargo for %s...' % host)
--    # Cargo doesn't seem to have a .toml manifest.
--    base_url = 'https://static.rust-lang.org/cargo-dist/'
--    req = requests.get(os.path.join(base_url, 'channel-cargo-' + channel))
--    req.raise_for_status()
--    file = ''
--    for line in req.iter_lines():
--        if line.find(host) != -1:
--            file = line.strip()
--    if not file:
--        log('No manifest entry for %s!' % host)
--        return
--    manifest = {
--        'date': req.headers['Last-Modified'],
--        'pkg': {
--            'cargo': {
--                'version': channel,
--                'target': {
--                    host: {
--                        'url': os.path.join(base_url, file),
--                        'hash': None,
--                        'available': True,
--                    },
--                },
--            },
--        },
--    }
--    log('Using manifest for cargo %s.' % channel)
--    log('Fetching packages...')
--    cargo = fetch_package(manifest, 'cargo', host)
--    log('Installing packages...')
--    install_dir = 'cargo'
--    subprocess.check_call(['rm', '-rf', install_dir])
--    install(os.path.basename(cargo['url']), install_dir)
--    tar_basename = 'cargo-%s-repack' % host
--    log('Tarring %s...' % tar_basename)
--    tar_options, tar_ext = tar_for_host(host)
--    subprocess.check_call(
--        ['tar', tar_options, tar_basename + tar_ext, install_dir])
--    subprocess.check_call(['rm', '-rf', install_dir])
--
--
--# rust platform triples
--android = "armv7-linux-androideabi"
--android_x86 = "i686-linux-android"
--android_aarch64 = "aarch64-linux-android"
--linux64 = "x86_64-unknown-linux-gnu"
--linux32 = "i686-unknown-linux-gnu"
--mac64 = "x86_64-apple-darwin"
--mac32 = "i686-apple-darwin"
--win64 = "x86_64-pc-windows-msvc"
--win32 = "i686-pc-windows-msvc"
--mingw32 = "i686-pc-windows-gnu"
--
--
--def args():
--    '''Read command line arguments and return options.'''
--    parser = argparse.ArgumentParser()
--    parser.add_argument('--channel',
--                        help='Release channel to use: '
--                             'stable, beta, or nightly',
--                        default='stable')
--    parser.add_argument('--cargo-channel',
--                        help='Release channel to use for cargo: '
--                             'stable, beta, or nightly.'
--                             'Defaults to the same as --channel.')
--    args = parser.parse_args()
--    if not args.cargo_channel:
--        args.cargo_channel = args.channel
--    return args
--
--
--if __name__ == '__main__':
--    args = vars(args())
--    repack(mac64, [mac64], **args)
--    repack(win32, [win32], **args)
--    repack(win64, [win64], **args)
--    repack(linux64, [linux64, linux32], **args)
--    repack(linux64, [linux64, mac64], suffix='mac-cross', **args)
--    repack(linux64, [linux64, android, android_x86, android_aarch64],
--           suffix='android-cross', **args)
--    repack(linux64, [linux64, win32, mingw32], suffix='mingw32-cross', **args)
-diff --git a/taskcluster/docker/rust-build/splat_rust.py b/taskcluster/docker/rust-build/splat_rust.py
-deleted file mode 100644
---- a/taskcluster/docker/rust-build/splat_rust.py
-+++ /dev/null
-@@ -1,107 +0,0 @@
--#!/bin/env python
--'''
--This script patches tooltool manifests in the firefox source
--tree to update them to a new set of rust packages.
--'''
--
--import json
--import os.path
--import sys
--
--from collections import OrderedDict
--
--
--def load_manifest(path):
--    with open(path) as f:
--        return json.load(f, object_pairs_hook=OrderedDict)
--
--
--def save_manifest(manifest, path):
--    with open(path, 'w') as f:
--        json.dump(manifest, f, indent=2, separators=(',', ': '))
--        f.write('\n')
--
--
--def replace(manifest, stanza):
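--    '''Update the rustc entry in manifest from stanza; return True on success.'''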
--    key = 'rustc'
--    version = stanza.get('version')
--    for s in manifest:
--        if key in s.get('filename'):
--            if version:
--                print('Replacing %s\n     with %s' % (s['version'], version))
--                s['version'] = version
--            print('  old %s' % s['digest'][:12])
--            s['digest'] = stanza['digest']
--            s['size'] = stanza['size']
--            print('  new %s' % s['digest'][:12])
--            return True
--    print('Warning: Could not find matching %s filename' % key)
--    return False
--
--
--def update_manifest(source_manifest, target, target_filename):
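--    '''Copy the stanza matching target from source_manifest into the
--    tooltool manifest at target_filename.'''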
--    for stanza in source_manifest:
--        filename = stanza.get('filename')
--        if target in filename:
--            size = int(stanza.get('size'))
--            print('Found %s %d bytes' % (filename, size))
--            version = stanza.get('version')
--            if version:
--                print('  %s' % version)
--            print('Updating %s' % target_filename)
--            old = load_manifest(target_filename)
--            replace(old, stanza)
--            save_manifest(old, target_filename)
--            break
--
--
--'''Mapping from targets to target filenames.'''
--TARGETS = {
--        'x86_64-unknown-linux-gnu-repack': [
--            'browser/config/tooltool-manifests/linux32/releng.manifest',
--            'browser/config/tooltool-manifests/linux64/asan.manifest',
--            'browser/config/tooltool-manifests/linux64/clang.manifest',
--            'browser/config/tooltool-manifests/linux64/clang.manifest.centos6',
--            'browser/config/tooltool-manifests/linux64/fuzzing.manifest',
--            'browser/config/tooltool-manifests/linux64/hazard.manifest',
--            'browser/config/tooltool-manifests/linux64/msan.manifest',
--            'browser/config/tooltool-manifests/linux64/releng.manifest',
--            ],
--        'x86_64-unknown-linux-gnu-android-cross-repack': [
--            'mobile/android/config/tooltool-manifests/android/releng.manifest',
--            'mobile/android/config/tooltool-manifests/android-x86/releng.manifest',
--            'mobile/android/config/tooltool-manifests/android-gradle-dependencies/releng.manifest',
--            ],
--        'x86_64-unknown-linux-gnu-mingw32-cross-repack': [
--            'browser/config/tooltool-manifests/mingw32/releng.manifest',
--            ],
--        'x86_64-unknown-linux-gnu-mac-cross-repack': [
--            'browser/config/tooltool-manifests/macosx64/cross-releng.manifest',
--            ],
--        'x86_64-apple-darwin-repack': [
--            'browser/config/tooltool-manifests/macosx64/clang.manifest',
--            'browser/config/tooltool-manifests/macosx64/releng.manifest',
--            ],
--        'x86_64-pc-windows-msvc-repack': [
--            'browser/config/tooltool-manifests/win64/clang.manifest',
--            'browser/config/tooltool-manifests/win64/releng.manifest',
--            ],
--        'i686-pc-windows-msvc-repack': [
--            'browser/config/tooltool-manifests/win32/clang.manifest',
--            'browser/config/tooltool-manifests/win32/releng.manifest',
--            ],
--}
--
--if __name__ == '__main__':
--    if len(sys.argv) < 2:
--        print('%s PATH' % sys.argv[0])
--        sys.exit(1)
--
--    base_path = sys.argv[1]
--
--    updates = load_manifest('manifest.tt')
--    for target, filenames in TARGETS.items():
--        for target_filename in filenames:
--            update_manifest(updates, target,
--                            os.path.join(base_path, target_filename))
-diff --git a/taskcluster/docker/rust-build/task.json b/taskcluster/docker/rust-build/task.json
-deleted file mode 100644
---- a/taskcluster/docker/rust-build/task.json
-+++ /dev/null
-@@ -1,37 +0,0 @@
--{
--    "provisionerId": "aws-provisioner-v1",
--    "workerType": "rustbuild",
--    "created": "{task_created}",
--    "deadline": "{task_deadline}",
--    "payload": {
--        "image": "quay.io/rust/gecko-rust-build",
--        "env": {
--            "RUST_BRANCH": "{rust_branch}"
--        },
--        "artifacts": {
--            "public/rustc.tar.xz": {
--              "path": "/home/worker/rustc.tar.xz",
--              "expires": "{artifact_expires}",
--              "type": "file"
--            },
--            "public/manifest.tt": {
--              "path": "/home/worker/manifest.tt",
--              "expires": "{artifact_expires}",
--              "type": "file"
--            }
--        },
--        "features": {
--          "relengAPIProxy": true
--        },
--        "maxRunTime": 6000
--    },
--    "scopes": [
--      "docker-worker:relengapi-proxy:tooltool.upload.public"
--    ],
--    "metadata": {
--        "name": "Rust toolchain build",
--        "description": "Builds the rust toolchain for use in gecko builders.",
--        "owner": "giles@mozilla.com",
--        "source": "https://github.com/rillian/rust-build/"
--    }
--}
-diff --git a/taskcluster/docker/rust-build/tcbuild.py b/taskcluster/docker/rust-build/tcbuild.py
-deleted file mode 100644
---- a/taskcluster/docker/rust-build/tcbuild.py
-+++ /dev/null
-@@ -1,228 +0,0 @@
--#!/bin/env python
--'''
--This script triggers a taskcluster task, waits for it to finish,
--fetches the artifacts, uploads them to tooltool, and updates
--the in-tree tooltool manifests.
--'''
--
--from __future__ import print_function
--
--import requests.packages.urllib3
--
--import argparse
--import datetime
--import json
--import os
--import shutil
--import sys
--import taskcluster
--import tempfile
--import time
--import tooltool
--
--
--def local_file(filename):
--    '''
--    Return a path to a file next to this script.
--    '''
--    return os.path.join(os.path.dirname(__file__), filename)
--
--
--def read_tc_auth(tc_auth_file):
--    '''
--    Read taskcluster credentials from tc_auth_file and return them as a dict.
--    '''
--    return json.load(open(tc_auth_file, 'rb'))
--
--
--def fill_template_dict(d, keys):
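--    '''Recursively interpolate string values of d in place using keys.'''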
--    for key, val in d.items():
--        if isinstance(val, basestring) and '{' in val:
--            d[key] = val.format(**keys)
--        elif isinstance(val, dict):
--            fill_template_dict(val, keys)
--
--
--def fill_template(template_file, keys):
--    '''
--    Take the file object template_file, parse it as JSON, and
--    interpolate its string values (using str.format) with keys.
--    '''
--    template = json.load(template_file)
--    fill_template_dict(template, keys)
--    return template
--
--
--def spawn_task(queue, args):
--    '''
--    Spawn a Taskcluster task in queue using args.
--    '''
--    task_id = taskcluster.utils.slugId()
--    with open(local_file('task.json'), 'rb') as template:
--        keys = vars(args)
--        now = datetime.datetime.utcnow()
--        deadline = (now + datetime.timedelta(hours=2))
--        expires = (now + datetime.timedelta(days=1))
--        keys['task_created'] = now.isoformat() + 'Z'
--        keys['task_deadline'] = deadline.isoformat() + 'Z'
--        keys['artifact_expires'] = expires.isoformat() + 'Z'
--        payload = fill_template(template, keys)
--    queue.createTask(task_id, payload)
--    print('--- %s task %s submitted ---' % (now, task_id))
--    return task_id
--
--
--def wait_for_task(queue, task_id, initial_wait=5):
--    '''
--    Wait until queue reports that task task_id is completed, and return
--    its run id.
--
--    Sleep for initial_wait seconds before checking status the first time.
--    Then poll periodically and print a running log of the task status.
--    '''
--    time.sleep(initial_wait)
--    previous_state = None
--    have_ticks = False
--    while True:
--        res = queue.status(task_id)
--        state = res['status']['state']
--        if state != previous_state:
--            now = datetime.datetime.utcnow()
--            if have_ticks:
--                sys.stdout.write('\n')
--                have_ticks = False
--            print('--- %s task %s %s ---' % (now, task_id, state))
--            previous_state = state
--        if state == 'completed':
--            return len(res['status']['runs']) - 1
--        if state in ('failed', 'exception'):
--            raise Exception('Task failed')
--        sys.stdout.write('.')
--        sys.stdout.flush()
--        have_ticks = True
--        time.sleep(10)
--
--
--def fetch_artifact(queue, task_id, run_id, name, dest_dir):
--    '''
--    Fetch the artifact with name from task_id and run_id in queue,
--    write it to a file in dest_dir, and return the path to the written
--    file.
--    '''
--    url = queue.buildUrl('getArtifact', task_id, run_id, name)
--    fn = os.path.join(dest_dir, os.path.basename(name))
--    print('Fetching %s...' % name)
--    try:
--        r = requests.get(url, stream=True)
--        r.raise_for_status()
--        with open(fn, 'wb') as f:
--            for chunk in r.iter_content(1024):
--                f.write(chunk)
--    except requests.exceptions.HTTPError:
--        print('HTTP Error %d fetching %s' % (r.status_code, name))
--        return None
--    return fn
--
--
--def make_artifact_dir(task_id, run_id):
--    prefix = 'tc-artifacts.%s.%d.' % (task_id, run_id)
--    print('making artifact dir %s' % prefix)
--    return tempfile.mkdtemp(prefix=prefix)
--
--
--def fetch_artifacts(queue, task_id, run_id):
--    '''
--    Fetch all artifacts from task_id and run_id in queue, write them to
--    temporary files, and yield the path to each.
--    '''
--    # Create the directory before the try block so the finally clause
--    # cannot hit an unbound tempdir if mkdtemp fails.
--    tempdir = make_artifact_dir(task_id, run_id)
--    try:
--        res = queue.listArtifacts(task_id, run_id)
--        for a in res['artifacts']:
--            # Skip logs
--            if a['name'].startswith('public/logs'):
--                continue
--            # Skip interfaces
--            if a['name'].startswith('private/docker-worker'):
--                continue
--            yield fetch_artifact(queue, task_id, run_id, a['name'], tempdir)
--    finally:
--        if os.path.isdir(tempdir):
--            # shutil.rmtree(tempdir)
--            print('Artifacts downloaded to %s' % tempdir)
--            pass
--
--
--def upload_to_tooltool(tooltool_auth, task_id, artifact):
--    '''
--    Upload artifact to tooltool using tooltool_auth as the authentication token.
--    Return the path to the generated tooltool manifest.
--    '''
--    try:
--        oldcwd = os.getcwd()
--        os.chdir(os.path.dirname(artifact))
--        manifest = artifact + '.manifest'
--        tooltool.main([
--            'tooltool.py',
--            'add',
--            '--visibility=public',
--            '-m', manifest,
--            artifact
--        ])
--        tooltool.main([
--            'tooltool.py',
--            'upload',
--            '-m', manifest,
--            '--authentication-file', tooltool_auth,
--            '--message', 'Built from taskcluster task {}'.format(task_id),
--        ])
--        return manifest
--    finally:
--        os.chdir(oldcwd)
--
--
--def update_manifest(artifact, manifest, local_gecko_clone):
--    platform = 'linux'
--    manifest_dir = os.path.join(local_gecko_clone,
--                                'testing', 'config', 'tooltool-manifests')
--    platform_dir = [p for p in os.listdir(manifest_dir)
--                    if p.startswith(platform)][0]
--    tree_manifest = os.path.join(manifest_dir, platform_dir, 'releng.manifest')
--    print('%s -> %s' % (manifest, tree_manifest))
--    shutil.copyfile(manifest, tree_manifest)
--
--
--def main():
--    parser = argparse.ArgumentParser(description='Build and upload binaries')
--    parser.add_argument('taskcluster_auth',
--                        help='Path to a file containing Taskcluster client '
--                             'ID and authentication token as a JSON file in '
--                             'the form {"clientId": "...", "accessToken": "..."}')
--    parser.add_argument('--tooltool-auth',
--                        help='Path to a file containing a tooltool '
--                             'authentication token valid for uploading files')
--    parser.add_argument('--local-gecko-clone',
--                        help='Path to a local Gecko clone whose tooltool '
--                             'manifests will be updated with the newly-built binaries')
--    parser.add_argument('--rust-branch', default='stable',
--                        help='Revision of the rust repository to use')
--    parser.add_argument('--task', help='Use an existing task')
--
--    args = parser.parse_args()
--    tc_auth = read_tc_auth(args.taskcluster_auth)
--    queue = taskcluster.Queue({'credentials': tc_auth})
--    if args.task:
--        task_id, initial_wait = args.task, 0
--    else:
--        task_id, initial_wait = spawn_task(queue, args), 25
--    run_id = wait_for_task(queue, task_id, initial_wait)
--    for artifact in fetch_artifacts(queue, task_id, run_id):
--        manifest = None
--        if args.tooltool_auth:
--            manifest = upload_to_tooltool(args.tooltool_auth, task_id,
--                                          artifact)
--        if manifest and args.local_gecko_clone:
--            update_manifest(artifact, manifest, args.local_gecko_clone)
--
--
--if __name__ == '__main__':
--    main()
-diff --git a/taskcluster/docker/rust-build/upload_rust.sh b/taskcluster/docker/rust-build/upload_rust.sh
-deleted file mode 100644
---- a/taskcluster/docker/rust-build/upload_rust.sh
-+++ /dev/null
-@@ -1,22 +0,0 @@
--#!/bin/bash -vex
--
--set -x -e
--
--: WORKSPACE ${WORKSPACE:=/home/worker}
--
--set -v
--
--# Upload artifacts packaged by the build script.
--pushd ${WORKSPACE}
--if test -n "$TASK_ID"; then
--  # If we're running on task cluster, use the upload-capable tunnel.
--  TOOLTOOL_OPTS="--url=http://relengapi/tooltool/"
--  MESSAGE="Taskcluster upload ${TASK_ID}/${RUN_ID} $0"
--else
--  MESSAGE="Rust toolchain build for gecko"
--fi
--if test -r rust-version; then
--  MESSAGE="$MESSAGE $(cat rust-version)"
--fi
--/build/tooltool.py upload ${TOOLTOOL_OPTS} --message="${MESSAGE}"
--popd
-diff --git a/taskcluster/docker/upload-symbols/Dockerfile b/taskcluster/docker/upload-symbols/Dockerfile
-deleted file mode 100644
---- a/taskcluster/docker/upload-symbols/Dockerfile
-+++ /dev/null
-@@ -1,21 +0,0 @@
--FROM        ubuntu:14.04
--MAINTAINER  Anthony Miyaguchi <amiyaguchi@mozilla.com>
--
--WORKDIR     /tmp
--
--# Add the upload script
--ADD         bin /tmp/bin
--RUN         chmod +x /tmp/bin/*
--
--# Define the environmental variables for the scripts
--COPY        socorro_token   /tmp/
--ENV         SOCORRO_SYMBOL_UPLOAD_TOKEN_FILE /tmp/socorro_token
--ENV         SCRIPT_PATH toolkit/crashreporter/tools/upload_symbols.py 
--
--# Install dependencies for the script
--RUN         apt-get update
--RUN         apt-get install -y python python-pip wget
--RUN         pip install redo requests
--
--# Default command
--CMD         ["/bin/bash", "--login"]
-diff --git a/taskcluster/docker/upload-symbols/README.md b/taskcluster/docker/upload-symbols/README.md
-deleted file mode 100644
---- a/taskcluster/docker/upload-symbols/README.md
-+++ /dev/null
-@@ -1,28 +0,0 @@
--# Upload Symbols
--Docker worker to upload crashreporter symbols as a separate taskcluster task.
--
--## Building
--`$ docker build -t upload_symbols .`
--
--`$ docker run -i -t upload_symbols`
--
--Then from inside the container, run:
--
--`$ ./bin/upload.sh`
--
--In order to run the `upload_symbols.py` script properly, the Dockerfile expects a text file `socorro_token` containing the API token to be present in the root directory before the image is built.
--
--The following environment variables must be set for a successful run.
--- `ARTIFACT_TASKID` : TaskId of the parent build task
--- `GECKO_HEAD_REPOSITORY` : The head repository to download the checkout script
--- `GECKO_HEAD_REV` : Revision of the head repository to look for
--
--## Example
--The container can be run similar to its production environment with the following command:
--```
--docker run -ti \
---e ARTIFACT_TASKID=Hh5vLCaTRRO8Ql9X6XBdxg \
---e GECKO_HEAD_REV=beed30cce69bc9783d417d3d29ce2c44989961ed \
---e GECKO_HEAD_REPOSITORY=https://hg.mozilla.org/try/ \
--upload_symbols /bin/bash bin/upload.sh
--```
-diff --git a/taskcluster/docker/upload-symbols/bin/checkout-script.sh b/taskcluster/docker/upload-symbols/bin/checkout-script.sh
-deleted file mode 100755
---- a/taskcluster/docker/upload-symbols/bin/checkout-script.sh
-+++ /dev/null
-@@ -1,16 +0,0 @@
--#! /bin/bash -vex
--
--set -x -e
--
--# Inputs, with defaults
--
--: GECKO_HEAD_REPOSITORY         ${GECKO_HEAD_REPOSITORY:=https://hg.mozilla.org/mozilla-central}
--: GECKO_HEAD_REV                ${GECKO_HEAD_REV:=default}
--
--: SCRIPT_DOWNLOAD_PATH          ${SCRIPT_DOWNLOAD_PATH:=$PWD}
--: SCRIPT_PATH                   ${SCRIPT_PATH:?"script path must be set"}
--set -v
--
--# download script from the gecko repository
--url=${GECKO_HEAD_REPOSITORY}/raw-file/${GECKO_HEAD_REV}/${SCRIPT_PATH}
--wget --directory-prefix=${SCRIPT_DOWNLOAD_PATH} $url
-diff --git a/taskcluster/docker/upload-symbols/bin/upload.sh b/taskcluster/docker/upload-symbols/bin/upload.sh
-deleted file mode 100755
---- a/taskcluster/docker/upload-symbols/bin/upload.sh
-+++ /dev/null
-@@ -1,21 +0,0 @@
--#! /bin/bash
--
--set -e
--
--# checkout the script
--source $(dirname $0)/checkout-script.sh
--
--# Check that we have a taskid to checkout
--if [ -z "${ARTIFACT_TASKID}" ]; then
--    echo "Please set ARTIFACT_TASKID. Exiting"
--    exit 0
--fi
--
--# grab the symbols from an arbitrary task
--symbol_url=https://queue.taskcluster.net/v1/task/${ARTIFACT_TASKID}/artifacts/public/build/target.crashreporter-symbols-full.zip
--wget ${symbol_url}
--
--# run
--symbol_zip=$(basename ${symbol_url})
--script_name=$(basename ${SCRIPT_PATH})
--python -u ${script_name} ${symbol_zip}
-diff --git a/taskcluster/docker/upload-symbols/test_exports.sh b/taskcluster/docker/upload-symbols/test_exports.sh
-deleted file mode 100755
---- a/taskcluster/docker/upload-symbols/test_exports.sh
-+++ /dev/null
-@@ -1,6 +0,0 @@
--#! /bin/bash
--export SOCORRO_SYMBOL_UPLOAD_TOKEN_FILE=./socorro_token
--export ARTIFACT_TASKID=Hh5vLCaTRRO8Ql9X6XBdxg
--export GECKO_HEAD_REV=beed30cce69bc9783d417d3d29ce2c44989961ed
--export GECKO_HEAD_REPOSITORY=https://hg.mozilla.org/try/
--export SCRIPT_PATH=toolkit/crashreporter/tools/upload_symbols.py
-diff --git a/taskcluster/docs/action-implementation.rst b/taskcluster/docs/action-implementation.rst
-deleted file mode 100644
---- a/taskcluster/docs/action-implementation.rst
-+++ /dev/null
-@@ -1,244 +0,0 @@
--Action Task Implementation
--==========================
--
--This document shows how to define an action in-tree such that it shows up in
--supported user interfaces like Treeherder. For details on the interface between
--in-tree logic and external user interfaces, see
--:doc:`the specification for actions.json <action-spec>`.
--
--There are two options for defining actions: creating a callback action, or
--creating a custom action task.  A callback action automatically defines an
--action task that will invoke a Python function of your devising.
--
--A custom action task is an arbitrary task definition that will be created
--directly.  In cases where the callback would simply call ``queue.createTask``,
--a custom action task can be more efficient.
--
--Creating a Callback Action
----------------------------
--A *callback action* is an action that calls back into in-tree logic. That is,
--you register the action with a name, title, description, context, input schema,
--and a Python callback. When the action is triggered in a user interface,
--input matching the schema is collected and passed to a new task, which then
--calls your Python callback, enabling it to do pretty much anything it wants to.
--
--To create a new action you must create a file
--``taskcluster/taskgraph/actions/my-action.py``, that at minimum contains::
--
--  from registry import register_callback_action
--
--  @register_callback_action(
--      name='hello',
--      title='Say Hello',
--      symbol='hw',  # Show the callback task in treeherder as 'hw'
--      description="Simple **proof-of-concept** callback action",
--      order=10000,  # Order in which it should appear relative to other actions
--  )
--  def hello_world_action(parameters, input, task_group_id, task_id, task):
--      # parameters is an instance of taskgraph.parameters.Parameters
--      # it carries decision task parameters from the original decision task.
--      # input, task_id, and task should all be None
--      print "Hello was triggered from taskGroupId: " + taskGroupId
--
--The example above defines an action that is available in the context-menu for
--the entire task-group (result-set or push in Treeherder terminology). To create
--an action that shows up in the context menu for a task we would specify the
--``context`` parameter.
--
--
--Setting the Action Context
--..........................
--The context parameter should be a list of tag-sets, such as
--``context=[{"platform": "linux"}]``, which will make the task show up in the
--context-menu for any task with ``task.tags.platform = 'linux'``. Below is
--some examples of context parameters and the resulting conditions on
--``task.tags`` (tags used below are just illustrative).
--
--``context=[{"platform": "linux"}]``:
--  Requires ``task.tags.platform = 'linux'``.
--``context=[{"kind": "test", "platform": "linux"}]``:
--  Requires ``task.tags.platform = 'linux'`` **and** ``task.tags.kind = 'test'``.
--``context=[{"kind": "test"}, {"platform": "linux"}]``:
--  Requires ``task.tags.platform = 'linux'`` **or** ``task.tags.kind = 'test'``.
--``context=[{}]``:
--  Requires nothing and the action will show up in the context menu for all tasks.
--``context=[]``:
--  Is the same as not setting the context parameter, which will make the action
--  show up in the context menu for the task-group.
--  (i.e., the action is not specific to some task)
--
--The example action below will be shown in the context-menu for tasks with
--``task.tags.platform = 'linux'``::
--
--  from registry import register_callback_action
--
--  @register_callback_action(
--      name='retrigger',
--      title='Retrigger',
--      symbol='re-c',  # Show the callback task in treeherder as 're-c'
--      description="Create a clone of the task",
--      order=1,
--      context=[{'platform': 'linux'}]
--  )
--  def retrigger_action(parameters, input, task_group_id, task_id, task):
--      # input will be None
--      print "Retriggering: {}".format(task_id)
--      print "task definition: {}".format(task)
--
--When the ``context`` parameter is set, the ``task_id`` and ``task`` parameters
--will be provided to the callback. In this case the ``task_id`` and ``task``
--parameters will be the ``taskId`` and *task definition* of the task from whose
--context-menu the action was triggered.
--
--Typically, the ``context`` parameter is used for actions that operate on
--tasks, such as retriggering, running a specific test case, creating a loaner,
--bisection, etc. You can think of the context as a place the action should
--appear, but it's also very much a form of input the action can use.
--
--
--Specifying an Input Schema
--..........................
--In all examples so far the ``input`` parameter for the callbacks has been
--``None``. To make an action that takes input you must specify an input schema.
--This is done by passing a JSON schema as the ``schema`` parameter.
--
--When designing a schema for the input it is important to exploit as many of the
--JSON schema validation features as reasonably possible. Furthermore, it is
--*strongly* encouraged that the ``title`` and ``description`` properties in
--JSON schemas are used to provide a detailed explanation of what the input
--value will do. Authors can reasonably expect JSON schema ``description``
--properties to be rendered as markdown before being presented.
--
--The example below illustrates how to specify an input schema. Notice that while
--this example doesn't specify a ``context`` it is perfectly legal to specify
--both ``input`` and ``context``::
--
--  from registry import register_callback_action
--
--  @register_callback_action(
--      name='run-all',
--      title='Run All Tasks',
--      symbol='ra-c',  # Show the callback task in treeherder as 'ra-c'
--      description="**Run all tasks** that have been _optimized_ away.",
--      order=1,
--      input={
--          'title': 'Action Options',
--          'description': 'Options for how you wish to run all tasks',
--          'properties': {
--              'priority': {
--                  'title': 'priority',
--                  'description': 'Priority that should be given to the tasks',
--                  'type': 'string',
--                  'enum': ['low', 'normal', 'high'],
--                  'default': 'low',
--              },
--              'runTalos': {
--                  'title': 'Run Talos',
--                  'description': 'Do you wish to also include talos tasks?',
--                  'type': 'boolean',
--                  'default': 'false',
--              }
--          },
--          'required': ['priority', 'runTalos'],
--          'additionalProperties': False,
--      },
--  )
--  def retrigger_action(parameters, input, task_group_id, task_id, task):
--      print "Create all pruned tasks with priority: {}".format(input['priority'])
--      if input['runTalos']:
--          print "Also running talos jobs..."
--
--When the ``schema`` parameter is given the callback will always be called with
--an ``input`` parameter that satisfies the previously given JSON schema.
--It is encouraged to set ``additionalProperties: false``, as well as specifying
--all properties as ``required`` in the JSON schema. Furthermore, it's good
--practice to provide ``default`` values for properties, as user interface generators
--will often take advantage of such properties.
--
--Once you have specified input and context as applicable for your action you can
--do pretty much anything you want from within your callback. Whether you want
--to create one or more tasks or run a specific piece of code like a test.
--
--Conditional Availability
--........................
--The decision parameters ``taskgraph.parameters.Parameters`` passed to
--the callback are also available when the decision task generates the list of
--actions to be displayed in the user interface. When registering an action
--callback, the ``available`` option can be used to specify a callable
--which, given the decision parameters, determines if the action should be available.
--The feature is illustrated below::
--
--  from registry import register_callback_action
--
--  @register_callback_action(
--      name='hello',
--      title='Say Hello',
--      symbol='hw',  # Show the callback task in treeherder as 'hw'
--      description="Simple **proof-of-concept** callback action",
--      order=2,
--      # Define an action that is only included if this is a push to try
--      available=lambda parameters: parameters.get('project', None) == 'try',
--  )
--  def try_only_action(parameters, input, task_group_id, task_id, task):
--      print "My try-only action"
--
--Properties of ``parameters``  are documented in the
--:doc:`parameters section <parameters>`. You can also examine the
--``parameters.yml`` artifact created by decision tasks.
--
--
--Creating a Custom Action Task
--------------------------------
--
--It is possible to define an action that doesn't take a callback; instead, you
--provide a task template directly. For details on how the task template
--language works refer to :doc:`the specification for actions.json <action-spec>`;
--the example below illustrates how to create such an action::
--
--  from registry import register_task_action
--
--  @register_task_action(
--      name='retrigger',
--      title='Retrigger',
--      description="Create a clone of the task",
--      order=1,
--      context=[{'platform': 'linux'}],
--      input={
--          'title': 'priority',
--          'description': 'Priority that should be given to the tasks',
--          'type': 'string',
--          'enum': ['low', 'normal', 'high'],
--          'default': 'low',
--      },
--  )
--  def task_template_builder(parameters):
--      # The task template builder may return None to signal that the action
--      # isn't available.
--      if parameters.get('project', None) != 'try':
--        return None
--      return {
--          'created': {'$fromNow': ''},
--          'deadline': {'$fromNow': '1 hour'},
--          'expires': {'$fromNow': '14 days'},
--          'provisionerId': '...',
--          'workerType': '...',
--          'priority': '${input}',
--          'payload': {
--              'command': '...',
--              'env': {
--                  'TASK_DEFINITION': {'$json': {'eval': 'task'}}
--              },
--              ...
--          },
--          # It's now your responsibility to include treeherder routes, as well
--          # additional metadata for treeherder in task.extra.treeherder.
--          ...
--      }
--
--This kind of action is useful for creating simple derivative tasks, but is
--limited by the expressiveness of the template language. On the other hand, it
--is more efficient than an action callback as it does not involve an
--intermediate action task before creating the task the user requested.
--
--For further details on the template language, see :doc:`the specification for
--actions.json <action-spec>`.
-diff --git a/taskcluster/docs/action-spec.rst b/taskcluster/docs/action-spec.rst
-deleted file mode 100644
---- a/taskcluster/docs/action-spec.rst
-+++ /dev/null
-@@ -1,242 +0,0 @@
--Action Specification
--====================
--This document specifies how actions exposed by the *decision task* are to be
--presented and triggered from Treeherder, or similar user interfaces.
--
--The *decision task* creates an artifact ``public/actions.json`` to be consumed
--by a user interface such as Treeherder. The ``public/actions.json`` file
--specifies actions that can be triggered such as:
--
-- * Retrigger a task,
-- * Retry specific test cases many times,
-- * Obtain a loaner machine,
-- * Back-fill missing tasks,
-- * ...
--
--Through the ``public/actions.json`` file it is possible to expose actions defined
--in-tree such that the actions can be conveniently triggered in Treeherder.
--This has two purposes:
--
-- 1. Facilitate development of utility actions/tools in-tree, and,
-- 2. Strongly decouple build/test configuration from Treeherder.
--
--For details on how to define custom actions in-tree, refer to
--:doc:`the in-tree actions section <action-details>`. This document merely
--specifies how ``actions.json`` shall be interpreted.
--
--Actions
---------
--
--The content of ``actions.json`` is a list of actions (and variables, to be
--described later).  Each action has a ``kind`` describing how a user interface
--should trigger it.  There is currently only one kind defined: ``task``.
--
--An action with ``kind: 'task'`` specifies a task that the user interface should
--create. That is, when the action is triggered, the user interface calls the
--Taskcluster API to create a new task, with the content of that task determined
--from ``actions.json``.
--
--The task created by the action may be useful in its own right (for example,
--running a test with additional debugging), or it may simply trigger in-tree
--scripts that create new tasks.  The latter form is called an *action task*, and
--is similar to a decision task. This allows in-tree scripts to execute
--complicated actions such as backfilling.
--
--Actions of the ``'task'`` *kind* **must** have a ``task`` property. This
--property specifies the task template to be parameterized and created in order
--to trigger the action.
--
--The template is parameterized using `JSON-e
--<https://github.com/taskcluster/json-e>`_, with the following context entries
--available:
--
--``taskGroupId``
--  the ``taskGroupId`` of the task-group this is triggered from,
--``taskId``
--  the ``taskId`` of the selected task, ``null`` if no task is
--  selected (this is the case if the action has ``context: []``),
--``task``
--  the task definition of the selected task, ``null`` if no task is
--  selected (this is the case if the action has ``context: []``), and,
--``input``
--  the input matching the ``schema`` property, ``null`` if the action
--  doesn't have a ``schema`` property.  See "Action Input" below.
--``<key>``
--  Any ``<key>`` defined in the ``variables`` property may also be referenced.
--  See "Variables" below.
--
--The following **example** demonstrates how a task template can specify
--timestamps and dump input JSON into environment variables::
--
--  {
--    "version": 1,
--    "actions": [
--      {
--        "kind": "task",
--        "name": "thing",
--        "title: "Do A Thing",
--        "description": "Do something",
--        "task": {
--          "workerType": "my-worker",
--          "payload": {
--            "created": {"$fromNow": ""},
--            "deadline": {"$fromNow": "1 hour 15 minutes"},
--            "expiration": {"$fromNow": "14 days"},
--            "image": "my-docker-image",
--            "env": {
--              "TASKID_TRIGGERED_FOR": "${taskId}",
--              "INPUT_JSON": {"$json": {"$eval": "input"}}
--            },
--            ...
--          },
--          ...
--        }
--      }
--    ],
--    "variables: {},
--  }
--
--
--MetaData
--........
--
--Each action entry must define a ``name``, ``title`` and ``description``.
--Furthermore, the list of actions should be sorted by the order in which actions
--should appear in a menu.
--
--The ``name`` is used by user interfaces to identify the action. For example, a
--retrigger button might look for an action with `name = "retrigger"`.
--
--Action names must be unique for a given task, or for a taskgroup, but the same
--name may be used for actions applying to disjoint sets of tasks. For example,
--it may be helpful to define different "retrigger" actions for build tasks
--`[{jobKind: 'build'}]` and test tasks `[{jobKind: 'test'}]`, and in this case
--only one such action would apply to any given task.
--
--The ``title`` is a human readable string intended to be used as label on the
--button, link or menu entry that triggers the action. This should be short and
--concise.  Ideally, you'll want to avoid duplicates.
--
--The ``description`` property contains a human readable string describing the
--action, such as what it does, how it does it, and what it is useful for. This
--string is to be rendered as **markdown**, allowing for bullet points, links and other
--simple formatting to explain what the action does.
--
--
--Action Context
--..............
--
--Few actions are relevant in all contexts. For this reason each action specifies
--a ``context`` property. This property specifies when an action is relevant.
--Actions *relevant* for a task should be displayed in a context menu for the
--given task. Similarly actions *not relevant* for a given task should not be
--displayed in the context menu for the given task.
--
--As a special case, we say that actions for which *no relevant* context can
--exist are *relevant* for the task-group. This could, for example, be an action
--to create tasks that were optimized away.
--
--The ``context`` property is specified as a list of *tag-sets*. A *tag-set* is a
--set of key-value pairs. A task is said to *match* a *tag-set* if ``task.tags``
--is a super-set of the *tag-set*. An action is said to be *relevant* for a given
--task, if ``task.tags`` *match* one of the *tag-sets* given in the ``context``
--property for the action.
--
--Naturally, it follows that an action with an empty list of *tag-sets* in its
--``context`` property cannot possibly be *relevant* for any task. Hence, by
--previously declared special case such an action is *relevant* for the
--task-group.
--
--**Examples**::
--
--    // Example task definitions (everything but tags eclipsed)
--    TaskA = {..., tags: {kind: 'test', platform: 'linux'}}
--    TaskB = {..., tags: {kind: 'test', platform: 'windows'}}
--    TaskC = {..., tags: {kind: 'build', platform: 'linux'}}
--
--    Action1 = {..., context: [{kind: 'test'}]}
--    // Action1 is relevant to: TaskA, TaskB
--
--    Action2 = {..., context: [{kind: 'test', platform: 'linux'}]}
--    // Action2 is relevant to: TaskA
--
--    Action3 = {..., context: [{platform: 'linux'}]}
--    // Action3 is relevant to: TaskA, TaskC
--
--    Action4 = {..., context: [{kind: 'test'}, {kind: 'build'}]}
--    // Action4 is relevant to: TaskA, TaskB, TaskC
--
--    Action5 = {..., context: [{}]}
--    // Action5 is relevant to: TaskA, TaskB, TaskC (all tasks in fact)
--
--    Action6 = {..., context: []}
--    // Action6 is relevant to the task-group
--
--
--Action Input
--............
--
--An action can take JSON input; the input format accepted by an action is
--specified using a `JSON schema <http://json-schema.org/>`_. This schema is
--given by the action's ``schema`` property.  For example::
--
--  {
--    "version": 1,
--    "actions": [
--      {
--        "kind": "task",
--        "name": "thing",
--        "title: "Do A Thing",
--        "description": "Do something",
--        "schema": {
--          "description": "The thing to do",
--          "title": "Thing",
--          "default": "something",
--          "type": "string"
--          "maxLength": 255
--        },
--        "task": {
--          "payload": {
--            "env": {
--              "INPUT_JSON": {"$json": {"$eval": "input"}}
--            },
--            ...
--          },
--          ...
--        }
--      }
--    ],
--    "variables: {},
--  }
--
--User interfaces for triggering actions, like Treeherder, are expected to provide
--JSON input that satisfies this schema. These interfaces are also expected to
--validate the input against the schema before attempting to trigger the action.
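--
--As a sketch, a consumer written in Python could perform that validation with
--the ``jsonschema`` package (``action`` and ``user_input`` are placeholder
--names)::
--
--  import jsonschema
--
--  # Raises jsonschema.ValidationError when the collected input does not
--  # satisfy the schema declared by the action.
--  jsonschema.validate(instance=user_input, schema=action['schema'])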
--
--It is perfectly legal to reference external schemas using
--constructs like ``{"$ref": "https://example.com/my-schema.json"}``, in this case
--it however strongly recommended that the external resource is available over
--HTTPS and allows CORS requests from any source.
--
--When writing schemas it is strongly encouraged that the JSON schema
--``description`` properties are used to provide detailed descriptions. It is
--assumed that consumers will render these ``description`` properties as markdown.
--
--
--Variables
-----------
--
--The ``public/actions.json`` artifact has a ``variables`` property that is a
--mapping from variable names to JSON values to be used as constants.
--These variables can be referenced from task templates, but beware that they
--may overshadow builtin variables. This is mainly useful to deduplicate commonly
--used values, in order to reduce template size. This feature does not
--introduce further expressiveness.
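--
--A minimal sketch of such a mapping (the variable name and URL are invented
--for illustration)::
--
--  {
--    "version": 1,
--    "variables": {"toolchainUrl": "https://example.com/toolchain.tar.xz"},
--    "actions": [
--      {
--        "kind": "task",
--        "name": "fetch-toolchain",
--        "title": "Fetch Toolchain",
--        "description": "Fetch a prebuilt toolchain",
--        "task": {
--          "payload": {
--            "env": {"TOOLCHAIN_URL": {"$eval": "toolchainUrl"}}
--          }
--        }
--      }
--    ]
--  }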
--
--Formal Specification
----------------------
--
--The following is the JSON schema for ``actions.json``.
--
--.. literalinclude:: actions-schema.yml
--   :language: YAML
-diff --git a/taskcluster/docs/action-uis.rst b/taskcluster/docs/action-uis.rst
-deleted file mode 100644
---- a/taskcluster/docs/action-uis.rst
-+++ /dev/null
-@@ -1,87 +0,0 @@
--User Interface Considerations
--=============================
--
--The actions system decouples in-tree changes from user interface changes by
--taking advantage of graceful degradation. User interfaces, when presented with
--an unfamiliar action, fall back to a usable default behavior, and can later be
--upgraded to handle that action with a more refined approach.
--
--Default Behavior
------------------
--
--Every user interface should support the following:
--
-- * Displaying a list of actions relevant to each task, and displaying
--   task-group tasks for the associated task-group.
--
-- * Providing an opportunity for the user to enter input for an action.  This
--   might be in JSON or YAML, or using a form auto-generated from the action's
--   JSON-schema.  If the action has no schema, this step should be skipped.
--   The user's input should be validated against the schema.
--
-- * For ``action.kind = 'task'``, rendering the template using the JSON-e
--   library with the variables described in :doc:`action-spec` (see the sketch
--   after this list).
--
-- * Calling ``Queue.createTask`` with the resulting task, using the user's
--   Taskcluster credentials.  See the next section for some important
--   security-related concerns.
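--
--A sketch of the rendering step with the Python implementation of JSON-e
--(``actions_json``, ``action``, and the context values are placeholder
--names)::
--
--  import jsone
--
--  context = {
--      'taskGroupId': task_group_id,
--      'taskId': task_id,     # None when no task is selected
--      'task': task,          # None when no task is selected
--      'input': user_input,   # None when the action has no schema
--  }
--  # Entries from the actions.json "variables" mapping join the context too.
--  context.update(actions_json.get('variables', {}))
--  new_task = jsone.render(action['task'], context)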
--
--Creating Tasks
----------------
--
--When executing an action, a UI must ensure that the user is authorized to
--perform the action, and that the user is not being "tricked" into executing
--an unexpected action.
--
--To accomplish the first, the UI should create tasks with the user's Taskcluster
--credentials. Do not use credentials configured as part of the service itself!
--
--To accomplish the second, use the decision task's ``scopes`` property as the
--`authorizedScopes
--<https://docs.taskcluster.net/manual/design/apis/hawk/authorized-scopes>`_ for
--the ``Queue.createTask`` call.  This prevents action tasks from doing anything
--the original decision task couldn't do.
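--
--A sketch of both precautions with the Python Taskcluster client (variable
--names are placeholders; whether your client version accepts
--``authorizedScopes`` in its options should be double-checked)::
--
--  import taskcluster
--
--  queue = taskcluster.Queue({
--      'credentials': users_credentials,             # never the service's own
--      'authorizedScopes': decision_task['scopes'],  # limit what the task may do
--  })
--  queue.createTask(taskcluster.utils.slugId(), rendered_task)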
--
--Specialized Behavior
----------------------
--
--The default behavior is too clumsy for day-to-day use for common actions.  User
--interfaces may want to provide a more natural interface that still takes advantage
--of the actions system.
--
--Specialized Input
--.................
--
--A user interface may provide specialized input forms for specific schemas.  The
--input generated from the form must conform to the schema.
--
--To ensure that the schema has not changed, implementers should do a deep
--comparison between a schema for which a hand-written form exists, and the
--schema required by the action. If the two differ, the default behavior should
--be used instead.
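--
--In Python, for instance, ``==`` on the parsed JSON structures is already a
--deep comparison (the schema and form helpers here are placeholders)::
--
--  if action.get('schema') == handwritten_schema:
--      show_custom_form()           # schema unchanged; the form is safe
--  else:
--      show_default_input_editor()  # fall back to the default behavior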
--
--Specialized Triggering
--......................
--
--A user interface may want to trigger a specific action using a dedicated UI
--element.  For example, an "start interactive session" button might be placed
--next to each failing test in a list of tests.
--
--User interfaces should look for the desired action by name. The UI should check
--that there is exactly one matching action available for the given task or
--task-graph. If multiple actions match, the UI should treat that as an error
--(helping to avoid actions being surreptitiously replaced by similarly-named,
--malicious actions).
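--
--A sketch of that lookup (``actions_json``, ``is_relevant``, and ``trigger``
--are placeholder names)::
--
--  matching = [a for a in actions_json['actions']
--              if a['name'] == 'retrigger' and is_relevant(a, task)]
--  if len(matching) != 1:
--      raise RuntimeError('expected exactly one "retrigger" action')
--  trigger(matching[0])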
--
--Having discovered the task, the user interface has a choice in how to provide
--its input. It can use the "specialized input" approach outlined above, providing
--a customized form if the action's schema is recognized and gracefully degrading
--if not.
--
--But if the user interface is generating the input internally, it may instead
--validate that generated input against the action's schema as given, proceeding
--if validation succeeds.  In this alternative, there is no need to do a deep
--comparison of the schema.  This approach allows in-tree changes that introduce
--backward-compatible changes to the schema, without breaking support in user
--interfaces.  Of course, if the changes are not backward-compatible, breakage
--will ensue.
-diff --git a/taskcluster/docs/actions-schema.yml b/taskcluster/docs/actions-schema.yml
-deleted file mode 100644
---- a/taskcluster/docs/actions-schema.yml
-+++ /dev/null
-@@ -1,197 +0,0 @@
--$schema: http://json-schema.org/draft-04/schema#
--id: https://hg.mozilla.org/mozilla-central/raw-file/tip/taskcluster/docs/actions-schema.yml
--title: Schema for Exposing Actions
--description: |
--  This document specifies the schema for the `public/actions.json` used by
--  _decision tasks_ to expose actions that can be triggered by end-users.
--
--  For the purpose of this document the _consumer_ is the user-interface that
--  displays task results to the end-user and allows end-users to trigger actions
--  defined by `public/actions.json`. A _consumer_ might be Treeherder.
--  The _end-user_ is a developer who is inspecting the results, and wish to
--  trigger actions.
--type: object
--properties:
--  version:
--    enum: [1]
--    type: integer
--  variables:
--    type: object
--    description: |
--      Mapping from variable name to constants that can be referenced using
--      `{$eval: '<variable>'}` within the task templates defined for each action.
--
--      This is useful for commonly used constants that are used in many task
--      templates. Whether it's to reduce the size of the `public/actions.json`
--      artifact by reuseing large constants, or simply to make it easier to
--      write task templates by exposing additional variables.
--
--      These will overwrite any builtin variables, such as `taskGroupId`,
--      `input`, `taskId`, `task`, and any further variables that future
--      backwards compatible iterations of this specification add. Hence, you
--      should avoid declaring variables such as `input`, as it will shadow the
--      builtin `input` variable.
--    additionalProperties: true
--  actions:
--    type: array
--    description: |
--      List of actions that can be triggered.
--    items:
--      type: object
--      properties:
--        name:
--          type: string
--          maxLength: 255
--          description: |
--            The name of this action.  This is used by user interfaces to
--            identify the action. For example, a retrigger button might look for
--            an action with `name = "retrigger"`.
--
--            Action names must be unique for a given task, or for a taskgroup,
--            but the same name may be used for actions applying to disjoint sets
--            of tasks. For example, it may be helpful to define different
--            "retrigger" actions for build tasks `[{jobKind: 'build'}]` and test
--            tasks `[{jobKind: 'test'}]`, and in this case only one such action
--            would apply to any given task.
--        title:
--          type: string
--          maxLength: 255
--          description: |
--            Title text to be displayed on the button or link triggering the action.
--        description:
--          type: string
--          maxLength: 4096
--          description: |
--            Human readable description of the action in markdown.
--            Can be displayed in tooltip, popup and/or dialog when triggering
--            the action.
--        kind:
--          enum:
--            - task
--          description: |
--            Specifies the kind of action this is.
--
--            The `task` _action kind_ is triggered by creating a task, following
--            a task template.
--
--            Other kinds might be added in the future. Consumers should ignore
--            all entries featuring a `kind` property they don't recognize.
--        context:
--          type: array
--          default: []
--          items:
--            type: object
--            additionalProperties:
--              type: string
--              maxLength: 4096
--            title: tag-set
--            description: |
--              A set of key-value pairs specifying a _tag-set_.
--          description: |
--            The `context` property determines in what context the action is
--            relevant. Thus, what context the action should be presented to the
--            end-user.
--
--            The `context` property contains a set of tag-sets. A _tag-set_ is a
--            set of key-value pairs. A task is said to satisfy a tag-set if
--            `task.tags` is a super-set of the given tag-set. An action is
--            relevant for a task if the task satisfies at least one of
--            the tag-sets.
--
--            Hence, an action with `context: [{a: '1'}, {b: '2'}]` is relevant
--            for any task with `task.tags.a = '1'` or `task.tags.b = '2'`.
--            An action with `context: [{a: '1', b: '2'}]` is only relevant for
--            tasks with `task.tags.a = '1'` and `task.tags.b = '2'`.
--
--            This allows restrictions of what tasks an action is relevant for.
--            For example some tasks might not support running under a debugger.
--
--            The keen reader observes that actions with `context: [{}]` are
--            relevant for all tasks. Conversely, we have that actions with
--            `context: []` are irrelevant for all tasks. We abuse this property
--            and define actions with `context: []` to be relevant for the
--            _task-group_ only.
--
--            That is, an action with `context: []` should not be displayed
--            in the context-sensitive menu for a task; rather it should be
--            displayed when selecting the entire task-group. Presentation
--            details are left for the consumer to decide.
--
--            Notice that the `context` property is optional, but defined to have
--            a default value `context: []`. Hence, if the `context` is not
--            specified, consumers should take this to mean `context: []`, implying
--            that the action is relevant to the task-group, rather than any
--            subset of tasks.
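--
--            As a rough, non-normative sketch, a consumer might implement
--            the relevance rule like this (`is_relevant` is a hypothetical
--            helper, not part of this specification):
--
--            ```python
--            def is_relevant(action, task):
--                # A task satisfies a tag-set when its tags are a super-set
--                # of it; an action is relevant when at least one tag-set
--                # is satisfied. `context: []` is the task-group case and
--                # matches no individual task.
--                tags = task.get("tags", {})
--                return any(
--                    all(tags.get(k) == v for k, v in tag_set.items())
--                    for tag_set in action.get("context", [])
--                )
--            ```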
--        schema:
--          $ref: http://json-schema.org/schema
--          description: |
--            JSON schema for input parameters to the `task` template property.
--            Consumers shall offer a user-interface where end-users can enter
--            values that satisfy this schema. Furthermore, consumers **must**
--            validate entered values against the given schema before parameterizing
--            the `task` template property and triggering the action.
--
--            In practice it's encouraged that consumers employ a facility that
--            can generate HTML forms from JSON schemas. However, if certain
--            schemas are particularly complicated or common, consumers may also
--            hand-write a user-interface for collecting the input. In this case
--            the consumer **must** do a deep comparison between the schema given
--            in the action, and the schema for which a custom user-interface has
--            been written, and fall back to an auto-generated form if the schema
--            doesn't match.
--
--            It is assumed that the JSON schema `description` property will be
--            rendered as markdown when displayed as documentation for end-users.
--            Producers of `public/actions.json` are encouraged to provide a
--            detailed explanation of the input parameters using these
--            `description` properties. And consumers are *strongly* encouraged
--            to render `description` values as markdown.
--
--            The `schema` property is optional, and if not given the input for
--            `task` template parameterization shall be `null`.
--        task:
--          type: object
--          title: task template
--          description: |
--            Task template for triggering the action.
--
--            When an action has been selected in the appropriate context and
--            input satisfying the `schema` (if any) has been collected, the
--            action is triggered by parameterizing the task template given in
--            this property, and creating the resulting task.
--
--            The template is an object that is parameterized using
--            [JSON-e](https://github.com/taskcluster/json-e), with the above
--            variables supplied as context.
--
--            This allows for dumping `input` and `taskId` into environment
--            variables for the task to be created. The following task template
--            injects `input` and `taskId` as environment variables:
--            ```json
--            {
--              "workerType": "my-worker",
--              "payload": {
--                "created": {"$fromNow": ""},
--                "deadline": {"$fromNow": "1 hour 15 minutes"},
--                "expiration": {"$fromNow": "14 days"},
--                "image": "my-docker-image",
--                "env": {
--                  "TASKID_TRIGGERED_FOR": "${taskId}",
--                  "INPUT_JSON": {"$json": {"$eval": "input"}}
--                },
--                ...
--              },
--              ...
--            }
--            ```
--      additionalProperties: false
--      required:
--        - title
--        - description
--        - kind
--        - task
--additionalProperties: false
--required:
--  - version
--  - actions
--  - variables
-diff --git a/taskcluster/docs/actions.rst b/taskcluster/docs/actions.rst
-deleted file mode 100644
---- a/taskcluster/docs/actions.rst
-+++ /dev/null
-@@ -1,28 +0,0 @@
--Actions
--=======
--
--This section shows how to define an action in-tree such that it shows up in
--supported user interfaces like Treeherder.
--
--At a very high level, the process looks like this:
--
-- * The decision task produces an artifact, ``public/actions.json``, indicating
--   what actions are available.
--
-- * A user interface (for example, Treeherder or the Taskcluster tools) consults
--   ``actions.json`` and presents appropriate choices to the user, if necessary
--   gathering additional data from the user, such as the number of times to
--   re-trigger a test case.
--
-- * The user interface follows the action description to carry out the action.
--   In most cases (``action.kind == 'task'``), that entails creating an "action
--   task", including the provided information. That action task is responsible
--   for carrying out the named action, and may create new sub-tasks if necessary
--   (for example, to re-trigger a task).
--
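--As a rough sketch of that flow (assuming the ``taskcluster`` and ``jsone``
--Python packages; the action name, task ids and input below are hypothetical
--placeholders):
--
--.. code-block:: python
--
--    import jsone
--    import requests
--    import taskcluster
--
--    decision_task_id = 'abc123'  # hypothetical decision task of the push
--    target_task_id = 'def456'    # hypothetical task the action applies to
--
--    # Fetch the actions the decision task exposed.
--    queue = taskcluster.Queue()
--    url = queue.buildUrl('getLatestArtifact', decision_task_id,
--                         'public/actions.json')
--    actions = requests.get(url).json()
--
--    # Pick an action by name and instantiate its task template with
--    # JSON-e, supplying the documented variables as context.
--    action = next(a for a in actions['actions'] if a['name'] == 'retrigger')
--    context = dict(actions['variables'],
--                   taskGroupId=decision_task_id,
--                   taskId=target_task_id,
--                   task=queue.task(target_task_id),
--                   input=None)  # or input validated against action['schema']
--    queue.createTask(taskcluster.slugId(),
--                     jsone.render(action['task'], context))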
--
--.. toctree::
--
--    action-spec
--    action-uis
--    action-implementation
-diff --git a/taskcluster/docs/attributes.rst b/taskcluster/docs/attributes.rst
-deleted file mode 100644
---- a/taskcluster/docs/attributes.rst
-+++ /dev/null
-@@ -1,171 +0,0 @@
--===============
--Task Attributes
--===============
--
--Tasks can be filtered, for example to support "try" pushes which only perform a
--subset of the task graph or to link dependent tasks.  This filtering is the
--difference between a full task graph and a target task graph.
--
--Filtering takes place on the basis of attributes.  Each task has a dictionary
--of attributes and filters over those attributes can be expressed in Python.  A
--task may not have a value for every attribute.
--
--The attributes, and acceptable values, are defined here.  In general, attribute
--names and values are the short, lower-case form, with underscores.
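--
--For illustration only, such a filter might be a plain predicate over the
--attribute dictionary (the attribute names below are real ones documented on
--this page; the helper itself is hypothetical):
--
--.. code-block:: python
--
--    def wants_task(attributes):
--        """Keep only linux64 debug build tasks."""
--        return (attributes.get('kind') == 'build'
--                and attributes.get('build_platform') == 'linux64'
--                and attributes.get('build_type') == 'debug')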
--
--kind
--====
--
--A task's ``kind`` attribute gives the name of the kind that generated it, e.g.,
--``build`` or ``spidermonkey``.
--
--run_on_projects
--===============
--
--The projects where this task should be in the target task set.  This is how
--requirements like "only run this on inbound" get implemented.  These are
--either project names or the aliases
--
-- * `integration` -- integration repositories (autoland, inbound, etc)
-- * `trunk` -- integration repositories plus mozilla-central
-- * `release` -- release repositories including mozilla-central
-- * `all` -- everywhere (the default)
--
--For try, this attribute applies only if ``-p all`` is specified.  All jobs can
--be specified by name regardless of ``run_on_projects``.
--
--If ``run_on_projects`` is set to an empty list, then the task will not run
--anywhere, unless its build platform is specified explicitly in try syntax.
--
--task_duplicates
--===============
--
--This is used to indicate that we want multiple copies of the task created.
--This feature is used to track down intermittent job failures.
--
--If this value is set to N, the task-creation machinery will create a total of N
--copies of the task.  Only the first copy will be included in the taskgraph
--output artifacts, although all tasks will be contained in the same taskGroup.
--
--While most attributes are considered read-only, target task methods may alter
--this attribute of tasks they include in the target set.
--
--build_platform
--==============
--
--The build platform defines the platform for which the binary was built.  It is
--set for both build and test jobs, although test jobs may have a different
--``test_platform``.
--
--build_type
--==========
--
--The type of build being performed.  This is a subdivision of ``build_platform``,
--used for different kinds of builds that target the same platform.  Values are
--
-- * ``debug``
-- * ``opt``
--
--test_platform
--=============
--
--The test platform defines the platform on which tests are run.  It is only
--defined for test jobs and may differ from ``build_platform`` when the same binary
--is tested on several platforms (for example, on several versions of Windows).
--This applies for both talos and unit tests.
--
--Unlike build_platform, the test platform is represented in a slash-separated
--format, e.g., ``linux64/opt``.
--
--unittest_suite
--==============
--
--This is the unit test suite being run in a unit test task.  For example,
--``mochitest`` or ``cppunittest``.
--
--unittest_flavor
--===============
--
--If a unittest suite has subdivisions, those are represented as flavors.  Not
--all suites have flavors, in which case this attribute should be set to match
--the suite.  Examples: ``mochitest-devtools-chrome-chunked`` or ``a11y``.
--
--unittest_try_name
--=================
--
--This is the name used to refer to a unit test via try syntax.  It
--may not match either of ``unittest_suite`` or ``unittest_flavor``.
--
--talos_try_name
--==============
--
--This is the name used to refer to a talos job via try syntax.
--
--job_try_name
--============
--
--This is the name used to refer to a "job" via try syntax (``-j``).  Note that for
--some kinds, ``-j`` also matches against ``build_platform``.
--
--test_chunk
--==========
--
--This is the chunk number of a chunked test suite (talos or unittest).  Note
--that this is a string!
--
--e10s
--====
--
--For test suites which distinguish whether they run with or without e10s, this
--boolean value identifies this particular run.
--
--image_name
--==========
--
--For the ``docker_image`` kind, this attribute contains the docker image name.
--
--nightly
--=======
--
--Signals whether the task is part of a nightly graph. Useful when filtering
--out nightly tasks from full task set at target stage.
--
--all_locales
--===========
--
--For the ``l10n`` and ``nightly-l10n`` kinds, this attribute contains the list
--of relevant locales for the platform.
--
--all_locales_with_changesets
--===========================
--
--Contains a dict of l10n changesets, mapped by locales (same as in ``all_locales``).
--
--l10n_chunk
--==========
--For the ``l10n`` and ``nightly-l10n`` kinds, this attribute contains the chunk
--number of the job. Note that this is a string!
--
--chunk_locales
--=============
--For the ``l10n`` and ``nightly-l10n`` kinds, this attribute contains an array of
--the individual locales this chunk is responsible for processing.
--
--locale
--======
--For jobs that operate on only one locale, we set the attribute ``locale`` to the
--specific locale involved. Currently this is only in l10n versions of the
--``beetmover`` and ``balrog`` kinds.
--
--signed
--======
--Signals that the output of this task contains signed artifacts.
--
--repackage_type
--==============
--This is the type of repackage. Can be ``repackage`` or 
--``repackage_signing``.
--
--toolchain-artifact
--==================
--For toolchain jobs, this is the path to the artifact for that toolchain.
-diff --git a/taskcluster/docs/caches.rst b/taskcluster/docs/caches.rst
-deleted file mode 100644
---- a/taskcluster/docs/caches.rst
-+++ /dev/null
-@@ -1,50 +0,0 @@
--.. _taskcluster_caches:
--
--=============
--Common Caches
--=============
--
--There are various caches used by the in-tree tasks. This page attempts to
--document them and their appropriate use.
--
--Version Control Caches
--======================
--
--``level-{{level}}-checkouts-{{version}}``
--   This cache holds version control checkouts, each in a subdirectory named
--   after the repo (e.g., ``gecko``).
--
--   Checkouts should be read-only. If a task needs to create new files from
--   content of a checkout, this content should be written in a separate
--   directory/cache (like a workspace).
--
--   A ``version`` parameter appears in the cache name to allow
--   backwards-incompatible changes to the cache's behavior.
--
--   The ``hg-store`` contains a `shared store <https://www.mercurial-scm.org/wiki/ShareExtension>`_
--   that is used by ``hg robustcheckout``. If you are using ``run-task`` you
--   should set the ``HG_STORE_PATH`` environment variable to point to this
--   directory. If you are using ``hg robustcheckout``, pass this directory to the
--   ``--sharebase`` option.
--
--``level-{{level}}-{{project}}-tc-vcs`` (deprecated)
--    This cache is used internally by ``tc-vcs``.  This tool is deprecated and
--    should be replaced with ``hg robustcheckout``.
--
--
--Workspace Caches
--================
--
--``level-{{level}}-*-workspace``
--   These caches (of various names typically ending with ``workspace``)
--   contain state to be shared between task invocations. Use cases are
--   dependent on the task.
--
--Other
--=====
--
--``tooltool-cache``
--    Tooltool invocations should use this cache.  Tooltool will store files here
--    indexed by their hash, and will verify hashes before copying files from
--    this directory, so there is no concern with sharing the cache between jobs
--    of different levels.
-diff --git a/taskcluster/docs/cron.rst b/taskcluster/docs/cron.rst
-deleted file mode 100644
---- a/taskcluster/docs/cron.rst
-+++ /dev/null
-@@ -1,58 +0,0 @@
--Periodic Taskgraphs
--===================
--
--The cron functionality allows in-tree scheduling of task graphs that run
--periodically, instead of on a push.
--
--Cron.yml
----------
--
--In the root of the Gecko directory, you will find ``.cron.yml``.  This defines
--the periodic tasks ("cron jobs") run for Gecko.  Each specifies a name, what to
--do, and some parameters to determine when the cron job should occur.
--
--See ``taskcluster/taskgraph/cron/schema.py`` for details on the format and
--meaning of this file.
--
--How It Works
--------------
--
--The `TaskCluster Hooks Service <https://tools.taskcluster.net/hooks>`_ has a
--hook configured for each repository supporting periodic task graphs.  The hook
--runs every 15 minutes, and the resulting task is referred to as a "cron task".
--That cron task runs ``./mach taskgraph cron`` in a checkout of the Gecko source
--tree.
--
--The mach subcommand reads ``.cron.yml``, then consults the current time
--(actually the time the cron task was created, rounded down to the nearest 15
--minutes) and creates tasks for any cron jobs scheduled at that time.
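--
--As a sketch of that rounding rule (in epoch seconds; the helper name is
--made up):
--
--.. code-block:: python
--
--    def cron_time(task_created_epoch):
--        """Round down to the nearest 15-minute boundary."""
--        return task_created_epoch - (task_created_epoch % (15 * 60))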
--
--Each cron job in ``.cron.yml`` specifies a ``job.type``, corresponding to a
--function responsible for creating TaskCluster tasks when the job runs.
--
--Decision Tasks
--..............
--
--For ``job.type`` "decision-task", tasks are created based on
--``.taskcluster.yml`` just like the decision tasks that result from a push to a
--repository.  They run with a distinct ``taskGroupId``, and are free to create
--additional tasks comprising a task graph.
--
--Scopes
--------
--
--The cron task runs with the sum of all cron job scopes for the given repo.  For
--example, for the "sequoia" project, the scope would be
--``assume:repo:hg.mozilla.org/projects/sequoia:cron:*``.  Each cron job creates
--tasks with scopes for that particular job, by name.  For example, the
--``check-frob`` cron job on that repo would run with
--``assume:repo:hg.mozilla.org/projects/sequoia:cron:check-frob``.
--
--.. important::
--
--    The individual cron scopes are a useful check to ensure that a job is not
--    accidentally doing something it should not, but cannot actually *prevent* a
--    job from using any of the scopes afforded to the cron task itself (the
--    ``..cron:*`` scope).  This is simply because the cron task runs arbitrary
--    code from the repo, and that code can be easily modified to create tasks
--    with any scopes that it possesses.
-diff --git a/taskcluster/docs/docker-images.rst b/taskcluster/docs/docker-images.rst
-deleted file mode 100644
---- a/taskcluster/docs/docker-images.rst
-+++ /dev/null
-@@ -1,42 +0,0 @@
--.. _taskcluster_dockerimages:
--
--=============
--Docker Images
--=============
--
--TaskCluster Docker images are defined in the source directory under
--``taskcluster/docker``. Each directory therein contains the name of an
--image used as part of the task graph.
--
--Adding Extra Files to Images
--============================
--
--Dockerfile syntax has been extended to allow *any* file from the
--source checkout to be added to the image build *context*. (Traditionally
--you can only ``ADD`` files from the same directory as the Dockerfile.)
--
--Simply add the following syntax as a comment in a Dockerfile::
--
--   # %include <path>
--
--e.g.::
--
--   # %include mach
--   # %include testing/mozharness
--
--The argument to ``# %include`` is a relative path from the root level of
--the source directory. It can be a file or a directory. If a file, only that
--file will be added. If a directory, every file under that directory will be
--added (even files that are untracked or ignored by version control).
--
--Files added using ``# %include`` syntax are available inside the build
--context under the ``topsrcdir/`` path.
--
--Files are added as they exist on disk. e.g. executable flags should be
--preserved. However, the file owner/group is changed to ``root`` and the
--``mtime`` of the file is normalized.
--
--Here is an example Dockerfile snippet::
--
--   # %include mach
--   ADD topsrcdir/mach /home/worker/mach
-diff --git a/taskcluster/docs/how-tos.rst b/taskcluster/docs/how-tos.rst
-deleted file mode 100644
---- a/taskcluster/docs/how-tos.rst
-+++ /dev/null
-@@ -1,305 +0,0 @@
--How Tos
--=======
--
--All of this equipment is here to help you get your work done more efficiently.
--However, learning how task-graphs are generated is probably not the work you
--are interested in doing.  This section should help you accomplish some of the
--more common changes to the task graph with minimal fuss.
--
--.. important::
--
--    If you cannot accomplish what you need with the information provided here,
--    please consider whether you can achieve your goal in a different way.
--    Perhaps something simpler would cost a bit more in compute time, but save
--    the much more expensive resource of developers' mental bandwidth.
--    Task-graph generation is already complex enough!
--
--    If you want to proceed, you may need to delve into the implementation of
--    task-graph generation.  The documentation and code are designed to help, as
--    are the authors - ``hg blame`` may help track down helpful people.
--
--    As you write your new transform or add a new kind, please consider the next
--    developer.  Where possible, make your change data-driven and general, so
--    that others can make a much smaller change.  Document the semantics of what
--    you are changing clearly, especially if it involves modifying a transform
--    schema.  And if you are adding complexity temporarily while making a
--    gradual transition, please open a new bug to remind yourself to remove the
--    complexity when the transition is complete.
--
--Hacking Task Graphs
---------------------
--
--The recommended process for changing task graphs is this:
--
--1. Find a recent decision task on the project or branch you are working on, and
--   download its ``parameters.yml`` artifact.  Alternately, you
--   can simply take note of the artifact URL, or just the decision task's
--   ``task-id``.  This file contains all of the inputs to the task-graph
--   generation process.  Its contents are simple enough if you would like to
--   modify it, and it is documented in :doc:`parameters`.
--
--2. Run one of the ``mach taskgraph`` subcommands (see :doc:`taskgraph`) to
--   generate a baseline against which to measure your changes, passing the
--   parameters you found in the previous step.  For example:
--
--   .. code-block:: none
--
--       ./mach taskgraph tasks --json -p parameters.yml > old-tasks.json
--       ./mach taskgraph tasks --json -p url/to/parameters.yml > old-tasks.json
--       ./mach taskgraph tasks --json -p task-id=<task-id> > old-tasks.json
--
--3. Make your modifications under ``taskcluster/``.
--
--4. Run the same ``mach taskgraph`` command, sending the output to a new file,
--   and use ``diff`` to compare the old and new files.  Make sure your changes
--   have the desired effect and no undesirable side-effects.
--
--5. When you are satisfied with the changes, push them to try to ensure that the
--   modified tasks work as expected.
--
--Hacking Actions
--...............
--
--If you are working on an action task and wish to test it out locally, use the
--``./mach taskgraph test-action-callback`` command:
--
--   .. code-block:: none
--
--        ./mach taskgraph test-action-callback \
--            --task-id I4gu9KDmSZWu3KHx6ba6tw --task-group-id sMO4ybV9Qb2tmcI1sDHClQ \
--            -p parameters.yml --input input.yml \
--            hello_world_action
--
--This invocation will run the hello world callback with the given inputs and
--print any created tasks to stdout, rather than actually creating them.
--
--Common Changes
----------------
--
--Changing Test Characteristics
--.............................
--
--First, find the test description.  This will be in
--``taskcluster/ci/*/tests.yml``, for the appropriate kind (consult
--:doc:`kinds`).  You will find a YAML stanza for each test suite, and each
--stanza defines the test's characteristics.  For example, the ``chunks``
--property gives the number of chunks to run.  This can be specified as a simple
--integer if all platforms have the same chunk count, or it can be keyed by test
--platform.  For example:
--
--.. code-block:: yaml
--
--    chunks:
--        by-test-platform:
--            linux64/debug: 10
--            default: 8
--
--The full set of available properties is in
--``taskcluster/taskgraph/transforms/tests/test_description.py``.  Some other
--commonly-modified properties are ``max-run-time`` (useful if tests are being
--killed for exceeding maxRunTime) and ``treeherder-symbol``.
--
--.. note::
--
--    Android tests are also chunked at the mozharness level, so you will need to
--    modify the relevant mozharness config, as well.
--
--Adding a Test Suite
--...................
--
--To add a new test suite, you will need to know the proper mozharness invocation
--for that suite, and which kind it fits into (consult :doc:`kinds`).
--
--Add a new stanza to ``taskcluster/ci/<kind>/tests.yml``, copying from the other
--stanzas in that file.  The meanings should be clear, but authoritative
--documentation is in
--``taskcluster/taskgraph/transforms/tests/test_description.py`` should you need
--it.  The stanza name is the name by which the test will be referenced in try
--syntax.
--
--Add your new test to a test set in ``test-sets.yml`` in the same directory.  If
--the test should only run on a limited set of platforms, you may need to define
--a new test set and reference that from the appropriate platforms in
--``test-platforms.yml``.  If you do so, include some helpful comments in
--``test-sets.yml`` for the next person.
--
--Greening Up a New Test
--......................
--
--When a test is not yet reliably green, configuration for that test should not
--be landed on integration branches.  Of course, you can control where the
--configuration is landed!  For many cases, it is easiest to green up a test in
--try: push the configuration to run the test to try along with your work to fix
--the remaining test failures.
--
--When working with a group, check out a "twig" repository to share among your
--group, and land the test configuration in that repository.  Once the test is
--green, merge to an integration branch and the test will begin running there as
--well.
--
--Adding a New Task
--.................
--
--If you are adding a new task that is not a test suite, there are a number of
--options.  A few questions to consider:
--
-- * Is this a new build platform or variant that will produce an artifact to
--   be run through the usual test suites?
--
-- * Does this task depend on other tasks?  Do other tasks depend on it?
--
-- * Is this one of a few related tasks, or will you need to generate a large
--   set of tasks using some programmatic means (for example, chunking)?
--
-- * How is the task actually executed?  Mozharness?  Mach?
--
-- * What kind of environment does the task require?
--
--Armed with that information, you can choose among a few options for
--implementing this new task.  Try to choose the simplest solution that will
--satisfy your near-term needs.  Since this is all implemented in-tree, it
--is not difficult to refactor later when you need more generality.
--
--Existing Kind
--`````````````
--
--The simplest option is to add your task to an existing kind.  This is most
--practical when the task "makes sense" as part of that kind -- for example, if
--your task is building an installer for a new platform using mozharness scripts
--similar to the existing build tasks, it makes most sense to add your task to
--the ``build`` kind.  If you need some additional functionality in the kind,
--it's OK to modify the implementation as necessary, as long as the modification
--is complete and useful to the next developer to come along.
--
--Tasks in the ``build`` kind generate Firefox installers, and the ``test`` kind
--will add a full set of Firefox tests for each ``build`` task.
--
--New Kind
--````````
--
--The next option to consider is adding a new kind.  A distinct kind gives you
--some isolation from other task types, which can be nice if you are adding an
--experimental kind of task.
--
--Kinds can range in complexity.  The simplest sort of kind uses the transform
--loader to read a list of jobs from the ``jobs`` key, and applies the standard
--``job`` and ``task`` transforms:
--
--.. code-block:: yaml
--
--    implementation: taskgraph.task.transform:TransformTask
--    transforms:
--       - taskgraph.transforms.job:transforms
--       - taskgraph.transforms.task:transforms
--    jobs:
--       - ..your job description here..
--
--Job descriptions are defined and documented in
--``taskcluster/taskgraph/transforms/job/__init__.py``.
--
--Custom Kind Loader
--``````````````````
--
--If your task depends on other tasks, then the decision of which tasks to create
--may require some code.  For example, the ``test`` kind iterates over
--the builds in the graph, generating a full set of test tasks for each one.  This specific
--post-build behavior is implemented as a loader defined in ``taskcluster/taskgraph/loader/test.py``.
--
--A custom loader is useful when the set of tasks you want to create is not
--static but based on something else (such as the available builds) or when the
--dependency relationships for your tasks are complex.
--
--Custom Transforms
--`````````````````
--
--Most loaders apply a series of ":doc:`transforms <transforms>`" that start with
--an initial human-friendly description of a task and end with a task definition
--suitable for insertion into a Taskcluster queue.
--
--Custom transforms can be useful to apply defaults, simplifying the YAML files
--in your kind. They can also apply business logic that is more easily expressed
--in code than in YAML.
--
--Transforms need not be one-to-one: a transform can produce zero or more outputs
--for each input. For example, the test transforms perform chunking by producing
--an output for each chunk of a given input.
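--
--For illustration, a transform is an ordinary Python generator over job
--descriptions. A minimal sketch (assuming the in-tree ``TransformSequence``
--helper; the chunk key is made up for illustration) might look like:
--
--.. code-block:: python
--
--    from taskgraph.transforms.base import TransformSequence
--
--    transforms = TransformSequence()
--
--    @transforms.add
--    def set_defaults_and_chunk(config, jobs):
--        """Apply a default, then fan each job out into its chunks."""
--        for job in jobs:
--            job.setdefault('max-run-time', 3600)
--            for this_chunk in range(1, job.get('chunks', 1) + 1):
--                # 'this_chunk' is an illustrative key, not the real schema.
--                yield dict(job, this_chunk=this_chunk)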
--
--Ideally those transforms will produce job descriptions, so you can use the
--existing ``job`` and ``task`` transforms:
--
--.. code-block:: yaml
--
--    transforms:
--       - taskgraph.transforms.my_stuff:transforms
--       - taskgraph.transforms.job:transforms
--       - taskgraph.transforms.task:transforms
--
--Try to keep transforms simple, single-purpose and well-documented!
--
--Custom Run-Using
--````````````````
--
--If the way your task is executed is unique (so, not a mach command or
--mozharness invocation), you can add a new implementation of the job
--description's "run" section.  Before you do this, consider that it might be a
--better investment to modify your task to support invocation via mozharness or
--mach, instead.  If this is not possible, then adding a new file in
--``taskcluster/taskgraph/transforms/job`` with a structure similar to its peers
--will make the new run-using option available for job descriptions.
--
--Something Else?
--...............
--
--If you make another change not described here that turns out to be simple or
--common, please include an update to this file in your patch.
--
--
--Schedule a Task on Try
------------------------
--
--There are two methods for scheduling a task on try.
--
--The first method is a command line string called ``try syntax`` which is passed
--into the decision task via the commit message. An example try syntax might look
--like:
--
--.. parsed-literal::
--
--    try: -b o -p linux64 -u mochitest-1 -t none
--
--This gets parsed by ``taskgraph.try_option_syntax:TryOptionSyntax`` and returns
--a list of matching task labels. For more information see the
--`TryServer wiki page <https://wiki.mozilla.org/Try>`_.
--
--The second method uses a checked-in file called ``try_task_config.json`` which
--lives at the root of the source dir. The format of this file is either a list
--of task labels, or a JSON object where task labels make up the keys. For
--example, the ``try_task_config.json`` file might look like:
--
--.. parsed-literal::
--
--    [
--      "test-windows10-64/opt-web-platform-tests-12",
--      "test-windows7-32/opt-reftest-1",
--      "test-windows7-32/opt-reftest-2",
--      "test-windows7-32/opt-reftest-3",
--      "build-linux64/debug",
--      "source-test-mozlint-eslint"
--    ]
--
--Very simply, this will run any task label that gets passed in as well as their
--dependencies. While it is possible to manually commit this file and push to
--try, it is mainly meant to be a generation target for various trychooser tools.
--
--A list of all possible task labels can be obtained by running:
--
--.. parsed-literal::
--
--    $ ./mach taskgraph tasks
--
--A list of task labels relevant to a tree (defaults to mozilla-central) can be
--obtained with:
--
--.. parsed-literal::
--
--    $ ./mach taskgraph target
-diff --git a/taskcluster/docs/index.rst b/taskcluster/docs/index.rst
-deleted file mode 100644
---- a/taskcluster/docs/index.rst
-+++ /dev/null
-@@ -1,33 +0,0 @@
--.. _taskcluster_index:
--
--TaskCluster Task-Graph Generation
--=================================
--
--The ``taskcluster`` directory contains support for defining the graph of tasks
--that must be executed to build and test the Gecko tree.  This is more complex
--than you might suppose!  This implementation supports:
--
-- * A huge array of tasks
-- * Different behavior for different repositories
-- * "Try" pushes, with special means to select a subset of the graph for execution
-- * Optimization -- skipping tasks that have already been performed
-- * Extremely flexible generation of a variety of tasks using an approach of
--   incrementally transforming job descriptions into task definitions.
--
--This section of the documentation describes the process in some detail,
--referring to the source where necessary.  If you are reading this with a
--particular goal in mind and would rather avoid becoming a task-graph expert,
--check out the :doc:`how-to section <how-tos>`.
--
--.. toctree::
--
--    taskgraph
--    loading
--    transforms
--    optimization
--    yaml-templates
--    docker-images
--    cron
--    actions
--    how-tos
--    reference
-diff --git a/taskcluster/docs/kinds.rst b/taskcluster/docs/kinds.rst
-deleted file mode 100644
---- a/taskcluster/docs/kinds.rst
-+++ /dev/null
-@@ -1,236 +0,0 @@
--Task Kinds
--==========
--
--This section lists and documents the available task kinds.
--
--build
-------
--
--Builds are tasks that produce an installer or other output that can be run by
--users or automated tests.  This is more restrictive than most definitions of
--"build" in a Mozilla context: it does not include tasks that run build-like
--actions for static analysis or to produce instrumented artifacts.
--
--build-signing
---------------
--
--Many builds must be signed. The build-signing task takes the unsigned `build`
--kind artifacts and passes them through signingscriptworker to a signing server
--and returns signed results.
--
--artifact-build
----------------
--
--This kind performs an artifact build: one based on precompiled binaries
--discovered via the TaskCluster index.  This task verifies that such builds
--continue to work correctly.
--
--hazard
--------
--
--Hazard builds are similar to "regular" builds, but use a compiler extension to
--extract a bunch of data from the build and then analyze that data looking for
--hazardous behaviors.
--
--l10n
------
--
--The l10n kind takes the last published nightly build, and generates localized builds
--from it. You can read more about how to trigger these on the `wiki
--<https://wiki.mozilla.org/ReleaseEngineering/TryServer#Desktop_l10n_jobs_.28on_Taskcluster.29>`_.
--
--nightly-l10n
--------------
--
--The nightly l10n kind repacks a specific nightly build (from the same source code)
--in order to provide localized versions of the same source.
--
--nightly-l10n-signing
----------------------
--
--The nightly l10n signing kind takes artifacts from the nightly-l10n kind and
--passes them to signing servers to have their contents signed, based on the
--appropriate signing format. One signing job is created for each nightly-l10n
--job (usually chunked).
--
--source-test
-------------
--
--Source-tests are tasks that run directly from the Gecko source. This can include linting,
--unit tests, source-code analysis, or measurement work. While source-test tasks run from
--a source checkout, it is still possible for them to depend on a build artifact, though
--often they do not.
--
--upload-symbols
----------------
--
--Upload-symbols tasks run after builds and upload the symbols files generated by
--build tasks to Socorro for later use in crash analysis.
--
--valgrind
----------
--
--Valgrind tasks produce builds instrumented by valgrind.
--
--static-analysis
-----------------
--
--Static analysis builds use the compiler to perform some detailed analysis of
--the source code while building.  The useful output from these tasks are their
--build logs, and while they produce a binary, they do not upload it as an
--artifact.
--
--toolchain
-----------
--
--Toolchain builds create the compiler toolchains used to build Firefox.  These
--will eventually be dependencies of the builds themselves, but for the moment
--are run manually via try pushes and the results uploaded to tooltool.
--
--spidermonkey
--------------
--
--Spidermonkey tasks check out the full gecko source tree, then compile only the
--spidermonkey portion.  Each task runs specific tests after the build.
--
--Tests
-------
--
--Test tasks for Gecko products are divided into several kinds, but share a
--common implementation.  The process goes like this, based on a set of YAML
--files named in ``kind.yml``:
--
-- * For each build task, determine the related test platforms based on the build
--   platform.  For example, a Windows build might be tested on Windows 7
--   and Windows 10.  Each test platform specifies "test sets" indicating which
--   tests to run.  This is configured in the file named
--   ``test-platforms.yml``.
--
-- * Each test set is expanded to a list of tests to run.  This is configured in
--   the file named by ``test-sets.yml``. A platform may specify several test
--   sets, in which case the union of those sets is used.
--
-- * Each named test is looked up in the file named by ``tests.yml`` to find a
--   test description.  This test description indicates what the test does, how
--   it is reported to treeherder, and how to perform the test, all in a
--   platform-independent fashion.
--
-- * Each test description is converted into one or more tasks.  This is
--   performed by a sequence of transforms defined in the ``transforms`` key in
--   ``kind.yml``.  See :doc:`transforms`: for more information on these
--   transforms.
--
-- * The resulting tasks become a part of the task graph.
--
--.. important::
--
--    This process generates *all* test jobs, regardless of tree or try syntax.
--    It is up to a later stage of the task-graph generation (the target set) to
--    select the tests that will actually be performed.
--
--test
--....
--
--The ``test`` kind defines tests for builds.  Its ``tests.yml`` defines
--the full suite of desktop tests and their particulars, leaving it to the
--transforms to determine how those particulars apply to the various platforms.
--
--This kind includes both unit tests and talos.
--
--docker-image
--------------
--
--Tasks of the ``docker-image`` kind build the Docker images in which other
--Docker tasks run.
--
--The tasks to generate each docker image have predictable labels:
--``build-docker-image-<name>``.
--
--Docker images are built from subdirectories of ``taskcluster/docker``, using
--``docker build``.  There is currently no capability for one Docker image to
--depend on another in-tree docker image, without uploading the latter to a
--Docker repository.
--
--The task definition used to create the image-building tasks is given in
--``image.yml`` in the kind directory, and is interpreted as a :doc:`YAML
--Template <yaml-templates>`.
--
--android-stuff
---------------
--
--balrog
--------
--
--Balrog is the Mozilla Update Server. Jobs of this kind submit the information
--that tells Firefox an update is available for the related job.
--
--balrog-l10n
-------------
--
--Balrog is the Mozilla Update Server. Jobs of this kind submit the information
--that tells Firefox an update is available for the localized job involved.
--
--beetmover
-----------
--
--Beetmover takes specific artifacts ("beets") and pushes them to a location outside
--of Taskcluster's task artifacts (archive.mozilla.org, for one) and in the process
--determines the final location and a "pretty" name (versioned product name).
--
--beetmover-l10n
----------------
--
--Beetmover L10n takes specific artifacts ("beets") and pushes them to a location
--outside of Taskcluster's task artifacts (archive.mozilla.org, for one) and in the
--process determines the final location and a "pretty" name (versioned product name).
--This separate kind uses logic specific to localized artifacts, such as including
--the language in the final artifact names.
--
--beetmover-repackage
---------------------
--
--Beetmover-repackage is beetmover but for tasks that need an intermediate step
--between signing and packaging, such as OSX. For more details see the definitions
--of the Beetmover kind above and the repackage kind below.
--
--checksums-signing
-------------------
--Checksums-signing tasks take as input the checksums file generated by beetmover
--tasks and sign it via the signing scriptworkers, returning the same file signed
--plus an additional detached signature.
--
--beetmover-checksums
---------------------
--Beetmover-checksums takes specific artifact checksums and pushes them to a location
--outside of Taskcluster's task artifacts (archive.mozilla.org, for one) and in the
--process determines the final location and a "pretty" name (versioned product name).
--
--push-apk-breakpoint
---------------------
--Decides whether or not APKs should be published onto Google Play Store. Jobs of this
--kind depend on all the signed multi-locales (aka "multi") APKs for a given release,
--in order to make the decision.
--
--push-apk
----------
--PushApk publishes Android packages onto Google Play Store. Jobs of this kind take
--all the signed multi-locales (aka "multi") APKs for a given release and upload them
--all at once. They also depend on the breakpoint.
--
--repackage
-----------
--Repackage tasks take a signed output and package it up into something suitable
--for shipping to our users. For example, on OSX we return a tarball as the signed
--output and this task would package that up as an Apple Disk Image (.dmg).
--
--
--repackage-l10n
----------------
--Repackage-L10n is a ``Repackage`` task split up to be suitable for use after l10n repacks.
--
--
--repackage-signing
-------------------
--Repackage-signing tasks take the repackaged installers (Windows) and update
--packaging (with the signed internal bits) and sign them.
-diff --git a/taskcluster/docs/loading.rst b/taskcluster/docs/loading.rst
-deleted file mode 100644
---- a/taskcluster/docs/loading.rst
-+++ /dev/null
-@@ -1,43 +0,0 @@
--Loading Tasks
--=============
--
--The full task graph generation involves creating tasks for each kind.  Kinds
--are ordered to satisfy ``kind-dependencies``, and then the ``loader`` specified
--in ``kind.yml`` is used to load the tasks for that kind. It should point to
--a Python function like::
--
--    def loader(cls, kind, path, config, parameters, loaded_tasks):
--        pass
--
--The ``kind`` is the name of the kind; the configuration for that kind
--named this loader.
--
--The ``path`` is the path to the configuration directory for the kind. This
--can be used to load extra data, templates, etc.
--
--The ``parameters`` give details on which to base the task generation. See
--:ref:`parameters` for details.
--
--At the time this method is called, all kinds on which this kind depends
--(that is, specified in the ``kind-dependencies`` key in ``config``)
--have already loaded their tasks, and those tasks are available in
--the list ``loaded_tasks``.
--
--The return value is a list of inputs to the transforms listed in the kind's
--``transforms`` property. The specific format for the input depends on the first
--transform - whatever it expects. The final transform should be
--``taskgraph.transforms.task:transforms``, which produces the output format the
--task-graph generation infrastructure expects.
--
--The ``transforms`` key in ``kind.yml`` is further documented in
--:doc:`transforms`.  For more information on how all of this works, consult the
--docstrings and comments in the source code itself.
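--
--As a rough, hypothetical example, a trivial loader for a kind whose
--``kind.yml`` carries a ``jobs`` list could be written as::
--
--    def loader(cls, kind, path, config, parameters, loaded_tasks):
--        # Yield one transform input per entry in the ``jobs`` key,
--        # tagging each with the name of the kind.
--        for job in config.get('jobs', []):
--            job['kind'] = kind
--            yield job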
--
--Try option syntax
-------------------
--
--The ``parse-commit`` optional field specified in ``kind.yml`` links to a
--function to parse the command line options in the ``--message`` mach parameter.
--Currently, the only valid value is ``taskgraph.try_option_syntax:parse_message``.
--The parsed arguments are stored in ``config.config['args']``; this corresponds
--to the object returned by ``parse_args`` from Python's ``argparse`` module.
-diff --git a/taskcluster/docs/optimization.rst b/taskcluster/docs/optimization.rst
-deleted file mode 100644
---- a/taskcluster/docs/optimization.rst
-+++ /dev/null
-@@ -1,44 +0,0 @@
--Optimization
--============
--
--The objective of optimization is to remove as many tasks from the graph as
--possible, as efficiently as possible, thereby delivering useful results as
--quickly as possible.  For example, ideally if only a test script is modified in
--a push, then the resulting graph contains only the corresponding test suite
--task.
--
--A task is said to be "optimized" when it is either replaced with an equivalent,
--already-existing task, or dropped from the graph entirely.
--
--Optimization Functions
------------------------
--
--During the optimization phase of task-graph generation, each task is optimized
--in post-order, meaning that each task's dependencies will be optimized before
--the task itself is optimized.
--
--Each task has a ``task.optimizations`` property describing the optimization
--methods that apply.  Each is specified as a list of method and arguments. For
--example::
--
--    task.optimizations = [
--        ['seta'],
--        ['skip-unless-changed', ['js/**', 'tests/**']],
--    ]
--
--These methods are defined in ``taskcluster/taskgraph/optimize.py``.  They are
--applied in order, and the first to return a success value causes the task to
--be optimized.
--
--Each method can return either a taskId (indicating that the given task can be
--replaced) or indicate that the task can be optimized away. If a task on which
--others depend is optimized away, task-graph generation will fail.
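--
--For illustration, a method of this shape might be written as follows (a
--hypothetical sketch, not the actual in-tree code)::
--
--    def skip_unless_changed(task, params, file_patterns):
--        # Return True to optimize the task away, a taskId to replace it
--        # with an existing task, or False/None to keep it in the graph.
--        if not files_changed(params, file_patterns):  # assumed helper
--            return True
--        return False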
--
--Optimizing Target Tasks
-------------------------
--
--In some cases, such as try pushes, tasks in the target task set have been
--explicitly requested and are thus excluded from optimization. In other cases,
--the target task set is almost the entire task graph, so targeted tasks are
--considered for optimization.  This behavior is controlled with the
--``optimize_target_tasks`` parameter.
-diff --git a/taskcluster/docs/parameters.rst b/taskcluster/docs/parameters.rst
-deleted file mode 100644
---- a/taskcluster/docs/parameters.rst
-+++ /dev/null
-@@ -1,104 +0,0 @@
--==========
--Parameters
--==========
--
--Task-graph generation takes a collection of parameters as input, in the form of
--a JSON or YAML file.
--
--During decision-task processing, some of these parameters are supplied on the
--command line or by environment variables.  The decision task helpfully produces
--a full parameters file as one of its output artifacts.  The other ``mach
--taskgraph`` commands can take this file as input.  This can be very helpful
--when working on a change to the task graph.
--
--When experimenting with local runs of the task-graph generation, it is always
--best to find a recent decision task's ``parameters.yml`` file, and modify that
--file if necessary, rather than starting from scratch.  This ensures you have a
--complete set of parameters.
--
--The properties of the parameters object are described here, divided roughly by
--topic.
--
--Push Information
------------------
--
--``base_repository``
--   The repository from which to do an initial clone, utilizing any available
--   caching.
--
--``head_repository``
--   The repository containing the changeset to be built.  This may differ from
--   ``base_repository`` in cases where ``base_repository`` is likely to be cached
--   and only a few additional commits are needed from ``head_repository``.
--
--``head_rev``
--   The revision to check out; this can be a short revision string
--
--``head_ref``
--   For Mercurial repositories, this is the same as ``head_rev``.  For
--   git repositories, which do not allow pulling explicit revisions, this gives
--   the symbolic ref containing ``head_rev`` that should be pulled from
--   ``head_repository``.
--
--``include_nightly``
--   Include nightly builds and tests in the graph.
--
--``owner``
--   Email address indicating the person who made the push.  Note that this
--   value may be forged and *must not* be relied on for authentication.
--
--``message``
--   The commit message
--
--``pushlog_id``
--   The ID from the ``hg.mozilla.org`` pushlog
--
--``pushdate``
--   The timestamp of the push to the repository that triggered this decision
--   task.  Expressed as an integer seconds since the UNIX epoch.
--
--``build_date``
--   The timestamp of the build date. Defaults to ``pushdate`` and falls back to present time of
--   taskgraph invocation. Expressed as an integer seconds since the UNIX epoch.
--
--``moz_build_date``
--   A formatted timestamp of ``build_date``. Expressed as a string with the following
--   format: %Y%m%d%H%M%S
--
--Tree Information
------------------
--
--``project``
--   Another name for what may otherwise be called tree or branch or
--   repository.  This is the unqualified name, such as ``mozilla-central`` or
--   ``cedar``.
--
--``level``
--   The `SCM level
--   <https://www.mozilla.org/en-US/about/governance/policies/commit/access-policy/>`_
--   associated with this tree.  This dictates the names of resources used in the
--   generated tasks, and those tasks will fail if it is incorrect.
--
--Target Set
------------
--
--The "target set" is the set of task labels which must be included in a task
--graph.  The task graph generation process will include any tasks required by
--those in the target set, recursively.  In a decision task, this set can be
--specified programmatically using one of a variety of methods (e.g., parsing try
--syntax or reading a project-specific configuration file).
--
--``filters``
--    List of filter functions (from ``taskcluster/taskgraph/filter_tasks.py``) to
--    apply. This is usually defined internally, as filters are typically
--    global.
--
--``target_tasks_method``
--    The method to use to determine the target task set.  This is the suffix of
--    one of the functions in ``taskcluster/taskgraph/target_tasks.py``.
--
--``optimize_target_tasks``
--   If true, then target tasks are eligible for optimization.
--
--``include_nightly``
--   If true, then nightly tasks are eligible for optimization.
-diff --git a/taskcluster/docs/reference.rst b/taskcluster/docs/reference.rst
-deleted file mode 100644
---- a/taskcluster/docs/reference.rst
-+++ /dev/null
-@@ -1,12 +0,0 @@
--Reference
--=========
--
--These sections contain some reference documentation for various aspects of
--taskgraph generation.
--
--.. toctree::
--
--    kinds
--    parameters
--    attributes
--    caches
-diff --git a/taskcluster/docs/taskgraph.rst b/taskcluster/docs/taskgraph.rst
-deleted file mode 100644
---- a/taskcluster/docs/taskgraph.rst
-+++ /dev/null
-@@ -1,265 +0,0 @@
--======================
--TaskGraph Mach Command
--======================
--
--The task graph is built by linking different kinds of tasks together, pruning
--out tasks that are not required, then optimizing by replacing subgraphs with
--links to already-completed tasks.
--
--Concepts
----------
--
--* *Task Kind* - Tasks are grouped by kind, where tasks of the same kind do not
--  have interdependencies but have substantial similarities, and may depend on
--  tasks of other kinds.  Kinds are the primary means of supporting diversity,
--  in that a developer can add a new kind to do just about anything without
--  impacting other kinds.
--
-- * *Task Attributes* - Tasks have string attributes that can be used for
--  filtering.  Attributes are documented in :doc:`attributes`.
--
--* *Task Labels* - Each task has a unique identifier within the graph that is
--  stable across runs of the graph generation algorithm.  Labels are replaced
--  with TaskCluster TaskIds at the latest time possible, facilitating analysis
--  of graphs without distracting noise from randomly-generated taskIds.
--
--* *Optimization* - replacement of a task in a graph with an equivalent,
--  already-completed task, or a null task, avoiding repetition of work.
--
--Kinds
-------
--
--Kinds are the focal point of this system.  They provide an interface between
--the large-scale graph-generation process and the small-scale task-definition
--needs of different kinds of tasks.  Each kind may implement task generation
--differently.  Some kinds may generate task definitions entirely internally (for
--example, symbol-upload tasks are all alike, and very simple), while other kinds
--may do little more than parse a directory of YAML files.
--
--A ``kind.yml`` file contains data about the kind, as well as referring to a
--Python class implementing the kind in its ``implementation`` key.  That
--implementation may rely on lots of code shared with other kinds, or contain a
--completely unique implementation of some functionality.
--
--The full list of pre-defined keys in this file is:
--
--``implementation``
--   Class implementing this kind, in the form ``<module-path>:<object-path>``.
--   This class should be a subclass of ``taskgraph.kind.base:Kind``.
--
--``kind-dependencies``
--   Kinds which should be loaded before this one.  This is useful when the kind
--   will use the list of already-created tasks to determine which tasks to
--   create, for example adding an upload-symbols task after every build task.
--
--Any other keys are subject to interpretation by the kind implementation.
--
--The result is a nice segmentation of implementation so that the more esoteric
--in-tree projects can do their crazy stuff in an isolated kind without making
--the bread-and-butter build and test configuration more complicated.
--
--Dependencies
--------------
--
--Dependencies between tasks are represented as labeled edges in the task graph.
--For example, a test task must depend on the build task creating the artifact it
--tests, and this dependency edge is named 'build'.  The task graph generation
--process later resolves these dependencies to specific taskIds.
--
--Decision Task
---------------
--
--The decision task is the first task created when a new graph begins.  It is
--responsible for creating the rest of the task graph.
--
--The decision task for pushes is defined in-tree, in ``.taskcluster.yml``.  That
--task description invokes ``mach taskcluster decision`` with some metadata about
--the push.  That mach command determines the optimized task graph, then calls
--the TaskCluster API to create the tasks.
--
--Note that this mach command is *not* designed to be invoked directly by humans.
--Instead, use the mach commands described below, supplying ``parameters.yml``
--from a recent decision task.  These commands allow testing everything the
--decision task does except the command-line processing and the
--``queue.createTask`` calls.
--
--Graph Generation
------------------
--
--Graph generation, as run via ``mach taskgraph decision``, proceeds as follows:
--
--#. For all kinds, generate all tasks.  The result is the "full task set"
--#. Create dependency links between tasks using kind-specific mechanisms.  The
--   result is the "full task graph".
--#. Filter the target tasks (based on a series of filters, such as try syntax,
--   tree-specific specifications, etc). The result is the "target task set".
--#. Based on the full task graph, calculate the transitive closure of the target
--   task set.  That is, the target tasks and all requirements of those tasks.
--   The result is the "target task graph".
--#. Optimize the target task graph using task-specific optimization methods.
--   The result is the "optimized task graph" with fewer nodes than the target
--   task graph.  See :ref:`optimization`.
--#. Morph the graph. Morphs are like syntactic sugar: they keep the same meaning,
--   but express it in a lower-level way. These generally work around limitations
--   in the TaskCluster platform, such as number of dependencies or routes in
--   a task.
--#. Create tasks for all tasks in the morphed task graph.
--
--Transitive Closure
--..................
--
--Transitive closure is a fancy name for this sort of operation:
--
-- * start with a set of tasks
-- * add all tasks on which any of those tasks depend
-- * repeat until nothing changes
--
--The effect is this: imagine you start with a linux32 test job and a linux64 test job.
--In the first round, each test task depends on the test docker image task, so add that image task.
--Each test also depends on a build, so add the linux32 and linux64 build tasks.
--
--Then repeat: the test docker image task is already present, as are the build
--tasks, but those build tasks depend on the build docker image task.  So add
--that build docker image task.  Repeat again: this time, none of the tasks in
--the set depend on a task not in the set, so nothing changes and the process is
--complete.
--
--And as you can see, the graph we've built now includes everything we wanted
--(the test jobs) plus everything required to do that (docker images, builds).
--
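Expressed as code, the fixed point is a short loop. Here is a minimal sketch of the worked example above, assuming a plain mapping from task label to the labels it depends on (the real graph stores labeled edges, and the mapping below is invented for illustration):

.. code-block:: python

    dependencies = {
        'test-linux32': ['build-linux32', 'docker-image-test'],
        'test-linux64': ['build-linux64', 'docker-image-test'],
        'build-linux32': ['docker-image-build'],
        'build-linux64': ['docker-image-build'],
    }

    closure = {'test-linux32', 'test-linux64'}
    while True:
        added = {dep for task in closure
                 for dep in dependencies.get(task, ())} - closure
        if not added:
            break
        closure |= added

    print(sorted(closure))  # tests, builds, and both docker images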
--
--Action Tasks
--------------
--
--Action Tasks are tasks that help you schedule new jobs via Treeherder's
--"Add New Jobs" feature. The Decision Task creates a YAML file named
--``action.yml`` which can be used to schedule Action Tasks after suitably
--replacing ``{{decision_task_id}}`` and ``{{task_labels}}``, which correspond
--to the decision task ID of the push and a comma-separated list of task labels
--that need to be scheduled.
--
--This task invokes ``mach taskgraph action-task``, which builds up a task
--graph of the requested tasks. This graph is optimized against the tasks
--already created in the same push by the decision task.
--
--So for instance, if you had already requested a build task in the ``try`` command,
--and you wish to add a test which depends on this build, the original build task
--is re-used.
--
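For illustration, the placeholder replacement amounts to two string substitutions; the template string and values below are invented for the example.

.. code-block:: python

    def fill_action_template(template, decision_task_id, task_labels):
        """Substitute the two placeholders described above."""
        return (template
                .replace('{{decision_task_id}}', decision_task_id)
                .replace('{{task_labels}}', ','.join(task_labels)))

    print(fill_action_template(
        'decision: {{decision_task_id}}\nlabels: {{task_labels}}',
        'abcdef123456',
        ['build-linux64', 'test-linux64/opt-mochitest-1']))
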
--Action Tasks are currently scheduled by
--`pulse_actions <https://github.com/mozilla/pulse_actions>`_. This feature is
--only present on ``try`` pushes for now.
--
--Mach commands
---------------
--
--A number of mach subcommands are available aside from ``mach taskgraph
--decision`` to make this complex system more accessible to those trying to
--understand or modify it.  They allow you to run portions of the
--graph-generation process and output the results.
--
--``mach taskgraph tasks``
--   Get the full task set
--
--``mach taskgraph full``
--   Get the full task graph
--
--``mach taskgraph target``
--   Get the target task set
--
--``mach taskgraph target-graph``
--   Get the target task graph
--
--``mach taskgraph optimized``
--   Get the optimized task graph
--
--Each of these commands takes a ``--parameters`` option giving a file with
--parameters to guide the graph generation.  The decision task helpfully produces
--such a file on every run, and that is generally the easiest way to get a
--parameter file.  The parameter keys and values are described in
--:doc:`parameters`; using that information, you may modify an existing
--``parameters.yml`` or create your own.
--
--By default, the above commands will only output a list of task labels. Use
--the ``-J`` flag to output full task definitions. For example:
--
--.. code-block:: shell
--
--    $ ./mach taskgraph optimized -J -p ~/Downloads/parameters.yml
--
--See :doc:`how-tos` for further practical tips.
--
--Task Parameterization
-----------------------
--
--A few components of tasks are only known at the very end of the decision task
---- just before the ``queue.createTask`` call is made.  These are specified
--using simple parameterized values, as follows:
--
--``{"relative-datestamp": "certain number of seconds/hours/days/years"}``
--    Objects of this form will be replaced with an offset from the current time
--    just before the ``queue.createTask`` call is made.  For example, an
--    artifact expiration might be specified as ``{"relative-datestamp": "1
--    year"}``.
--
--``{"task-reference": "string containing <dep-name>"}``
--    The task definition may contain "task references" of this form.  These will
--    be replaced during the optimization step, with the appropriate taskId for
--    the named dependency substituted for ``<dep-name>`` in the string.
--    Multiple labels may be substituted in a single string, and ``<<>`` can be
--    used to escape a literal ``<``.
--
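A minimal sketch of how these two forms could be resolved; the helper names and the unit table are assumptions for this illustration, not the real implementation.

.. code-block:: python

    import re
    from datetime import datetime, timedelta

    # Seconds per unit; invented for this sketch.
    UNITS = {'second': 1, 'hour': 3600, 'day': 86400, 'year': 31536000}

    def relative_datestamp(spec, now=None):
        """Turn e.g. '1 year' into an ISO timestamp offset from now."""
        now = now or datetime.utcnow()
        count, unit = spec.split()
        seconds = int(count) * UNITS[unit.rstrip('s')]
        return (now + timedelta(seconds=seconds)).isoformat() + 'Z'

    def resolve_task_references(text, dep_task_ids):
        """Replace <dep-name> with taskIds; '<<>' escapes a literal '<'."""
        def repl(match):
            token = match.group(1)
            return '<' if token == '<' else dep_task_ids[token]
        return re.sub(r'<([^>]+)>', repl, text)

    print(relative_datestamp('1 year'))
    print(resolve_task_references('artifact from <build>', {'build': 'abc123'}))
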
--Taskgraph JSON Format
-----------------------
--
--Task graphs -- both the graph artifacts produced by the decision task and those
--output by the ``--json`` option to the ``mach taskgraph`` commands -- are JSON
--objects, keyed by label, or for optimized task graphs, by taskId.  For
--convenience, the decision task also writes out ``label-to-taskid.json``
--containing a mapping from label to taskId.  Each task in the graph is
--represented as a JSON object.
--
--Each task has the following properties:
--
--``kind``
--   The name of this task's kind
--
--``task_id``
--   The task's taskId (only for optimized task graphs)
--
--``label``
--   The task's label
--
--``attributes``
--   The task's attributes
--
--``dependencies``
--   The task's in-graph dependencies, represented as an object mapping
--   dependency name to label (or to taskId for optimized task graphs)
--
--``optimizations``
--   The optimizations to be applied to this task
--
--``task``
--   The task's TaskCluster task definition.
--
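For reference, here is an illustrative entry from a non-optimized graph, shaped according to the property list above; all values are invented.

.. code-block:: python

    import json

    graph = {
        'test-linux64/opt-mochitest-1': {
            'kind': 'test',
            'label': 'test-linux64/opt-mochitest-1',
            'attributes': {'kind': 'test', 'unittest_suite': 'mochitest'},
            'dependencies': {'build': 'build-linux64/opt'},
            'optimizations': [],
            'task': {'metadata': {'name': 'test-linux64/opt-mochitest-1'}},
        },
    }
    print(json.dumps(graph, indent=2, sort_keys=True))
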
--The results from each command are in the same format, but with some differences
--in the content:
--
--* The ``tasks`` and ``target`` subcommands both return graphs with no edges.
--  That is, just collections of tasks without any dependencies indicated.
--
--* The ``optimized`` subcommand returns tasks that have been assigned taskIds.
--  The dependencies array, too, contains taskIds instead of labels, with
--  dependencies on optimized tasks omitted.  However, the ``task.dependencies``
--  array is populated with the full list of dependency taskIds.  All task
--  references are resolved in the optimized graph.
--
--The output of the ``mach taskgraph`` commands is suitable for processing with
--the `jq <https://stedolan.github.io/jq/>`_ utility.  For example, to extract all
--tasks' labels and their dependencies:
--
--.. code-block:: shell
--
--    jq 'to_entries | map({label: .value.label, dependencies: .value.dependencies})'
--
-diff --git a/taskcluster/docs/transforms.rst b/taskcluster/docs/transforms.rst
-deleted file mode 100644
---- a/taskcluster/docs/transforms.rst
-+++ /dev/null
-@@ -1,210 +0,0 @@
--Transforms
--==========
--
--Many task kinds generate tasks by a process of transforming job descriptions
--into task definitions.  The basic operation is simple, although the sequence of
--transforms applied for a particular kind may not be!
--
--Overview
----------
--
--To begin, a kind implementation generates a collection of items; see
--:doc:`loading`.  The items are simply Python dictionaries, and describe
--"semantically" what the resulting task or tasks should do.
--
--The kind also defines a sequence of transformations.  These are applied, in
--order, to each item.  Early transforms might apply default values or break
--items up into smaller items (for example, chunking a test suite).  Later
--transforms rewrite the items entirely, with the final result being a task
--definition.
--
--Transform Functions
--...................
--
--Each transformation looks like this:
--
--.. code-block:: python
--
--    @transforms.add
--    def transform_an_item(config, items):
--        """This transform ..."""  # always a docstring!
--        for item in items:
--            # ..
--            yield item
--
--The ``config`` argument is a Python object containing useful configuration for
--the kind, and is a subclass of
--:class:`taskgraph.transforms.base.TransformConfig`, which specifies a few of
--its attributes.  Kinds may subclass and add additional attributes if necessary.
--
--While most transforms yield one item for each item consumed, this is not always
--true: items that are not yielded are effectively filtered out.  Yielding
--multiple items for each consumed item implements item duplication; this is how
--test chunking is accomplished, for example.
--
--The ``transforms`` object is an instance of
--:class:`taskgraph.transforms.base.TransformSequence`, which serves as a simple
--mechanism to combine a sequence of transforms into one.
--
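A minimal sketch of such a combinator, assuming only the decorator behavior described above; the real class in ``taskgraph.transforms.base`` also carries validation hooks.

.. code-block:: python

    class TransformSequence(object):
        """Combine transform functions into one callable."""

        def __init__(self):
            self._transforms = []

        def add(self, func):
            # used as a decorator: @transforms.add
            self._transforms.append(func)
            return func

        def __call__(self, config, items):
            for transform in self._transforms:
                items = transform(config, items)
            return items

    transforms = TransformSequence()

    @transforms.add
    def set_defaults(config, items):
        for item in items:
            item.setdefault('priority', 'low')
            yield item

    print(list(transforms(None, [{'label': 'build-linux64'}])))
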
--Schemas
--.......
--
--The items used in transforms are validated against some simple schemas at
--various points in the transformation process.  These schemas accomplish two
--things: they provide a place to add comments about the meaning of each field,
--and they enforce that the fields are actually used in the documented fashion.
--
--Keyed By
--........
--
--Several fields in the input items can be "keyed by" another value in the item.
--For example, a test description's chunks may be keyed by ``test-platform``.
--In the item, this looks like:
--
--.. code-block:: yaml
--
--    chunks:
--        by-test-platform:
--            linux64/debug: 12
--            linux64/opt: 8
--            android.*: 14
--            default: 10
--
--This is a simple but powerful way to encode business rules in the items
--provided as input to the transforms, rather than expressing those rules in the
--transforms themselves.  If you are implementing a new business rule, prefer
--this mode where possible.  The structure is easily resolved to a single value
--using :func:`taskgraph.transform.base.resolve_keyed_by`.
--
--Exact matches are used immediately.  If no exact matches are found, each
--alternative is treated as a regular expression, matched against the whole
--value.  Thus ``android.*`` would match ``android-api-16/debug``.  If nothing
--matches as a regular expression, but there is a ``default`` alternative, it is
--used.  Otherwise, an exception is raised and graph generation stops.
--
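A simplified sketch of that resolution order (exact match, then whole-value regular expression, then ``default``); the helper referenced above carries more bookkeeping than this.

.. code-block:: python

    import re

    def resolve(alternatives, key):
        if key in alternatives:
            return alternatives[key]
        for pattern, value in alternatives.items():
            if pattern != 'default' and re.match(pattern + '$', key):
                return value
        if 'default' in alternatives:
            return alternatives['default']
        raise Exception('no match for %r' % key)

    chunks = {'linux64/debug': 12, 'linux64/opt': 8,
              'android.*': 14, 'default': 10}
    print(resolve(chunks, 'android-api-16/debug'))  # 14, via the regex
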
--Organization
---------------
--
--Task creation operates broadly in a few phases, with the interfaces of those
--stages defined by schemas.  The process begins with the raw data structures
--parsed from the YAML files in the kind configuration.  This data can be
--processed by kind-specific transforms resulting, for test jobs, in a "test description".
--For non-test jobs, the next step is a "job description".  These transformations
--may also "duplicate" tasks, for example to implement chunking or several
--variations of the same task.
--
--In any case, shared transforms then convert this into a "task description",
--which the task-generation transforms then convert into a task definition
--suitable for ``queue.createTask``.
--
--Test Descriptions
-------------------
--
--Test descriptions specify how to run a unittest or talos run.  They aim to
--describe this abstractly, although in many cases the unique nature of
--invocation on different platforms leaves a lot of specific behavior in the test
--description, divided by ``by-test-platform``.
--
--Test descriptions are validated to conform to the schema in
--``taskcluster/taskgraph/transforms/tests.py``.  This schema is extensively
--documented and is the primary reference for anyone modifying tests.
--
--The output of ``tests.py`` is a task description.  Test dependencies are
--produced in the form of a dictionary mapping dependency name to task label.
--
--Job Descriptions
------------------
--
--A job description says what to run in the task.  It is a combination of a
--``run`` section and all of the fields from a task description.  The run section
--has a ``using`` property that defines how this task should be run; for example,
--``mozharness`` to run a mozharness script, or ``mach`` to run a mach command.
--The remainder of the run section is specific to the run-using implementation.
--
--The effect of a job description is to say "run this thing on this worker".  The
--job description must contain enough information about the worker to identify
--the workerType and the implementation (docker-worker, generic-worker, etc.).
--Alternatively, job descriptions can specify the ``platforms`` field in
--conjunction with the ``by-platform`` key to specify multiple workerTypes and
--implementations. Any other task-description information is passed along
--verbatim, although it is augmented by the run-using implementation.
--
--The run-using implementations are all located in
--``taskcluster/taskgraph/transforms/job``, along with the schemas for their
--implementations.  Those well-commented source files are the canonical
--documentation for what constitutes a job description, and should be considered
--part of the documentation.
--
--The following ``run-using`` values are available:
--
--  * ``hazard``
--  * ``mach``
--  * ``mozharness``
--  * ``mozharness-test``
--  * ``run-task``
--  * ``spidermonkey`` or ``spidermonkey-package`` or ``spidermonkey-mozjs-crate``
--  * ``toolchain-script``
--
--
--Task Descriptions
-------------------
--
--Every kind needs to create tasks, and all of those tasks have some things in
--common.  They all run on one of a small set of worker implementations, each
--with its own idiosyncrasies.  And they all report to TreeHerder in a similar
--way.
--
--The transforms in ``taskcluster/taskgraph/transforms/task.py`` implement
--this common functionality.  They expect a "task description", and produce a
--task definition.  The schema for a task description is defined at the top of
--``task.py``, with copious comments.  Go forth and read it now!
--
--In general, the task-description transforms handle functionality that is common
--to all Gecko tasks.  While the schema is the definitive reference, the
--functionality includes:
--
--* TreeHerder metadata
--
--* Build index routes
--
--* Information about the projects on which this task should run
--
--* Optimizations
--
--* Defaults for ``expires-after`` and ``deadline-after``, based on project
--
--* Worker configuration
--
--The parts of the task description that are specific to a worker implementation
--are isolated in a ``task_description['worker']`` object which has an
--``implementation`` property naming the worker implementation.  Each worker
--implementation has its own section of the schema describing the fields it
--expects.  Thus the transforms that produce a task description must be aware of
--the worker implementation to be used, but need not be aware of the details of
--its payload format.
--
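An illustrative fragment of a task description showing that isolation; the field values below are invented for the example.

.. code-block:: python

    task_description_fragment = {
        'label': 'build-linux64/opt',
        'worker-type': 'aws-provisioner-v1/gecko-3-b-linux',
        'worker': {
            # names the payload format; everything else in this object is
            # specific to that implementation
            'implementation': 'docker-worker',
            'docker-image': {'in-tree': 'desktop-build'},
            'max-run-time': 7200,
        },
    }
    print(task_description_fragment['worker']['implementation'])
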
--The ``task.py`` file also contains a dictionary mapping TreeHerder group
--symbols to group names.  Feel free to add additional groups to this
--dictionary as necessary.
--
--Signing Descriptions
----------------------
--
--Each signing kind is passed a single dependent job (from its kind
--dependencies) to act on.
--
--The transforms in ``taskcluster/taskgraph/transforms/signing.py`` implement
--this common functionality.  They expect a "signing description", and produce a
--task definition.  The schema for a signing description is defined at the top of
--``signing.py``, with copious comments.
--
--In particular, you define a set of upstream artifact URLs (that point at the
--dependent task) and can optionally provide a dependent name (defaulting to
--``build``) for use in task references, plus the signing formats to use.
--
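An illustrative signing description along those lines; the exact field names are defined by the schema in ``signing.py``, so treat the keys below as assumptions for this sketch.

.. code-block:: python

    signing_description = {
        'label': 'signing-linux64/opt',
        'dependent-task': 'build',  # optional; 'build' is the stated default
        'upstream-artifacts': [{
            'taskId': {'task-reference': '<build>'},
            'paths': ['public/build/target.tar.bz2'],
            'formats': ['gpg'],
        }],
    }
    print(signing_description['upstream-artifacts'][0]['formats'])
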
--More Detail
-------------
--
--The source files provide lots of additional detail, both in the code itself and
--in the comments and docstrings.  For the next level of detail beyond this file,
--consult the transform source under ``taskcluster/taskgraph/transforms``.
-diff --git a/taskcluster/docs/yaml-templates.rst b/taskcluster/docs/yaml-templates.rst
-deleted file mode 100644
---- a/taskcluster/docs/yaml-templates.rst
-+++ /dev/null
-@@ -1,49 +0,0 @@
--Task Definition YAML Templates
--==============================
--
--A few kinds of tasks are described using templated YAML files.  These files
--allow some limited forms of inheritance and template substitution as well as
--the usual YAML features, as described below.
--
--Please do not use these features in new kinds.  If you are tempted to use
--variable substitution over a YAML file to define tasks, please instead
--implement a new kind-specific transform to accomplish your goal.  For example,
--if the current push-id must be included as an argument in
--``task.payload.command``, write a transform function that makes that assignment
--while building a job description, rather than parameterizing that value in the
--input to the transforms.
--
--Inheritance
-------------
--
--One YAML file can "inherit" from another by including a top-level ``$inherits``
--key.  That key specifies the parent file in ``from``, and optionally a
--collection of variables in ``variables``.  For example:
--
--.. code-block:: yaml
--
--    $inherits:
--      from: 'tasks/builds/base_linux32.yml'
--      variables:
--        build_name: 'linux32'
--        build_type: 'dbg'
--
--Inheritance proceeds as follows: First, the child document has its template
--substitutions performed and is parsed as YAML.  Then, the parent document is
--parsed, with substitutions specified by ``variables`` added to the template
--substitutions.  Finally, the child document is merged with the parent.
--
--To merge two JSON objects (dictionaries), each value is merged individually.
--Lists are merged by concatenating the lists from the parent and child
--documents.  Atomic values (strings, numbers, etc.) are merged by preferring the
--child document's value.
--
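A minimal sketch of those merge rules (per-key for objects, concatenation for lists, child-wins for atomic values); the data is invented for the example.

.. code-block:: python

    def merge(parent, child):
        if isinstance(parent, dict) and isinstance(child, dict):
            merged = dict(parent)
            for key, value in child.items():
                merged[key] = (merge(parent[key], value)
                               if key in parent else value)
            return merged
        if isinstance(parent, list) and isinstance(child, list):
            return parent + child
        return child  # atomic values: the child document wins

    parent = {'payload': {'env': {'MOZ': '1'}}, 'routes': ['index.a']}
    child = {'payload': {'env': {'DEBUG': '1'}}, 'routes': ['index.b']}
    print(merge(parent, child))
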
--Substitution
--------------
--
--Each document is expanded using the PyStache template engine before it is
--parsed as YAML.  The parameters for this expansion are specific to the task
--kind.
--
--Simple value substitution looks like ``{{variable}}``.  Function calls look
--like ``{{#function}}argument{{/function}}``.
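
For example, both forms can be exercised directly with the third-party ``pystache`` library, assuming its standard lambda handling for section tags; the template and context below are invented.

.. code-block:: python

    import pystache  # third-party: pip install pystache

    template = 'name: {{build_name}}, rev: {{#short}}0123456789abcdef{{/short}}'
    context = {
        'build_name': 'linux32',
        # a "function call": the section text is passed to the lambda
        'short': lambda text: text[:8],
    }
    print(pystache.render(template, context))
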
-diff --git a/taskcluster/mach_commands.py b/taskcluster/mach_commands.py
-deleted file mode 100644
---- a/taskcluster/mach_commands.py
-+++ /dev/null
-@@ -1,506 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import json
--import logging
--import os
--import sys
--import traceback
--import re
--
--from mach.decorators import (
--    CommandArgument,
--    CommandProvider,
--    Command,
--    SubCommand,
--)
--
--from mozbuild.base import MachCommandBase
--
--
--class ShowTaskGraphSubCommand(SubCommand):
--    """A SubCommand with TaskGraph-specific arguments"""
--
--    def __call__(self, func):
--        after = SubCommand.__call__(self, func)
--        args = [
--            CommandArgument('--root', '-r', default='taskcluster/ci',
--                            help="root of the taskgraph definition relative to topsrcdir"),
--            CommandArgument('--quiet', '-q', action="store_true",
--                            help="suppress all logging output"),
--            CommandArgument('--verbose', '-v', action="store_true",
--                            help="include debug-level logging output"),
--            CommandArgument('--json', '-J', action="store_const",
--                            dest="format", const="json",
--                            help="Output task graph as a JSON object"),
--            CommandArgument('--labels', '-L', action="store_const",
--                            dest="format", const="labels",
--                            help="Output the label for each task in the task graph (default)"),
--            CommandArgument('--parameters', '-p', default="project=mozilla-central",
--                            help="parameters file (.yml or .json; see "
--                                 "`taskcluster/docs/parameters.rst`)"),
--            CommandArgument('--no-optimize', dest="optimize", action="store_false",
--                            default=True,
--                            help="do not remove tasks from the graph that are found in the "
--                            "index (a.k.a. optimize the graph)"),
--            CommandArgument('--tasks-regex', '--tasks', default=None,
--                            help="only return tasks with labels matching this regular "
--                            "expression.")
--
--        ]
--        for arg in args:
--            after = arg(after)
--        return after
--
--
--@CommandProvider
--class MachCommands(MachCommandBase):
--
--    @Command('taskgraph', category="ci",
--             description="Manipulate TaskCluster task graphs defined in-tree")
--    def taskgraph(self):
--        """The taskgraph subcommands all relate to the generation of task graphs
--        for Gecko continuous integration.  A task graph is a set of tasks linked
--        by dependencies: for example, a binary must be built before it is tested,
--        and that build may further depend on various toolchains, libraries, etc.
--        """
--
--    @ShowTaskGraphSubCommand('taskgraph', 'tasks',
--                             description="Show all tasks in the taskgraph")
--    def taskgraph_tasks(self, **options):
--        return self.show_taskgraph('full_task_set', options)
--
--    @ShowTaskGraphSubCommand('taskgraph', 'full',
--                             description="Show the full taskgraph")
--    def taskgraph_full(self, **options):
--        return self.show_taskgraph('full_task_graph', options)
--
--    @ShowTaskGraphSubCommand('taskgraph', 'target',
--                             description="Show the target task set")
--    def taskgraph_target(self, **options):
--        return self.show_taskgraph('target_task_set', options)
--
--    @ShowTaskGraphSubCommand('taskgraph', 'target-graph',
--                             description="Show the target taskgraph")
--    def taskgraph_target_taskgraph(self, **options):
--        return self.show_taskgraph('target_task_graph', options)
--
--    @ShowTaskGraphSubCommand('taskgraph', 'optimized',
--                             description="Show the optimized taskgraph")
--    def taskgraph_optimized(self, **options):
--        return self.show_taskgraph('optimized_task_graph', options)
--
--    @ShowTaskGraphSubCommand('taskgraph', 'morphed',
--                             description="Show the morphed taskgraph")
--    def taskgraph_morphed(self, **options):
--        return self.show_taskgraph('morphed_task_graph', options)
--
--    @SubCommand('taskgraph', 'decision',
--                description="Run the decision task")
--    @CommandArgument('--root', '-r',
--                     default='taskcluster/ci',
--                     help="root of the taskgraph definition relative to topsrcdir")
--    @CommandArgument('--base-repository',
--                     required=True,
--                     help='URL for "base" repository to clone')
--    @CommandArgument('--head-repository',
--                     required=True,
--                     help='URL for "head" repository to fetch revision from')
--    @CommandArgument('--head-ref',
--                     required=True,
--                     help='Reference (this is same as rev usually for hg)')
--    @CommandArgument('--head-rev',
--                     required=True,
--                     help='Commit revision to use from head repository')
--    @CommandArgument('--message',
--                     required=True,
--                     help='Commit message to be parsed. Example: "try: -b do -p all -u all"')
--    @CommandArgument('--project',
--                     required=True,
--                     help='Project to use for creating task graph. Example: --project=try')
--    @CommandArgument('--pushlog-id',
--                     dest='pushlog_id',
--                     required=True,
--                     default=0)
--    @CommandArgument('--pushdate',
--                     dest='pushdate',
--                     required=True,
--                     type=int,
--                     default=0)
--    @CommandArgument('--owner',
--                     required=True,
--                     help='email address of who owns this graph')
--    @CommandArgument('--level',
--                     required=True,
--                     help='SCM level of this repository')
--    @CommandArgument('--target-tasks-method',
--                     help='method for selecting the target tasks to generate')
--    def taskgraph_decision(self, **options):
--        """Run the decision task: generate a task graph and submit to
--        TaskCluster.  This is only meant to be called within decision tasks,
--        and requires a great many arguments.  Commands like `mach taskgraph
--        optimized` are better suited to use on the command line, and can take
--        the parameters file generated by a decision task.  """
--
--        import taskgraph.decision
--        try:
--            self.setup_logging()
--            return taskgraph.decision.taskgraph_decision(options)
--        except Exception:
--            traceback.print_exc()
--            sys.exit(1)
--
--    @SubCommand('taskgraph', 'action-task',
--                description="Run the add-tasks task. DEPRECATED! Use 'add-tasks' instead.")
--    @CommandArgument('--root', '-r',
--                     default='taskcluster/ci',
--                     help="root of the taskgraph definition relative to topsrcdir")
--    @CommandArgument('--decision-id',
--                     required=True,
--                     help="Decision Task ID of the reference decision task")
--    @CommandArgument('--task-labels',
--                     required=True,
--                     help='Comma separated list of task labels to be scheduled')
--    def taskgraph_action(self, **options):
--        """Run the action task: Generates a task graph using the set of labels
--        provided in the task-labels parameter. It uses the full-task file of
--        the gecko decision task."""
--
--        import taskgraph.action
--        try:
--            self.setup_logging()
--            return taskgraph.action.add_tasks(options['decision_id'],
--                                              options['task_labels'].split(','))
--        except Exception:
--            traceback.print_exc()
--            sys.exit(1)
--
--    @SubCommand('taskgraph', 'add-tasks',
--                description="Run the add-tasks task")
--    @CommandArgument('--root', '-r',
--                     default='taskcluster/ci',
--                     help="root of the taskgraph definition relative to topsrcdir")
--    @CommandArgument('--decision-id',
--                     required=True,
--                     help="Decision Task ID of the reference decision task")
--    @CommandArgument('--task-labels',
--                     required=True,
--                     help='Comma separated list of task labels to be scheduled')
--    def taskgraph_add_tasks(self, **options):
--        """Run the action task: Generates a task graph using the set of labels
--        provided in the task-labels parameter. It uses the full-task file of
--        the gecko decision task."""
--
--        import taskgraph.action
--        try:
--            self.setup_logging()
--            return taskgraph.action.add_tasks(options['decision_id'],
--                                              options['task_labels'].split(','))
--        except Exception:
--            traceback.print_exc()
--            sys.exit(1)
--
--    @SubCommand('taskgraph', 'backfill',
--                description="Run the backfill task")
--    @CommandArgument('--root', '-r',
--                     default='taskcluster/ci',
--                     help="root of the taskgraph definition relative to topsrcdir")
--    @CommandArgument('--project',
--                     required=True,
--                     help="Project of the jobs that need to be backfilled.")
--    @CommandArgument('--job-id',
--                     required=True,
--                     help="Id of the job to be backfilled.")
--    def taskgraph_backfill(self, **options):
--        """Run the backfill task: Given a job in a project, it will
--        add that job type to any previous revisions in treeherder
--        until either a hard limit is met or a green version of that
--        job is found."""
--
--        import taskgraph.action
--        try:
--            self.setup_logging()
--            return taskgraph.action.backfill(options['project'], options['job_id'])
--        except Exception:
--            traceback.print_exc()
--            sys.exit(1)
--
--    @SubCommand('taskgraph', 'cron',
--                description="Run the cron task")
--    @CommandArgument('--base-repository',
--                     required=True,
--                     help='URL for "base" repository to clone')
--    @CommandArgument('--head-repository',
--                     required=True,
--                     help='URL for "head" repository to fetch')
--    @CommandArgument('--head-ref',
--                     required=True,
--                     help='Reference to fetch in head-repository (usually "default")')
--    @CommandArgument('--project',
--                     required=True,
--                     help='Project to use for creating tasks. Example: --project=mozilla-central')
--    @CommandArgument('--level',
--                     required=True,
--                     help='SCM level of this repository')
--    @CommandArgument('--force-run',
--                     required=False,
--                     help='If given, force this cronjob to run regardless of time, '
--                     'and run no others')
--    @CommandArgument('--no-create',
--                     required=False,
--                     action='store_true',
--                     help='Do not actually create tasks')
--    def taskgraph_cron(self, **options):
--        """Run the cron task; this task creates zero or more decision tasks.  It is run
--        from the hooks service on a regular basis."""
--        import taskgraph.cron
--        try:
--            self.setup_logging()
--            return taskgraph.cron.taskgraph_cron(options)
--        except Exception:
--            traceback.print_exc()
--            sys.exit(1)
--
--    @SubCommand('taskgraph', 'add-talos',
--                description="Run the add-talos task")
--    @CommandArgument('--root', '-r',
--                     default='taskcluster/ci',
--                     help="root of the taskgraph definition relative to topsrcdir")
--    @CommandArgument('--decision-task-id',
--                     required=True,
--                     help="Id of the decision task that is part of the push to be talos'd")
--    @CommandArgument('--times',
--                     required=False,
--                     default=1,
--                     type=int,
--                     help="Number of times to add each job.")
--    def taskgraph_add_talos(self, **options):
--        """Add all talos jobs for a push."""
--
--        import taskgraph.action
--        try:
--            self.setup_logging()
--            return taskgraph.action.add_talos(options['decision_task_id'], options['times'])
--        except Exception:
--            traceback.print_exc()
--            sys.exit(1)
--
--    @SubCommand('taskgraph', 'action-callback',
--                description='Run action callback used by action tasks')
--    def action_callback(self, **options):
--        import taskgraph.actions
--        try:
--            self.setup_logging()
--
--            task_group_id = os.environ.get('ACTION_TASK_GROUP_ID', None)
--            task_id = json.loads(os.environ.get('ACTION_TASK_ID', 'null'))
--            task = json.loads(os.environ.get('ACTION_TASK', 'null'))
--            input = json.loads(os.environ.get('ACTION_INPUT', 'null'))
--            callback = os.environ.get('ACTION_CALLBACK', None)
--            parameters = json.loads(os.environ.get('ACTION_PARAMETERS', '{}'))
--
--            return taskgraph.actions.trigger_action_callback(
--                    task_group_id=task_group_id,
--                    task_id=task_id,
--                    task=task,
--                    input=input,
--                    callback=callback,
--                    parameters=parameters,
--                    test=False)
--        except Exception:
--            traceback.print_exc()
--            sys.exit(1)
--
--    @SubCommand('taskgraph', 'test-action-callback',
--                description='Run an action callback in a testing mode')
--    @CommandArgument('--parameters', '-p', default='project=mozilla-central',
--                     help='parameters file (.yml or .json; see '
--                          '`taskcluster/docs/parameters.rst`)')
--    @CommandArgument('--task-id', default=None,
--                     help='TaskId to which the action applies')
--    @CommandArgument('--task-group-id', default=None,
--                     help='TaskGroupId to which the action applies')
--    @CommandArgument('--input', default=None,
--                     help='Action input (.yml or .json)')
--    @CommandArgument('--task', default=None,
--                     help='Task definition (.yml or .json; if omitted, the task will be '
--                          'fetched from the queue)')
--    @CommandArgument('callback', default=None,
--                     help='Action callback name (Python function name)')
--    def test_action_callback(self, **options):
--        import taskgraph.parameters
--        from taskgraph.util.taskcluster import get_task_definition
--        import taskgraph.actions
--        import yaml
--
--        def load_data(filename):
--            with open(filename) as f:
--                if filename.endswith('.yml'):
--                    return yaml.safe_load(f)
--                elif filename.endswith('.json'):
--                    return json.load(f)
--                else:
--                    raise Exception("unknown filename {}".format(filename))
--
--        try:
--            self.setup_logging()
--            task_id = options['task_id']
--            if options['task']:
--                task = load_data(options['task'])
--            elif task_id:
--                task = get_task_definition(task_id)
--            else:
--                task = None
--
--            if options['input']:
--                input = load_data(options['input'])
--            else:
--                input = None
--
--            parameters = taskgraph.parameters.load_parameters_file(options['parameters'])
--            parameters.check()
--
--            return taskgraph.actions.trigger_action_callback(
--                    task_group_id=options['task_group_id'],
--                    task_id=task_id,
--                    task=task,
--                    input=input,
--                    callback=options['callback'],
--                    parameters=parameters,
--                    test=True)
--        except Exception:
--            traceback.print_exc()
--            sys.exit(1)
--
--    def setup_logging(self, quiet=False, verbose=True):
--        """
--        Set up Python logging for all loggers, sending results to stderr (so
--        that command output can be redirected easily) and adding the typical
--        mach timestamp.
--        """
--        # remove the old terminal handler
--        old = self.log_manager.replace_terminal_handler(None)
--
--        # re-add it, with level and fh set appropriately
--        if not quiet:
--            level = logging.DEBUG if verbose else logging.INFO
--            self.log_manager.add_terminal_logging(
--                fh=sys.stderr, level=level,
--                write_interval=old.formatter.write_interval,
--                write_times=old.formatter.write_times)
--
--        # all of the taskgraph logging is unstructured logging
--        self.log_manager.enable_unstructured()
--
--    def show_taskgraph(self, graph_attr, options):
--        import taskgraph.parameters
--        import taskgraph.target_tasks
--        import taskgraph.generator
--
--        try:
--            self.setup_logging(quiet=options['quiet'], verbose=options['verbose'])
--            parameters = taskgraph.parameters.load_parameters_file(options['parameters'])
--            parameters.check()
--
--            tgg = taskgraph.generator.TaskGraphGenerator(
--                root_dir=options['root'],
--                parameters=parameters)
--
--            tg = getattr(tgg, graph_attr)
--
--            show_method = getattr(self, 'show_taskgraph_' + (options['format'] or 'labels'))
--            tg = self.get_filtered_taskgraph(tg, options["tasks_regex"])
--            show_method(tg)
--        except Exception:
--            traceback.print_exc()
--            sys.exit(1)
--
--    def show_taskgraph_labels(self, taskgraph):
--        for label in taskgraph.graph.visit_postorder():
--            print(label)
--
--    def show_taskgraph_json(self, taskgraph):
--        print(json.dumps(taskgraph.to_json(),
--              sort_keys=True, indent=2, separators=(',', ': ')))
--
--    def get_filtered_taskgraph(self, taskgraph, tasksregex):
--        """
--        Filter the tasks in the given taskgraph against a regular expression
--        and return a new TaskGraph object containing only the matching tasks.
--        """
--        from taskgraph.graph import Graph
--        from taskgraph.taskgraph import TaskGraph
--        # return original taskgraph if no regular expression is passed
--        if not tasksregex:
--            return taskgraph
--        named_links_dict = taskgraph.graph.named_links_dict()
--        filteredtasks = {}
--        filterededges = set()
--        regexprogram = re.compile(tasksregex)
--
--        for key in taskgraph.graph.visit_postorder():
--            task = taskgraph.tasks[key]
--            if regexprogram.match(task.label):
--                filteredtasks[key] = task
--                for depname, dep in named_links_dict[key].iteritems():
--                    if regexprogram.match(dep):
--                        filterededges.add((key, dep, depname))
--        filtered_taskgraph = TaskGraph(filteredtasks, Graph(set(filteredtasks), filterededges))
--        return filtered_taskgraph
--
--
--@CommandProvider
--class TaskClusterImagesProvider(object):
--    @Command('taskcluster-load-image', category="ci",
--             description="Load a pre-built Docker image")
--    @CommandArgument('--task-id',
--                     help="Load the image at public/image.tar.zst in this task, "
--                          "rather than searching the index")
--    @CommandArgument('-t', '--tag',
--                     help="tag that the image should be loaded as. If not "
--                          "given, the image will be loaded with the tag from the tarball",
--                     metavar="name:tag")
--    @CommandArgument('image_name', nargs='?',
--                     help="Load the image of this name based on the current "
--                          "contents of the tree (as built for mozilla-central "
--                          "or mozilla-inbound)")
--    def load_image(self, image_name, task_id, tag):
--        from taskgraph.docker import load_image_by_name, load_image_by_task_id
--        if not image_name and not task_id:
--            print("Specify either IMAGE-NAME or TASK-ID")
--            sys.exit(1)
--        try:
--            if task_id:
--                ok = load_image_by_task_id(task_id, tag)
--            else:
--                ok = load_image_by_name(image_name, tag)
--            if not ok:
--                sys.exit(1)
--        except Exception:
--            traceback.print_exc()
--            sys.exit(1)
--
--    @Command('taskcluster-build-image', category='ci',
--             description='Build a Docker image')
--    @CommandArgument('image_name',
--                     help='Name of the image to build')
--    @CommandArgument('--context-only',
--                     help="File name the context tarball should be written to; "
--                          "with this option it will only build the context.tar.",
--                     metavar='context.tar')
--    def build_image(self, image_name, context_only):
--        from taskgraph.docker import build_image, build_context
--        try:
--            if context_only is None:
--                build_image(image_name)
--            else:
--                build_context(image_name, context_only)
--        except Exception:
--            traceback.print_exc()
--            sys.exit(1)
-diff --git a/taskcluster/moz.build b/taskcluster/moz.build
-deleted file mode 100644
---- a/taskcluster/moz.build
-+++ /dev/null
-@@ -1,29 +0,0 @@
--# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
--# vim: set filetype=python:
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--with Files('**'):
--    BUG_COMPONENT = ('Taskcluster', 'General')
--
--with Files('ci/**'):
--    BUG_COMPONENT = ('Taskcluster', 'Task Configuration')
--
--with Files('docker/**'):
--    BUG_COMPONENT = ('Taskcluster', 'Docker Images')
--
--with Files('docs/**'):
--    BUG_COMPONENT = ('Taskcluster', 'Documentation')
--
--#NOTE: scripts/* files included in docker images
--with Files('scripts/**'):
--    BUG_COMPONENT = ('Taskcluster', 'Docker Images')
--
--with Files('taskgraph/**'):
--    BUG_COMPONENT = ('Taskcluster', 'Task Configuration')
--
--PYTHON_UNITTEST_MANIFESTS += [
--    'taskgraph/test/python.ini',
--]
--SPHINX_TREES['taskcluster'] = 'docs'
-diff --git a/taskcluster/scripts/builder/build-haz-linux.sh b/taskcluster/scripts/builder/build-haz-linux.sh
-deleted file mode 100755
---- a/taskcluster/scripts/builder/build-haz-linux.sh
-+++ /dev/null
-@@ -1,88 +0,0 @@
--#!/bin/bash -ex
--
--function usage() {
--    echo "Usage: $0 [--project <shell|browser>] <workspace-dir> flags..."
--    echo "flags are treated the same way as a commit message would be"
--    echo "(as in, they are scanned for directives just like a try: ... line)"
--}
--
--PROJECT=shell
--WORKSPACE=
--DO_TOOLTOOL=1
--while [[ $# -gt 0 ]]; do
--    if [[ "$1" == "-h" ]] || [[ "$1" == "--help" ]]; then
--        usage
--        exit 0
--    elif [[ "$1" == "--project" ]]; then
--        shift
--        PROJECT="$1"
--        shift
--    elif [[ "$1" == "--no-tooltool" ]]; then
--        shift
--        DO_TOOLTOOL=
--    elif [[ -z "$WORKSPACE" ]]; then
--        WORKSPACE=$( cd "$1" && pwd )
--        shift
--        break
--    fi
--done
--
--SCRIPT_FLAGS="$@"
--
--# Ensure all the scripts in this dir are on the path....
--DIRNAME=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
--PATH=$DIRNAME:$PATH
--
--# Use GECKO_BASE_REPOSITORY as a signal for whether we are running in automation.
--export AUTOMATION=${GECKO_BASE_REPOSITORY:+1}
--
--: ${GECKO_DIR:=$WORKSPACE/gecko}
--: ${TOOLTOOL_CACHE:=$WORKSPACE/tt-cache}
--
--if ! [ -d $GECKO_DIR ]; then
--    echo "GECKO_DIR must be set to a directory containing a gecko source checkout" >&2
--    exit 1
--fi
--GECKO_DIR=$( cd "$GECKO_DIR" && pwd )
--
--# Directory to populate with tooltool-installed tools
--export TOOLTOOL_DIR="$WORKSPACE"
--
--# Directory to hold the (useless) object files generated by the analysis.
--export MOZ_OBJDIR="$WORKSPACE/obj-analyzed"
--mkdir -p "$MOZ_OBJDIR"
--
--if [ -n "$DO_TOOLTOOL" ]; then
--  ( cd $TOOLTOOL_DIR; $GECKO_DIR/mach artifact toolchain -v --tooltool-url https://api.pub.build.mozilla.org/tooltool/ --tooltool-manifest $GECKO_DIR/$TOOLTOOL_MANIFEST --cache-dir $TOOLTOOL_CACHE${MOZ_TOOLCHAINS:+ ${MOZ_TOOLCHAINS}} )
--fi
--
--export NO_MERCURIAL_SETUP_CHECK=1
--
--if [[ "$PROJECT" = "browser" ]]; then (
--    cd "$WORKSPACE"
--    set "$WORKSPACE"
--    . setup-ccache.sh
--    # Mozbuild config:
--    export MOZBUILD_STATE_PATH=$WORKSPACE/mozbuild/
--    # Create .mozbuild so mach doesn't complain about this
--    mkdir -p $MOZBUILD_STATE_PATH
--) fi
--. hazard-analysis.sh
--
--build_js_shell
--
--# Artifacts folder is outside of the cache.
--mkdir -p $HOME/artifacts/ || true
--
--function onexit () {
--    grab_artifacts "$WORKSPACE/analysis" "$HOME/artifacts"
--}
--
--trap onexit EXIT
--
--configure_analysis "$WORKSPACE/analysis"
--run_analysis "$WORKSPACE/analysis" "$PROJECT"
--
--check_hazards "$WORKSPACE/analysis"
--
--################################### script end ###################################
-diff --git a/taskcluster/scripts/builder/build-l10n.sh b/taskcluster/scripts/builder/build-l10n.sh
-deleted file mode 100755
---- a/taskcluster/scripts/builder/build-l10n.sh
-+++ /dev/null
-@@ -1,98 +0,0 @@
--#! /bin/bash -vex
--
--set -x -e
--
--echo "running as" $(id)
--
--. /builds/worker/scripts/xvfb.sh
--
--####
--# Taskcluster friendly wrapper for performing fx desktop l10n repacks via mozharness.
--# Based on ./build-linux.sh
--####
--
--# Inputs, with defaults
--
--: MOZHARNESS_SCRIPT             ${MOZHARNESS_SCRIPT}
--: MOZHARNESS_CONFIG             ${MOZHARNESS_CONFIG}
--: MOZHARNESS_ACTIONS            ${MOZHARNESS_ACTIONS}
--: MOZHARNESS_OPTIONS            ${MOZHARNESS_OPTIONS}
--
--: TOOLTOOL_CACHE                ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache}
--
--: NEED_XVFB                     ${NEED_XVFB:=false}
--
--: WORKSPACE                     ${WORKSPACE:=/builds/worker/workspace}
--
--set -v
--
--fail() {
--    echo # make sure error message is on a new line
--    echo "[build-l10n.sh:error]" "${@}"
--    exit 1
--}
--
--export MOZ_CRASHREPORTER_NO_REPORT=1
--export MOZ_OBJDIR=obj-firefox
--export TINDERBOX_OUTPUT=1
--
--# Ensure that in tree libraries can be found
--export LIBRARY_PATH=$LIBRARY_PATH:$WORKSPACE/src/obj-firefox:$WORKSPACE/src/gcc/lib64
--
--# test required parameters are supplied
--if [[ -z ${MOZHARNESS_SCRIPT} ]]; then fail "MOZHARNESS_SCRIPT is not set"; fi
--if [[ -z ${MOZHARNESS_CONFIG} ]]; then fail "MOZHARNESS_CONFIG is not set"; fi
--
--cleanup() {
--    local rv=$?
--    cleanup_xvfb
--    exit $rv
--}
--trap cleanup EXIT INT
--
--# run XVfb in the background, if necessary
--if $NEED_XVFB; then
--    start_xvfb '1024x768x24' 2
--fi
--
--# set up mozharness configuration, via command line, env, etc.
--
--# $TOOLTOOL_CACHE bypasses mozharness completely and is read by tooltool_wrapper.sh to set the
--# cache.  However, only some mozharness scripts use tooltool_wrapper.sh, so this may not be
--# entirely effective.
--export TOOLTOOL_CACHE
--
--# support multiple, space delimited, config files
--config_cmds=""
--for cfg in $MOZHARNESS_CONFIG; do
--  config_cmds="${config_cmds} --config ${cfg}"
--done
--
--# if MOZHARNESS_ACTIONS is given, only run those actions (completely overriding default_actions
--# in the mozharness configuration)
--if [ -n "$MOZHARNESS_ACTIONS" ]; then
--    actions=""
--    for action in $MOZHARNESS_ACTIONS; do
--        actions="$actions --$action"
--    done
--fi
--
--# if MOZHARNESS_OPTIONS is given, append them to mozharness command line run
--# e.g. enable-pgo
--if [ -n "$MOZHARNESS_OPTIONS" ]; then
--    options=""
--    for option in $MOZHARNESS_OPTIONS; do
--        options="$options --$option"
--    done
--fi
--
--cd /builds/worker
--
--python2.7 $WORKSPACE/build/src/testing/${MOZHARNESS_SCRIPT} \
--  --disable-mock \
--  --revision ${GECKO_HEAD_REV} \
--  $actions \
--  $options \
--  ${config_cmds} \
--  --log-level=debug \
--  --work-dir=$WORKSPACE/build \
-diff --git a/taskcluster/scripts/builder/build-linux.sh b/taskcluster/scripts/builder/build-linux.sh
-deleted file mode 100755
---- a/taskcluster/scripts/builder/build-linux.sh
-+++ /dev/null
-@@ -1,127 +0,0 @@
--#! /bin/bash -vex
--
--set -x -e
--
--echo "running as" $(id)
--
--. /builds/worker/scripts/xvfb.sh
--
--####
--# Taskcluster friendly wrapper for performing fx desktop builds via mozharness.
--####
--
--# Inputs, with defaults
--
--: MOZHARNESS_SCRIPT             ${MOZHARNESS_SCRIPT}
--: MOZHARNESS_CONFIG             ${MOZHARNESS_CONFIG}
--: MOZHARNESS_ACTIONS            ${MOZHARNESS_ACTIONS}
--: MOZHARNESS_OPTIONS            ${MOZHARNESS_OPTIONS}
--
--: TOOLTOOL_CACHE                ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache}
--
--: NEED_XVFB                     ${NEED_XVFB:=false}
--
--: MH_CUSTOM_BUILD_VARIANT_CFG   ${MH_CUSTOM_BUILD_VARIANT_CFG}
--: MH_BRANCH                     ${MH_BRANCH:=mozilla-central}
--: MH_BUILD_POOL                 ${MH_BUILD_POOL:=staging}
--: MOZ_SCM_LEVEL                 ${MOZ_SCM_LEVEL:=1}
--
--: WORKSPACE                     ${WORKSPACE:=/builds/worker/workspace}
--
--set -v
--
--fail() {
--    echo # make sure error message is on a new line
--    echo "[build-linux.sh:error]" "${@}"
--    exit 1
--}
--
--export MOZ_CRASHREPORTER_NO_REPORT=1
--export MOZ_OBJDIR=obj-firefox
--export TINDERBOX_OUTPUT=1
--
--# use "simple" package names so that they can be hard-coded in the task's
--# extras.locations
--export MOZ_SIMPLE_PACKAGE_NAME=target
--
--# Do not try to upload symbols (see https://bugzilla.mozilla.org/show_bug.cgi?id=1164615)
--export MOZ_AUTOMATION_UPLOAD_SYMBOLS=0
--
--# Ensure that in tree libraries can be found
--export LIBRARY_PATH=$LIBRARY_PATH:$WORKSPACE/src/obj-firefox:$WORKSPACE/src/gcc/lib64
--
--if [[ -n ${USE_SCCACHE} ]]; then
--    # Point sccache at the Taskcluster proxy for AWS credentials.
--    export AWS_IAM_CREDENTIALS_URL="http://taskcluster/auth/v1/aws/s3/read-write/taskcluster-level-${MOZ_SCM_LEVEL}-sccache-${TASKCLUSTER_WORKER_GROUP}/?format=iam-role-compat"
--fi
--
--# test required parameters are supplied
--if [[ -z ${MOZHARNESS_SCRIPT} ]]; then fail "MOZHARNESS_SCRIPT is not set"; fi
--if [[ -z ${MOZHARNESS_CONFIG} ]]; then fail "MOZHARNESS_CONFIG is not set"; fi
--
--cleanup() {
--    local rv=$?
--    cleanup_xvfb
--    exit $rv
--}
--trap cleanup EXIT INT
--
--# run XVfb in the background, if necessary
--if $NEED_XVFB; then
--    start_xvfb '1024x768x24' 2
--fi
--
--# set up mozharness configuration, via command line, env, etc.
--
--debug_flag=""
--if [ 0$DEBUG -ne 0 ]; then
--  debug_flag='--debug'
--fi
--
--custom_build_variant_cfg_flag=""
--if [ -n "${MH_CUSTOM_BUILD_VARIANT_CFG}" ]; then
--    custom_build_variant_cfg_flag="--custom-build-variant-cfg=${MH_CUSTOM_BUILD_VARIANT_CFG}"
--fi
--
--# $TOOLTOOL_CACHE bypasses mozharness completely and is read by tooltool_wrapper.sh to set the
--# cache.  However, only some mozharness scripts use tooltool_wrapper.sh, so this may not be
--# entirely effective.
--export TOOLTOOL_CACHE
--
--# support multiple, space delimited, config files
--config_cmds=""
--for cfg in $MOZHARNESS_CONFIG; do
--  config_cmds="${config_cmds} --config ${cfg}"
--done
--
--# if MOZHARNESS_ACTIONS is given, only run those actions (completely overriding default_actions
--# in the mozharness configuration)
--if [ -n "$MOZHARNESS_ACTIONS" ]; then
--    actions=""
--    for action in $MOZHARNESS_ACTIONS; do
--        actions="$actions --$action"
--    done
--fi
--
--# if MOZHARNESS_OPTIONS is given, append them to mozharness command line run
--# e.g. enable-pgo
--if [ -n "$MOZHARNESS_OPTIONS" ]; then
--    options=""
--    for option in $MOZHARNESS_OPTIONS; do
--        options="$options --$option"
--    done
--fi
--
--cd /builds/worker
--
--python2.7 $WORKSPACE/build/src/testing/${MOZHARNESS_SCRIPT} ${config_cmds} \
--  $debug_flag \
--  $custom_build_variant_cfg_flag \
--  --disable-mock \
--  $actions \
--  $options \
--  --log-level=debug \
--  --scm-level=$MOZ_SCM_LEVEL \
--  --work-dir=$WORKSPACE/build \
--  --branch=${MH_BRANCH} \
--  --build-pool=${MH_BUILD_POOL}
-diff --git a/taskcluster/scripts/builder/build-sm-mozjs-crate.sh b/taskcluster/scripts/builder/build-sm-mozjs-crate.sh
-deleted file mode 100755
---- a/taskcluster/scripts/builder/build-sm-mozjs-crate.sh
-+++ /dev/null
-@@ -1,18 +0,0 @@
--#!/usr/bin/env bash
--
--set -xe
--
--source $(dirname $0)/sm-tooltool-config.sh
--
--# Ensure that we have a .cargo/config that points us to our vendored crates
--# rather than to crates.io.
--cd "$SRCDIR/.cargo"
--sed -e "s|@top_srcdir@|$SRCDIR|" < config.in | tee config
--
--cd "$SRCDIR/js/src"
--
--export PATH="$PATH:$TOOLTOOL_CHECKOUT/cargo/bin:$TOOLTOOL_CHECKOUT/rustc/bin"
--export RUST_BACKTRACE=1
--
--cargo build --verbose --frozen --features debugmozjs
--cargo build --verbose --frozen
-diff --git a/taskcluster/scripts/builder/build-sm-package.sh b/taskcluster/scripts/builder/build-sm-package.sh
-deleted file mode 100755
---- a/taskcluster/scripts/builder/build-sm-package.sh
-+++ /dev/null
-@@ -1,33 +0,0 @@
--#!/bin/bash
--
--set -xe
--
--source $(dirname $0)/sm-tooltool-config.sh
--
--mkdir -p $UPLOAD_DIR
--
--# Package up the sources into the release tarball.
--AUTOMATION=1 DIST=$UPLOAD_DIR $SRCDIR/js/src/make-source-package.sh
--
--# Extract the tarball into a new directory in the workspace.
--
--PACKAGE_DIR=$WORK/sm-package
--mkdir -p $PACKAGE_DIR
--pushd $PACKAGE_DIR
--
--tar -xjvf $UPLOAD_DIR/mozjs-*.tar.bz2
--
--: ${PYTHON:=python2.7}
--
--# Build the freshly extracted, packaged SpiderMonkey.
--pushd ./mozjs-*/js/src
--
--# MOZ_AUTOMATION enforces certain requirements that don't apply to
--# packaged builds. Unset it.
--unset MOZ_AUTOMATION
--
--AUTOMATION=1 $PYTHON ./devtools/automation/autospider.py --skip-tests=checks $SPIDERMONKEY_VARIANT
--popd
--
--# Copy artifacts for upload by TaskCluster
--cp -rL ./mozjs-*/obj-spider/dist/bin/{js,jsapi-tests,js-gdb.py,libmozjs*} $UPLOAD_DIR
-diff --git a/taskcluster/scripts/builder/build-sm.sh b/taskcluster/scripts/builder/build-sm.sh
-deleted file mode 100755
---- a/taskcluster/scripts/builder/build-sm.sh
-+++ /dev/null
-@@ -1,20 +0,0 @@
--#!/bin/bash
--
--set -x
--
--source $(dirname $0)/sm-tooltool-config.sh
--
--: ${PYTHON:=python2.7}
--
--# Run the script
--export MOZ_UPLOAD_DIR="$UPLOAD_DIR"
--AUTOMATION=1 $PYTHON $SRCDIR/js/src/devtools/automation/autospider.py $SPIDERMONKEY_VARIANT
--BUILD_STATUS=$?
--
--# Ensure upload dir exists
--mkdir -p $UPLOAD_DIR
--
--# Copy artifacts for upload by TaskCluster
--cp -rL $SRCDIR/obj-spider/dist/bin/{js,jsapi-tests,js-gdb.py} $UPLOAD_DIR
--
--exit $BUILD_STATUS
-diff --git a/taskcluster/scripts/builder/hazard-analysis.sh b/taskcluster/scripts/builder/hazard-analysis.sh
-deleted file mode 100755
---- a/taskcluster/scripts/builder/hazard-analysis.sh
-+++ /dev/null
-@@ -1,170 +0,0 @@
--#!/bin/bash -ex
--
--[ -n "$WORKSPACE" ]
--[ -n "$MOZ_OBJDIR" ]
--[ -n "$GECKO_DIR" ]
--
--HAZARD_SHELL_OBJDIR=$WORKSPACE/obj-haz-shell
--JS_SRCDIR=$GECKO_DIR/js/src
--ANALYSIS_SRCDIR=$JS_SRCDIR/devtools/rootAnalysis
--
--export CC="$TOOLTOOL_DIR/gcc/bin/gcc"
--export CXX="$TOOLTOOL_DIR/gcc/bin/g++"
--
--PYTHON=python2.7
--if ! which $PYTHON; then
--    PYTHON=python
--fi
--
--
--function check_commit_msg () {
--    ( set +e;
--    if [[ -n "$AUTOMATION" ]]; then
--        hg --cwd "$GECKO_DIR" log -r. --template '{desc}\n' | grep -F -q -- "$1"
--    else
--        echo -- "$SCRIPT_FLAGS" | grep -F -q -- "$1"
--    fi
--    )
--}
--
--if check_commit_msg "--dep"; then
--    HAZ_DEP=1
--fi
--
--function build_js_shell () {
--    # Must unset MOZ_OBJDIR and MOZCONFIG here to prevent the build system from
--    # inferring that the analysis output directory is the current objdir. We
--    # need a separate objdir here to build the opt JS shell to use to run the
--    # analysis.
--    (
--    unset MOZ_OBJDIR
--    unset MOZCONFIG
--    ( cd $JS_SRCDIR; autoconf-2.13 )
--    if [[ -z "$HAZ_DEP" ]]; then
--        [ -d $HAZARD_SHELL_OBJDIR ] && rm -rf $HAZARD_SHELL_OBJDIR
--    fi
--    mkdir -p $HAZARD_SHELL_OBJDIR || true
--    cd $HAZARD_SHELL_OBJDIR
--    $JS_SRCDIR/configure --enable-optimize --disable-debug --enable-ctypes --enable-nspr-build --without-intl-api --with-ccache
--    make -j4
--    ) # Restore MOZ_OBJDIR and MOZCONFIG
--}
--
--function configure_analysis () {
--    local analysis_dir
--    analysis_dir="$1"
--
--    if [[ -z "$HAZ_DEP" ]]; then
--        [ -d "$analysis_dir" ] && rm -rf "$analysis_dir"
--    fi
--
--    mkdir -p "$analysis_dir" || true
--    (
--        cd "$analysis_dir"
--        cat > defaults.py <<EOF
--js = "$HAZARD_SHELL_OBJDIR/dist/bin/js"
--analysis_scriptdir = "$ANALYSIS_SRCDIR"
--objdir = "$MOZ_OBJDIR"
--source = "$GECKO_DIR"
--sixgill = "$TOOLTOOL_DIR/sixgill/usr/libexec/sixgill"
--sixgill_bin = "$TOOLTOOL_DIR/sixgill/usr/bin"
--EOF
--
--        local rev
--        rev=$(cd $GECKO_DIR && hg log -r . -T '{node|short}')
--        cat > run-analysis.sh <<EOF
--#!/bin/sh
--if [ \$# -eq 0 ]; then
--  set gcTypes
--fi
--export ANALYSIS_SCRIPTDIR="$ANALYSIS_SRCDIR"
--export URLPREFIX="https://hg.mozilla.org/mozilla-unified/file/$rev/"
--exec "$ANALYSIS_SRCDIR/analyze.py" "\$@"
--EOF
--        chmod +x run-analysis.sh
--    )
--}
--
--function run_analysis () {
--    local analysis_dir
--    analysis_dir="$1"
--    local build_type
--    build_type="$2"
--
--    if [[ -z "$HAZ_DEP" ]]; then
--        [ -d $MOZ_OBJDIR ] && rm -rf $MOZ_OBJDIR
--    fi
--
--    (
--        cd "$analysis_dir"
--        $PYTHON "$ANALYSIS_SRCDIR/analyze.py" --buildcommand="$GECKO_DIR/testing/mozharness/scripts/spidermonkey/build.${build_type}"
--    )
--}
--
--function grab_artifacts () {
--    local analysis_dir
--    analysis_dir="$1"
--    local artifacts
--    artifacts="$2"
--
--    (
--        cd "$analysis_dir"
--        ls -lah
--
--        # Do not error out if no files found
--        shopt -s nullglob
--        set +e
--        local important
--        important=(refs.txt unnecessary.txt hazards.txt gcFunctions.txt allFunctions.txt heapWriteHazards.txt)
--
--        # Bundle up the less important but still useful intermediate outputs,
--        # just to cut down on the clutter in treeherder's Job Details pane.
--        tar -acvf "${artifacts}/hazardIntermediates.tar.xz" --exclude-from <(for f in "${important[@]}"; do echo $f; done) *.txt *.lst build_xgill.log
--
--        # Upload the important outputs individually, so that they will be
--        # visible in Job Details and accessible to automated jobs.
--        for f in "${important[@]}"; do
--            gzip -9 -c "$f" > "${artifacts}/$f.gz"
--        done
--
--        # Check whether the user requested .xdb file upload in the top commit comment
--        if check_commit_msg "--upload-xdbs"; then
--            HAZ_UPLOAD_XDBS=1
--        fi
--
--        if [ -n "$HAZ_UPLOAD_XDBS" ]; then
--            for f in *.xdb; do
--                bzip2 -c "$f" > "${artifacts}/$f.bz2"
--            done
--        fi
--    )
--}
--
--function check_hazards () {
--    (
--    set +e
--    NUM_HAZARDS=$(grep -c 'Function.*has unrooted.*live across GC call' "$1"/rootingHazards.txt)
--    NUM_UNSAFE=$(grep -c '^Function.*takes unsafe address of unrooted' "$1"/refs.txt)
--    NUM_UNNECESSARY=$(grep -c '^Function.* has unnecessary root' "$1"/unnecessary.txt)
--    NUM_WRITE_HAZARDS=$(perl -lne 'print $1 if m!found (\d+)/\d+ allowed errors!' "$1"/heapWriteHazards.txt)
--
--    set +x
--    echo "TinderboxPrint: rooting hazards<br/>$NUM_HAZARDS"
--    echo "TinderboxPrint: (unsafe references to unrooted GC pointers)<br/>$NUM_UNSAFE"
--    echo "TinderboxPrint: (unnecessary roots)<br/>$NUM_UNNECESSARY"
--    echo "TinderboxPrint: heap write hazards<br/>$NUM_WRITE_HAZARDS"
--
--    if [ $NUM_HAZARDS -gt 0 ]; then
--        echo "TEST-UNEXPECTED-FAIL $NUM_HAZARDS rooting hazards detected" >&2
--        echo "TinderboxPrint: documentation<br/><a href='https://wiki.mozilla.org/Javascript:Hazard_Builds#Diagnosing_a_rooting_hazards_failure'>static rooting hazard analysis failures</a>, visit \"Inspect Task\" link for hazard details"
--        exit 1
--    fi
--
--    NUM_ALLOWED_WRITE_HAZARDS=4
--    if [ $NUM_WRITE_HAZARDS -gt $NUM_ALLOWED_WRITE_HAZARDS ]; then
--        echo "TEST-UNEXPECTED-FAIL $NUM_WRITE_HAZARDS heap write hazards detected out of $NUM_ALLOWED_WRITE_HAZARDS allowed" >&2
--        echo "TinderboxPrint: documentation<br/><a href='https://wiki.mozilla.org/Javascript:Hazard_Builds#Diagnosing_a_heap_write_hazard_failure'>heap write hazard analysis failures</a>, visit \"Inspect Task\" link for hazard details"
--        exit 1
--    fi
--    )
--}
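
The hazard gate in check_hazards() comes down to counting pattern matches in each report and failing once a count exceeds its allowance. A minimal standalone sketch of that pattern, assuming a hazards.txt report in the current directory:

    #!/bin/bash
    # grep -c prints the match count but exits non-zero on zero matches,
    # so guard it with `|| true` when running under `set -e`.
    num_hazards=$(grep -c 'has unrooted.*live across GC call' hazards.txt || true)
    allowed=0
    if [ "$num_hazards" -gt "$allowed" ]; then
        echo "TEST-UNEXPECTED-FAIL $num_hazards rooting hazards detected" >&2
        exit 1
    fi
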
-diff --git a/taskcluster/scripts/builder/repackage.sh b/taskcluster/scripts/builder/repackage.sh
-deleted file mode 100755
---- a/taskcluster/scripts/builder/repackage.sh
-+++ /dev/null
-@@ -1,93 +0,0 @@
--#! /bin/bash -vex
--
--set -x -e
--
--echo "running as" $(id)
--
--. /builds/worker/scripts/xvfb.sh
--
--####
--# Taskcluster friendly wrapper for performing fx desktop builds via mozharness.
--####
--
--# Inputs, with defaults
--
--: MOZHARNESS_SCRIPT             ${MOZHARNESS_SCRIPT}
--: MOZHARNESS_CONFIG             ${MOZHARNESS_CONFIG}
--: MOZHARNESS_ACTIONS            ${MOZHARNESS_ACTIONS}
--: MOZHARNESS_OPTIONS            ${MOZHARNESS_OPTIONS}
--
--: TOOLTOOL_CACHE                ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache}
--
--: WORKSPACE                     ${WORKSPACE:=/builds/worker/workspace}
--
--set -v
--
--fail() {
--    echo # make sure error message is on a new line
--    echo "[build-linux.sh:error]" "${@}"
--    exit 1
--}
--
--export MOZ_CRASHREPORTER_NO_REPORT=1
--export MOZ_OBJDIR=obj-firefox
--export TINDERBOX_OUTPUT=1
--
--# use "simple" package names so that they can be hard-coded in the task's
--# extras.locations
--export MOZ_SIMPLE_PACKAGE_NAME=target
--
--# test required parameters are supplied
--if [[ -z ${MOZHARNESS_SCRIPT} ]]; then fail "MOZHARNESS_SCRIPT is not set"; fi
--if [[ -z ${MOZHARNESS_CONFIG} ]]; then fail "MOZHARNESS_CONFIG is not set"; fi
--
--cleanup() {
--    local rv=$?
--    cleanup_xvfb
--    exit $rv
--}
--trap cleanup EXIT INT
--
--# set up mozharness configuration, via command line, env, etc.
--
--debug_flag=""
--if [ 0$DEBUG -ne 0 ]; then
--  debug_flag='--debug'
--fi
--
--# $TOOLTOOL_CACHE bypasses mozharness completely and is read by tooltool_wrapper.sh to set the
--# cache.  However, only some mozharness scripts use tooltool_wrapper.sh, so this may not be
--# entirely effective.
--export TOOLTOOL_CACHE
--
--# support multiple, space delimited, config files
--config_cmds=""
--for cfg in $MOZHARNESS_CONFIG; do
--  config_cmds="${config_cmds} --config ${cfg}"
--done
--
--# if MOZHARNESS_ACTIONS is given, only run those actions (completely overriding default_actions
--# in the mozharness configuration)
--if [ -n "$MOZHARNESS_ACTIONS" ]; then
--    actions=""
--    for action in $MOZHARNESS_ACTIONS; do
--        actions="$actions --$action"
--    done
--fi
--
--# if MOZHARNESS_OPTIONS is given, append them to mozharness command line run
--# e.g. enable-pgo
--if [ -n "$MOZHARNESS_OPTIONS" ]; then
--    options=""
--    for option in $MOZHARNESS_OPTIONS; do
--        options="$options --$option"
--    done
--fi
--
--cd /builds/worker
--
--python2.7 $WORKSPACE/build/src/testing/${MOZHARNESS_SCRIPT} ${config_cmds} \
--  $actions \
--  $options \
--  --log-level=debug \
--  --work-dir=$WORKSPACE/build \
-diff --git a/taskcluster/scripts/builder/setup-ccache.sh b/taskcluster/scripts/builder/setup-ccache.sh
-deleted file mode 100644
---- a/taskcluster/scripts/builder/setup-ccache.sh
-+++ /dev/null
-@@ -1,9 +0,0 @@
--#! /bin/bash -ex
--
--test -d $1 # workspace must exist at this point...
--WORKSPACE=$( cd "$1" && pwd )
--
--export CCACHE_DIR=$WORKSPACE/ccache
--
--ccache -M 12G
--ccache -s
-diff --git a/taskcluster/scripts/builder/sm-tooltool-config.sh b/taskcluster/scripts/builder/sm-tooltool-config.sh
-deleted file mode 100755
---- a/taskcluster/scripts/builder/sm-tooltool-config.sh
-+++ /dev/null
-@@ -1,49 +0,0 @@
--#!/bin/bash
--
--set -xe
--
--: ${TOOLTOOL_SERVER:=https://api.pub.build.mozilla.org/tooltool/}
--: ${SPIDERMONKEY_VARIANT:=plain}
--: ${UPLOAD_DIR:=$HOME/artifacts/}
--: ${WORK:=$HOME/workspace}
--: ${SRCDIR:=$WORK/build/src}
--
--mkdir -p $WORK
--cd $WORK
--
--# Need to install things from tooltool. Figure out what platform to use.
--
--case $(uname -m) in
--    i686 | arm )
--        BITS=32
--        ;;
--    *)
--        BITS=64
--        ;;
--esac
--
--case "$OSTYPE" in
--    darwin*)
--        PLATFORM_OS=macosx
--        ;;
--    linux-gnu)
--        PLATFORM_OS=linux
--        ;;
--    msys)
--        PLATFORM_OS=win
--        ;;
--    *)
--        echo "Unrecognized OSTYPE '$OSTYPE'" >&2
--        PLATFORM_OS=linux
--        ;;
--esac
--
--# Install everything needed for the browser on this platform. Not all of it is
--# necessary for the JS shell, but it's less duplication to share tooltool
--# manifests.
--BROWSER_PLATFORM=$PLATFORM_OS$BITS
--
--: ${TOOLTOOL_CHECKOUT:=$WORK}
--export TOOLTOOL_CHECKOUT
--
--(cd $TOOLTOOL_CHECKOUT && ${SRCDIR}/mach artifact toolchain -v --tooltool-url $TOOLTOOL_SERVER --tooltool-manifest $SRCDIR/$TOOLTOOL_MANIFEST ${TOOLTOOL_CACHE:+ --cache-dir $TOOLTOOL_CACHE}${MOZ_TOOLCHAINS:+ ${MOZ_TOOLCHAINS}})
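
The platform probe above combines `uname -m` for word size with bash's `$OSTYPE` for the OS family; a minimal sketch of the same detection with the fallback made explicit:

    #!/bin/bash
    # Word size: 32-bit only for i686/arm, 64-bit otherwise.
    case "$(uname -m)" in
        i686 | arm ) BITS=32 ;;
        * )          BITS=64 ;;
    esac
    # OS family from bash's $OSTYPE, defaulting to linux if unrecognized.
    case "$OSTYPE" in
        darwin*)   PLATFORM_OS=macosx ;;
        linux-gnu) PLATFORM_OS=linux ;;
        msys)      PLATFORM_OS=win ;;
        *)         PLATFORM_OS=linux ;;
    esac
    echo "${PLATFORM_OS}${BITS}"   # e.g. linux64
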
-diff --git a/taskcluster/scripts/copy.sh b/taskcluster/scripts/copy.sh
-deleted file mode 100755
---- a/taskcluster/scripts/copy.sh
-+++ /dev/null
-@@ -1,9 +0,0 @@
--#! /bin/bash -ex
--
--# This script copies the contents of the "scripts" folder into a docker
--# container using tar/untar; the container id must be passed.
--
--DIRNAME=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
--docker exec $1 mkdir -p $2
--cd $DIRNAME
--tar -cv * | docker exec -i $1 tar -x -C $2
-diff --git a/taskcluster/scripts/misc/build-binutils-linux.sh b/taskcluster/scripts/misc/build-binutils-linux.sh
-deleted file mode 100755
---- a/taskcluster/scripts/misc/build-binutils-linux.sh
-+++ /dev/null
-@@ -1,16 +0,0 @@
--#!/bin/bash
--set -x -e -v
--
--# This script is for building binutils for Linux.
--
--WORKSPACE=$HOME/workspace
--HOME_DIR=$WORKSPACE/build
--UPLOAD_DIR=$HOME/artifacts
--
--cd $HOME_DIR/src
--
--build/unix/build-binutils/build-binutils.sh $HOME_DIR
--
--# Put a tarball in the artifacts dir
--mkdir -p $UPLOAD_DIR
--cp $HOME_DIR/binutils.tar.* $UPLOAD_DIR
-diff --git a/taskcluster/scripts/misc/build-cctools-port-macosx.sh b/taskcluster/scripts/misc/build-cctools-port-macosx.sh
-deleted file mode 100755
---- a/taskcluster/scripts/misc/build-cctools-port-macosx.sh
-+++ /dev/null
-@@ -1,54 +0,0 @@
--#!/bin/bash
--set -x -e -v
--
--# This script is for building cctools (Apple's binutils) for Mac OS X on
--# Linux using cctools-port (https://github.com/tpoechtrager/cctools-port).
--WORKSPACE=$HOME/workspace
--UPLOAD_DIR=$HOME/artifacts
--
--# Repository info
--: CROSSTOOL_PORT_REPOSITORY    ${CROSSTOOL_PORT_REPOSITORY:=https://github.com/tpoechtrager/cctools-port}
--: CROSSTOOL_PORT_REV           ${CROSSTOOL_PORT_REV:=8e9c3f2506b51cf56725eaa60b6e90e240e249ca}
--
--# Set some crosstools-port directories
--CROSSTOOLS_SOURCE_DIR=$WORKSPACE/crosstools-port
--CROSSTOOLS_CCTOOLS_DIR=$CROSSTOOLS_SOURCE_DIR/cctools
--CROSSTOOLS_BUILD_DIR=/tmp/cctools
--CLANG_DIR=$WORKSPACE/build/src/clang
--CCTOOLS_DIR=$WORKSPACE/build/src/cctools
--MACOSX_SDK_DIR=$WORKSPACE/build/src/MacOSX10.10.sdk
--
--TARGET_TRIPLE=x86_64-apple-darwin11
--
--# Create our directories
--mkdir -p $CROSSTOOLS_BUILD_DIR
--
--git clone --no-checkout $CROSSTOOL_PORT_REPOSITORY $CROSSTOOLS_SOURCE_DIR
--cd $CROSSTOOLS_SOURCE_DIR
--git checkout $CROSSTOOL_PORT_REV
--echo "Building from commit hash `git rev-parse $CROSSTOOL_PORT_REV`..."
--
--# Fetch clang from tooltool
--cd $WORKSPACE/build/src
--. taskcluster/scripts/misc/tooltool-download.sh
--
--# Configure crosstools-port
--cd $CROSSTOOLS_CCTOOLS_DIR
--export CC=$CLANG_DIR/bin/clang
--export CXX=$CLANG_DIR/bin/clang++
--export CFLAGS="-mcpu=generic -mtune=generic -O3 -target $TARGET_TRIPLE -isysroot $MACOSX_SDK_DIR"
--export CXXFLAGS="-mcpu=generic -mtune=generic -O3 -target $TARGET_TRIPLE -isysroot $MACOSX_SDK_DIR"
--export LDFLAGS="-Wl,-syslibroot,$MACOSX_SDK_DIR -Wl,-dead_strip"
--# TODO: bug 1357317 to avoid the LD_LIBRARY_PATH.
--export LD_LIBRARY_PATH="$CLANG_DIR/lib"
--export PATH="$CCTOOLS_DIR/bin:$PATH"
--./autogen.sh
--./configure --prefix=$CROSSTOOLS_BUILD_DIR --build=$MACHTYPE --host=$TARGET_TRIPLE --with-llvm-config=$CLANG_DIR/bin/llvm-config
--
--# Build cctools
--make -j `nproc --all` install
--$CCTOOLS_DIR/bin/$TARGET_TRIPLE-strip $CROSSTOOLS_BUILD_DIR/bin/*
--
--# Put a tarball in the artifacts dir
--mkdir -p $UPLOAD_DIR
--tar cjf $UPLOAD_DIR/cctools.tar.bz2 -C $CROSSTOOLS_BUILD_DIR/.. `basename $CROSSTOOLS_BUILD_DIR`
-diff --git a/taskcluster/scripts/misc/build-cctools-port.sh b/taskcluster/scripts/misc/build-cctools-port.sh
-deleted file mode 100755
---- a/taskcluster/scripts/misc/build-cctools-port.sh
-+++ /dev/null
-@@ -1,47 +0,0 @@
--#!/bin/bash
--set -x -e -v
--
--# This script is for building cctools (Apple's binutils) for Linux using
--# cctools-port (https://github.com/tpoechtrager/cctools-port).
--WORKSPACE=$HOME/workspace
--UPLOAD_DIR=$HOME/artifacts
--
--# Repository info
--: CROSSTOOL_PORT_REPOSITORY    ${CROSSTOOL_PORT_REPOSITORY:=https://github.com/tpoechtrager/cctools-port}
--: CROSSTOOL_PORT_REV           ${CROSSTOOL_PORT_REV:=8e9c3f2506b51cf56725eaa60b6e90e240e249ca}
--
--# Set some crosstools-port directories
--CROSSTOOLS_SOURCE_DIR=$WORKSPACE/crosstools-port
--CROSSTOOLS_CCTOOLS_DIR=$CROSSTOOLS_SOURCE_DIR/cctools
--CROSSTOOLS_BUILD_DIR=$WORKSPACE/cctools
--CLANG_DIR=$WORKSPACE/build/src/clang
--
--# Create our directories
--mkdir -p $CROSSTOOLS_BUILD_DIR
--
--git clone --no-checkout $CROSSTOOL_PORT_REPOSITORY $CROSSTOOLS_SOURCE_DIR
--cd $CROSSTOOLS_SOURCE_DIR
--git checkout $CROSSTOOL_PORT_REV
--echo "Building from commit hash `git rev-parse $CROSSTOOL_PORT_REV`..."
--
--# Fetch clang from tooltool
--cd $WORKSPACE/build/src
--. taskcluster/scripts/misc/tooltool-download.sh
--
--# Configure crosstools-port
--cd $CROSSTOOLS_CCTOOLS_DIR
--export CC=$CLANG_DIR/bin/clang
--export CXX=$CLANG_DIR/bin/clang++
--export LDFLAGS=/lib64/libpthread.so.0
--./autogen.sh
--./configure --prefix=$CROSSTOOLS_BUILD_DIR --target=x86_64-apple-darwin11 --with-llvm-config=$CLANG_DIR/bin/llvm-config
--
--# Build cctools
--make -j `nproc --all` install
--strip $CROSSTOOLS_BUILD_DIR/bin/*
--# cctools-port doesn't include dsymutil but clang will need to find it.
--cp $CLANG_DIR/bin/llvm-dsymutil $CROSSTOOLS_BUILD_DIR/bin/x86_64-apple-darwin11-dsymutil
--
--# Put a tarball in the artifacts dir
--mkdir -p $UPLOAD_DIR
--tar cJf $UPLOAD_DIR/cctools.tar.xz -C $CROSSTOOLS_BUILD_DIR/.. `basename $CROSSTOOLS_BUILD_DIR`
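
Both cctools-port scripts pin their source with the same clone-then-checkout idiom, which fetches objects without materializing a working tree until the wanted revision is known. A minimal sketch, with a hypothetical repo URL and commit hash standing in:

    #!/bin/bash
    set -e
    REPO=https://example.com/some/repo    # hypothetical URL
    REV=deadbeefcafe                      # hypothetical commit hash
    git clone --no-checkout "$REPO" src   # fetch objects, no working tree yet
    cd src
    git checkout "$REV"                   # materialize the pinned revision
    echo "Building from commit hash $(git rev-parse "$REV")..."
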
-diff --git a/taskcluster/scripts/misc/build-clang-3.9-linux.sh b/taskcluster/scripts/misc/build-clang-3.9-linux.sh
-deleted file mode 100644
---- a/taskcluster/scripts/misc/build-clang-3.9-linux.sh
-+++ /dev/null
-@@ -1,25 +0,0 @@
--#!/bin/bash
--set -x -e -v
--
--# This script is for building clang for Linux.
--
--WORKSPACE=$HOME/workspace
--HOME_DIR=$WORKSPACE/build
--UPLOAD_DIR=$HOME/artifacts
--
--cd $HOME_DIR/src
--
--. taskcluster/scripts/misc/tooltool-download.sh
--
--# gets a bit too verbose here
--set +x
--
--cd build/build-clang
--# |mach python| sets up a virtualenv for us!
--../../mach python ./build-clang.py -c clang-3.9-linux64.json
--
--set -x
--
--# Put a tarball in the artifacts dir
--mkdir -p $UPLOAD_DIR
--cp clang.tar.* $UPLOAD_DIR
-diff --git a/taskcluster/scripts/misc/build-clang-4-linux.sh b/taskcluster/scripts/misc/build-clang-4-linux.sh
-deleted file mode 100644
---- a/taskcluster/scripts/misc/build-clang-4-linux.sh
-+++ /dev/null
-@@ -1,25 +0,0 @@
--#!/bin/bash
--set -x -e -v
--
--# This script is for building clang for Linux.
--
--WORKSPACE=$HOME/workspace
--HOME_DIR=$WORKSPACE/build
--UPLOAD_DIR=$HOME/artifacts
--
--cd $HOME_DIR/src
--
--. taskcluster/scripts/misc/tooltool-download.sh
--
--# gets a bit too verbose here
--set +x
--
--cd build/build-clang
--# |mach python| sets up a virtualenv for us!
--../../mach python ./build-clang.py -c clang-4-linux64.json
--
--set -x
--
--# Put a tarball in the artifacts dir
--mkdir -p $UPLOAD_DIR
--cp clang.tar.* $UPLOAD_DIR
-diff --git a/taskcluster/scripts/misc/build-clang-macosx.sh b/taskcluster/scripts/misc/build-clang-macosx.sh
-deleted file mode 100755
---- a/taskcluster/scripts/misc/build-clang-macosx.sh
-+++ /dev/null
-@@ -1,33 +0,0 @@
--#!/bin/bash
--set -x -e -v
--
--# This script is for building clang for Mac OS X on Linux.
--WORKSPACE=$HOME/workspace
--HOME_DIR=$WORKSPACE/build
--UPLOAD_DIR=$HOME/artifacts
--
--cd $HOME_DIR/src
--
--. taskcluster/scripts/misc/tooltool-download.sh
--
--# ld needs libLTO.so from llvm
--export LD_LIBRARY_PATH=$HOME_DIR/src/clang/lib
--# these variables are used in build-clang.py
--export CROSS_CCTOOLS_PATH=$HOME_DIR/src/cctools
--export CROSS_SYSROOT=$HOME_DIR/src/MacOSX10.10.sdk
--# cmake doesn't allow us to specify a path to lipo on the command line.
--export PATH=$PATH:$CROSS_CCTOOLS_PATH/bin
--ln -sf $CROSS_CCTOOLS_PATH/bin/x86_64-apple-darwin11-lipo $CROSS_CCTOOLS_PATH/bin/lipo
--
--# gets a bit too verbose here
--set +x
--
--cd build/build-clang
--# |mach python| sets up a virtualenv for us!
--../../mach python ./build-clang.py -c clang-macosx64.json
--
--set -x
--
--# Put a tarball in the artifacts dir
--mkdir -p $UPLOAD_DIR
--cp clang.tar.* $UPLOAD_DIR
-diff --git a/taskcluster/scripts/misc/build-clang-tidy-linux.sh b/taskcluster/scripts/misc/build-clang-tidy-linux.sh
-deleted file mode 100755
---- a/taskcluster/scripts/misc/build-clang-tidy-linux.sh
-+++ /dev/null
-@@ -1,25 +0,0 @@
--#!/bin/bash
--set -x -e -v
--
--# This script is for building clang for Linux.
--
--WORKSPACE=$HOME/workspace
--HOME_DIR=$WORKSPACE/build
--UPLOAD_DIR=$HOME/artifacts
--
--cd $HOME_DIR/src
--
--. taskcluster/scripts/misc/tooltool-download.sh
--
--# gets a bit too verbose here
--set +x
--
--cd build/build-clang
--# |mach python| sets up a virtualenv for us!
--../../mach python ./build-clang.py -c clang-tidy-linux64.json
--
--set -x
--
--# Put a tarball in the artifacts dir
--mkdir -p $UPLOAD_DIR
--cp clang-tidy.tar.* $UPLOAD_DIR
-diff --git a/taskcluster/scripts/misc/build-clang-tidy-macosx.sh b/taskcluster/scripts/misc/build-clang-tidy-macosx.sh
-deleted file mode 100755
---- a/taskcluster/scripts/misc/build-clang-tidy-macosx.sh
-+++ /dev/null
-@@ -1,33 +0,0 @@
--#!/bin/bash
--set -x -e -v
--
--# This script is for building clang for Mac OS X on Linux.
--WORKSPACE=$HOME/workspace
--HOME_DIR=$WORKSPACE/build
--UPLOAD_DIR=$HOME/artifacts
--
--cd $HOME_DIR/src
--
--. taskcluster/scripts/misc/tooltool-download.sh
--
--# ld needs libLTO.so from llvm
--export LD_LIBRARY_PATH=$HOME_DIR/src/clang/lib
--# these variables are used in build-clang.py
--export CROSS_CCTOOLS_PATH=$HOME_DIR/src/cctools
--export CROSS_SYSROOT=$HOME_DIR/src/MacOSX10.10.sdk
--# cmake doesn't allow us to specify a path to lipo on the command line.
--export PATH=$PATH:$CROSS_CCTOOLS_PATH/bin
--ln -sf $CROSS_CCTOOLS_PATH/bin/x86_64-apple-darwin11-lipo $CROSS_CCTOOLS_PATH/bin/lipo
--
--# gets a bit too verbose here
--set +x
--
--cd build/build-clang
--# |mach python| sets up a virtualenv for us!
--../../mach python ./build-clang.py -c clang-tidy-macosx64.json
--
--set -x
--
--# Put a tarball in the artifacts dir
--mkdir -p $UPLOAD_DIR
--cp clang-tidy.tar.* $UPLOAD_DIR
-diff --git a/taskcluster/scripts/misc/build-clang-tidy32-windows.sh b/taskcluster/scripts/misc/build-clang-tidy32-windows.sh
-deleted file mode 100755
---- a/taskcluster/scripts/misc/build-clang-tidy32-windows.sh
-+++ /dev/null
-@@ -1,3 +0,0 @@
--#!/bin/bash
--
--source build/src/taskcluster/scripts/misc/build-clang-windows-helper32.sh clang-tidy-win32.json
-diff --git a/taskcluster/scripts/misc/build-clang-tidy64-windows.sh b/taskcluster/scripts/misc/build-clang-tidy64-windows.sh
-deleted file mode 100755
---- a/taskcluster/scripts/misc/build-clang-tidy64-windows.sh
-+++ /dev/null
-@@ -1,3 +0,0 @@
--#!/bin/bash
--
--source build/src/taskcluster/scripts/misc/build-clang-windows-helper64.sh clang-tidy-win64.json
-diff --git a/taskcluster/scripts/misc/build-clang-windows-helper32.sh b/taskcluster/scripts/misc/build-clang-windows-helper32.sh
-deleted file mode 100755
---- a/taskcluster/scripts/misc/build-clang-windows-helper32.sh
-+++ /dev/null
-@@ -1,57 +0,0 @@
--#!/bin/bash
--
--set -x -e -v
--
--# This script is for building clang-cl on Windows.
--
--: TOOLTOOL_CACHE                ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache}
--export TOOLTOOL_CACHE
--
--TOOLTOOL_AUTH_FILE=/c/builds/relengapi.tok
--if [ ! -e ${TOOLTOOL_AUTH_FILE} ]; then
--    echo cannot find ${TOOLTOOL_AUTH_FILE}
--    exit 1
--fi
--
--./build/src/mach artifact toolchain -v --authentication-file="${TOOLTOOL_AUTH_FILE}" --tooltool-manifest "build/src/${TOOLTOOL_MANIFEST}"${TOOLTOOL_CACHE:+ --cache-dir ${TOOLTOOL_CACHE}}${MOZ_TOOLCHAINS:+ ${MOZ_TOOLCHAINS}}
--
--# Set up all the Visual Studio paths.
--MSVC_DIR=vs2017_15.4.2
--VSWINPATH="$(cd ${MSVC_DIR} && pwd)"
--
--echo vswinpath ${VSWINPATH}
--
--export WINDOWSSDKDIR="${VSWINPATH}/SDK"
--export WIN32_REDIST_DIR="${VSWINPATH}/VC/redist/x86/Microsoft.VC141.CRT"
--export WIN_UCRT_REDIST_DIR="${VSWINPATH}/SDK/Redist/ucrt/DLLs/x86"
--
--export PATH="${VSWINPATH}/VC/bin/Hostx64/x86:${VSWINPATH}/VC/bin/Hostx64/x64:${VSWINPATH}/SDK/bin/10.0.15063.0/x64:${VSWINPATH}/DIA SDK/bin:${PATH}"
--export PATH="${VSWINPATH}/VC/redist/x86/Microsoft.VC141.CRT:${VSWINPATH}/SDK/Redist/ucrt/DLLs/x86:${PATH}"
--
--export INCLUDE="${VSWINPATH}/VC/include:${VSWINPATH}/VC/atlmfc/include:${VSWINPATH}/SDK/Include/10.0.15063.0/ucrt:${VSWINPATH}/SDK/Include/10.0.15063.0/shared:${VSWINPATH}/SDK/Include/10.0.15063.0/um:${VSWINPATH}/SDK/Include/10.0.15063.0/winrt:${VSWINPATH}/DIA SDK/include"
--export LIB="${VSWINPATH}/VC/lib/x86:${VSWINPATH}/VC/atlmfc/lib/x86:${VSWINPATH}/SDK/Lib/10.0.15063.0/ucrt/x86:${VSWINPATH}/SDK/Lib/10.0.15063.0/um/x86:${VSWINPATH}/DIA SDK/lib"
--
--export PATH="$(cd svn && pwd)/bin:${PATH}"
--export PATH="$(cd cmake && pwd)/bin:${PATH}"
--export PATH="$(cd ninja && pwd)/bin:${PATH}"
--
--# We use |mach python| to set up a virtualenv automatically for us.  We create
--# a dummy mozconfig, because the default machinery for config.guess-choosing
--# of the objdir doesn't work very well.
--MOZCONFIG="$(pwd)/mozconfig"
--cat > ${MOZCONFIG} <<EOF
--mk_add_options MOZ_OBJDIR=$(pwd)/objdir
--EOF
--
--# gets a bit too verbose here
--set +x
--
--BUILD_CLANG_DIR=build/src/build/build-clang
--MOZCONFIG=${MOZCONFIG} build/src/mach python ${BUILD_CLANG_DIR}/build-clang.py -c ${BUILD_CLANG_DIR}/${1}
--
--set -x
--
--# Put a tarball in the artifacts dir
--UPLOAD_PATH=public/build
--mkdir -p ${UPLOAD_PATH}
--cp clang*.tar.* ${UPLOAD_PATH}
-diff --git a/taskcluster/scripts/misc/build-clang-windows-helper64.sh b/taskcluster/scripts/misc/build-clang-windows-helper64.sh
-deleted file mode 100755
---- a/taskcluster/scripts/misc/build-clang-windows-helper64.sh
-+++ /dev/null
-@@ -1,56 +0,0 @@
--#!/bin/bash
--
--set -x -e -v
--
--# This script is for building clang-cl on Windows.
--
--: TOOLTOOL_CACHE                ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache}
--export TOOLTOOL_CACHE
--
--TOOLTOOL_AUTH_FILE=/c/builds/relengapi.tok
--if [ ! -e ${TOOLTOOL_AUTH_FILE} ]; then
--    echo cannot find ${TOOLTOOL_AUTH_FILE}
--    exit 1
--fi
--
--./build/src/mach artifact toolchain -v --authentication-file="${TOOLTOOL_AUTH_FILE}" --tooltool-manifest "build/src/${TOOLTOOL_MANIFEST}"${TOOLTOOL_CACHE:+ --cache-dir ${TOOLTOOL_CACHE}}${MOZ_TOOLCHAINS:+ ${MOZ_TOOLCHAINS}}
--
--# Set up all the Visual Studio paths.
--MSVC_DIR=vs2017_15.4.2
--VSWINPATH="$(cd ${MSVC_DIR} && pwd)"
--
--echo vswinpath ${VSWINPATH}
--
--export WINDOWSSDKDIR="${VSWINPATH}/SDK"
--export WIN32_REDIST_DIR="${VSWINPATH}/VC/redist/x64/Microsoft.VC141.CRT"
--export WIN_UCRT_REDIST_DIR="${VSWINPATH}/SDK/Redist/ucrt/DLLs/x64"
--
--export PATH="${VSWINPATH}/VC/bin/Hostx64/x64:${VSWINPATH}/SDK/bin/10.0.15063.0/x64:${VSWINPATH}/VC/redist/x64/Microsoft.VC141.CRT:${VSWINPATH}/SDK/Redist/ucrt/DLLs/x64:${VSWINPATH}/DIA SDK/bin/amd64:${PATH}"
--
--export INCLUDE="${VSWINPATH}/VC/include:${VSWINPATH}/VC/atlmfc/include:${VSWINPATH}/SDK/Include/10.0.15063.0/ucrt:${VSWINPATH}/SDK/Include/10.0.15063.0/shared:${VSWINPATH}/SDK/Include/10.0.15063.0/um:${VSWINPATH}/SDK/Include/10.0.15063.0/winrt:${VSWINPATH}/DIA SDK/include"
--export LIB="${VSWINPATH}/VC/lib/x64:${VSWINPATH}/VC/atlmfc/lib/x64:${VSWINPATH}/SDK/Lib/10.0.15063.0/ucrt/x64:${VSWINPATH}/SDK/Lib/10.0.15063.0/um/x64:${VSWINPATH}/DIA SDK/lib/amd64"
--
--export PATH="$(cd svn && pwd)/bin:${PATH}"
--export PATH="$(cd cmake && pwd)/bin:${PATH}"
--export PATH="$(cd ninja && pwd)/bin:${PATH}"
--
--# We use |mach python| to set up a virtualenv automatically for us.  We create
--# a dummy mozconfig, because the default machinery for config.guess-choosing
--# of the objdir doesn't work very well.
--MOZCONFIG="$(pwd)/mozconfig"
--cat > ${MOZCONFIG} <<EOF
--mk_add_options MOZ_OBJDIR=$(pwd)/objdir
--EOF
--
--# gets a bit too verbose here
--set +x
--
--BUILD_CLANG_DIR=build/src/build/build-clang
--MOZCONFIG=${MOZCONFIG} build/src/mach python ${BUILD_CLANG_DIR}/build-clang.py -c ${BUILD_CLANG_DIR}/${1}
--
--set -x
--
--# Put a tarball in the artifacts dir
--UPLOAD_PATH=public/build
--mkdir -p ${UPLOAD_PATH}
--cp clang*.tar.* ${UPLOAD_PATH}
-diff --git a/taskcluster/scripts/misc/build-clang32-windows.sh b/taskcluster/scripts/misc/build-clang32-windows.sh
-deleted file mode 100755
---- a/taskcluster/scripts/misc/build-clang32-windows.sh
-+++ /dev/null
-@@ -1,3 +0,0 @@
--#!/bin/bash
--
--source build/src/taskcluster/scripts/misc/build-clang-windows-helper32.sh clang-win32.json
-diff --git a/taskcluster/scripts/misc/build-clang64-windows.sh b/taskcluster/scripts/misc/build-clang64-windows.sh
-deleted file mode 100755
---- a/taskcluster/scripts/misc/build-clang64-windows.sh
-+++ /dev/null
-@@ -1,3 +0,0 @@
--#!/bin/bash
--
--source build/src/taskcluster/scripts/misc/build-clang-windows-helper64.sh clang-win64.json
-diff --git a/taskcluster/scripts/misc/build-gcc-4.9-linux.sh b/taskcluster/scripts/misc/build-gcc-4.9-linux.sh
-deleted file mode 100755
---- a/taskcluster/scripts/misc/build-gcc-4.9-linux.sh
-+++ /dev/null
-@@ -1,48 +0,0 @@
--#!/bin/bash
--set -e
--
--# This script is for building GCC 4.9 for Linux.
--
--WORKSPACE=$HOME/workspace
--HOME_DIR=$WORKSPACE/build
--UPLOAD_DIR=$HOME/artifacts
--
--root_dir=$HOME_DIR
--data_dir=$HOME_DIR/src/build/unix/build-gcc
--
--. $data_dir/build-gcc.sh
--
--gcc_version=4.9.4
--gcc_ext=bz2
--binutils_version=2.25.1
--binutils_ext=bz2
--
--# GPG key used to sign GCC
--$GPG --import $data_dir/13975A70E63C361C73AE69EF6EEB81F8981C74C7.key
--# GPG key used to sign binutils
--$GPG --import $data_dir/EAF1C276A747E9ED86210CBAC3126D3B4AE55E93.key
--# GPG key used to sign GMP
--$GPG --import $data_dir/343C2FF0FBEE5EC2EDBEF399F3599FF828C67298.key
--# GPG key used to sign MPFR
--$GPG --import $data_dir/07F3DBBECC1A39605078094D980C197698C3739D.key
--# GPG key used to sign MPC
--$GPG --import $data_dir/AD17A21EF8AED8F1CC02DBD9F7D5C9BF765C61E3.key
--
--cat > $HOME_DIR/checksums <<EOF
--b5b14added7d78a8d1ca70b5cb75fef57ce2197264f4f5835326b0df22ac9f22  binutils-2.25.1.tar.bz2
--02500a4edd14875f94fe84cbeda4290425cb0c1c2474c6f75d75a303d64b4196  cloog-0.18.1.tar.gz
--6c11d292cd01b294f9f84c9a59c230d80e9e4a47e5c6355f046bb36d4f358092  gcc-4.9.4.tar.bz2
--752079520b4690531171d0f4532e40f08600215feefede70b24fabdc6f1ab160  gmp-5.1.3.tar.bz2
--f4b3dbee9712850006e44f0db2103441ab3d13b406f77996d1df19ee89d11fb4  isl-0.12.2.tar.bz2
--ae79f8d41d8a86456b68607e9ca398d00f8b7342d1d83bcf4428178ac45380c7  mpc-0.8.2.tar.gz
--ca498c1c7a74dd37a576f353312d1e68d490978de4395fa28f1cbd46a364e658  mpfr-3.1.5.tar.bz2
--EOF
--
--prepare
--apply_patch $data_dir/PR64905.patch
--build_binutils
--build_gcc
--
--# Put a tarball in the artifacts dir
--mkdir -p $UPLOAD_DIR
--cp $HOME_DIR/gcc.tar.* $UPLOAD_DIR
-diff --git a/taskcluster/scripts/misc/build-hfsplus-linux.sh b/taskcluster/scripts/misc/build-hfsplus-linux.sh
-deleted file mode 100755
---- a/taskcluster/scripts/misc/build-hfsplus-linux.sh
-+++ /dev/null
-@@ -1,19 +0,0 @@
--#!/bin/bash
--set -x -e -v
--
--# This script is for building hfsplus for Linux.
--WORKSPACE=$HOME/workspace
--HOME_DIR=$WORKSPACE/build
--UPLOAD_DIR=$HOME/artifacts
--
--cd $HOME_DIR/src
--
--. taskcluster/scripts/misc/tooltool-download.sh
--
--export PATH=$PATH:$HOME_DIR/src/clang/bin
--
--build/unix/build-hfsplus/build-hfsplus.sh $HOME_DIR
--
--# Put a tarball in the artifacts dir
--mkdir -p $UPLOAD_DIR
--cp $HOME_DIR/hfsplus-tools.tar.* $UPLOAD_DIR
-diff --git a/taskcluster/scripts/misc/build-libdmg-hfsplus.sh b/taskcluster/scripts/misc/build-libdmg-hfsplus.sh
-deleted file mode 100755
---- a/taskcluster/scripts/misc/build-libdmg-hfsplus.sh
-+++ /dev/null
-@@ -1,39 +0,0 @@
--#!/bin/bash
--set -x -e -v
--
--# This script is for building libdmg-hfsplus to get the `dmg` and `hfsplus`
--# tools for producing DMG archives on Linux.
--
--WORKSPACE=$HOME/workspace
--STAGE=$WORKSPACE/dmg
--UPLOAD_DIR=$HOME/artifacts
--
--# There's no single well-maintained fork of libdmg-hfsplus, so we forked
--# https://github.com/andreas56/libdmg-hfsplus/ to get a specific version and
--# backport some patches.
--: LIBDMG_REPOSITORY    ${LIBDMG_REPOSITORY:=https://github.com/mozilla/libdmg-hfsplus}
--# The `mozilla` branch contains our fork.
--: LIBDMG_REV           ${LIBDMG_REV:=ba04b00435a0853f1499d751617177828ee8ec00}
--
--mkdir -p $UPLOAD_DIR $STAGE
--
--cd $WORKSPACE
--git clone --no-checkout $LIBDMG_REPOSITORY libdmg-hfsplus
--cd libdmg-hfsplus
--git checkout $LIBDMG_REV
--
--# Make a source archive
--git archive --prefix=libdmg-hfsplus/ ${LIBDMG_REV} | xz > $UPLOAD_DIR/libdmg-hfsplus.tar.xz
--cmake .
--make -j$(getconf _NPROCESSORS_ONLN)
--
--# We only need the dmg and hfsplus tools.
--strip dmg/dmg hfs/hfsplus
--cp dmg/dmg hfs/hfsplus $STAGE
--
--cat >$STAGE/README<<EOF
--Built from ${LIBDMG_REPOSITORY} rev `git rev-parse ${LIBDMG_REV}`.
--Source is available as a taskcluster artifact:
--https://queue.taskcluster.net/v1/task/$TASK_ID/artifacts/public/libdmg-hfsplus.tar.xz
--EOF
--tar cf - -C $WORKSPACE `basename $STAGE` | xz > $UPLOAD_DIR/dmg.tar.xz
-diff --git a/taskcluster/scripts/misc/build-sccache.sh b/taskcluster/scripts/misc/build-sccache.sh
-deleted file mode 100755
---- a/taskcluster/scripts/misc/build-sccache.sh
-+++ /dev/null
-@@ -1,48 +0,0 @@
--#!/bin/bash
--set -x -e -v
--
--SCCACHE_REVISION=43300e1976bdbfc8dbda30e22a00ce2cce54e9de
--
--# This script is for building sccache
--
--case "$(uname -s)" in
--Linux)
--    WORKSPACE=$HOME/workspace
--    UPLOAD_DIR=$HOME/artifacts
--    export CC=clang
--    PATH="$WORKSPACE/build/src/clang/bin:$PATH"
--    COMPRESS_EXT=xz
--    ;;
--MINGW*)
--    WORKSPACE=$PWD
--    UPLOAD_DIR=$WORKSPACE/public/build
--    WIN_WORKSPACE="$(pwd -W)"
--    COMPRESS_EXT=bz2
--
--    export INCLUDE="$WIN_WORKSPACE/build/src/vs2015u3/VC/include;$WIN_WORKSPACE/build/src/vs2015u3/VC/atlmfc/include;$WIN_WORKSPACE/build/src/vs2015u3/SDK/Include/10.0.14393.0/ucrt;$WIN_WORKSPACE/build/src/vs2015u3/SDK/Include/10.0.14393.0/shared;$WIN_WORKSPACE/build/src/vs2015u3/SDK/Include/10.0.14393.0/um;$WIN_WORKSPACE/build/src/vs2015u3/SDK/Include/10.0.14393.0/winrt;$WIN_WORKSPACE/build/src/vs2015u3/DIA SDK/include"
--
--    export LIB="$WIN_WORKSPACE/build/src/vs2015u3/VC/lib/amd64;$WIN_WORKSPACE/build/src/vs2015u3/VC/atlmfc/lib/amd64;$WIN_WORKSPACE/build/src/vs2015u3/SDK/lib/10.0.14393.0/um/x64;$WIN_WORKSPACE/build/src/vs2015u3/SDK/lib/10.0.14393.0/ucrt/x64;$WIN_WORKSPACE/build/src/vs2015u3/DIA SDK/lib/amd64"
--
--    PATH="$WORKSPACE/build/src/vs2015u3/VC/bin/amd64:$WORKSPACE/build/src/vs2015u3/VC/bin:$WORKSPACE/build/src/vs2015u3/SDK/bin/x64:$WORKSPACE/build/src/vs2015u3/redist/x64/Microsoft.VC140.CRT:$WORKSPACE/build/src/vs2015u3/SDK/Redist/ucrt/DLLs/x64:$WORKSPACE/build/src/vs2015u3/DIA SDK/bin/amd64:$WORKSPACE/build/src/mingw64/bin:$PATH"
--    ;;
--esac
--
--cd $WORKSPACE/build/src
--
--. taskcluster/scripts/misc/tooltool-download.sh
--
--PATH="$PWD/rustc/bin:$PATH"
--
--git clone https://github.com/mozilla/sccache sccache
--
--cd sccache
--
--git checkout $SCCACHE_REVISION
--
--cargo build --verbose --release
--
--mkdir sccache2
--cp target/release/sccache* sccache2/
--tar -acf sccache2.tar.$COMPRESS_EXT sccache2
--mkdir -p $UPLOAD_DIR
--cp sccache2.tar.$COMPRESS_EXT $UPLOAD_DIR
-diff --git a/taskcluster/scripts/misc/minidump_stackwalk.sh b/taskcluster/scripts/misc/minidump_stackwalk.sh
-deleted file mode 100755
---- a/taskcluster/scripts/misc/minidump_stackwalk.sh
-+++ /dev/null
-@@ -1,125 +0,0 @@
--#!/bin/bash
--#
--# This script builds minidump_stackwalk binaries from the Google Breakpad
--# source for all of the operating systems that we run Firefox tests on:
--# Linux x86, Linux x86-64, Windows x86, OS X x86-64.
--#
--# It expects to be run in the luser/breakpad-builder:0.7 Docker image and
--# needs access to the relengapiproxy to download internal tooltool files.
--
--set -v -e -x
--
--# This is a pain to support properly with gclient.
--#: BREAKPAD_REPO        ${BREAKPAD_REPO:=https://google-breakpad.googlecode.com/svn/trunk/}
--: BREAKPAD_REV         "${BREAKPAD_REV:=master}"
--: STACKWALK_HTTP_REPO  "${STACKWALK_HTTP_REPO:=https://hg.mozilla.org/users/tmielczarek_mozilla.com/stackwalk-http}"
--: STACKWALK_HTTP_REV   "${STACKWALK_HTTP_REV:=default}"
--
--ncpu=$(getconf _NPROCESSORS_ONLN)
--
--function build()
--{
--    cd /tmp
--    local platform=$1
--    local strip_prefix=$2
--    local configure_args=$3
--    local make_args=$4
--    local objdir=/tmp/obj-breakpad-$platform
--    local ext=
--    if test "$platform" = "win32"; then
--        ext=.exe
--    fi
--    rm -rf "$objdir"
--    mkdir "$objdir"
--    # First, build Breakpad
--    cd "$objdir"
--    # shellcheck disable=SC2086
--    CFLAGS="-O2 $CFLAGS" CXXFLAGS="-O2 $CXXFLAGS" /tmp/breakpad/src/configure --disable-tools $configure_args
--    # shellcheck disable=SC2086
--    make -j$ncpu $make_args src/libbreakpad.a src/third_party/libdisasm/libdisasm.a src/processor/stackwalk_common.o
--    # Second, build stackwalk-http
--    make -f /tmp/stackwalk-http/Makefile BREAKPAD_SRCDIR=/tmp/breakpad/src "BREAKPAD_OBJDIR=$(pwd)" "OS=$platform" "-j$ncpu"
--    "${strip_prefix}strip" "stackwalk${ext}"
--    cp "stackwalk${ext}" "/tmp/stackwalker/${platform}-minidump_stackwalk${ext}"
--}
--
--function linux64()
--{
--    export LDFLAGS="-static-libgcc -static-libstdc++"
--    build linux64
--    unset LDFLAGS
--}
--
--function linux32()
--{
--    export LDFLAGS="-static-libgcc -static-libstdc++ -L/tmp/libcurl-i386/lib"
--    export CFLAGS="-m32 -I/tmp/libcurl-i386/include"
--    export CXXFLAGS="-m32 -I/tmp/libcurl-i386/include"
--    build linux32 "" "--enable-m32"
--    unset LDFLAGS CFLAGS CXXFLAGS
--}
--
--function macosx64()
--{
--    cd /tmp
--    if ! test -d MacOSX10.7.sdk; then
--      python tooltool.py -v --manifest=macosx-sdk.manifest --url=http://relengapi/tooltool/ fetch
--    fi
--    export MACOSX_SDK=/tmp/MacOSX10.7.sdk
--    export CCTOOLS=/tmp/cctools
--    local FLAGS="-stdlib=libc++ -target x86_64-apple-darwin10 -mlinker-version=136 -B /tmp/cctools/bin -isysroot ${MACOSX_SDK} -mmacosx-version-min=10.7"
--    export CC="clang $FLAGS"
--    export CXX="clang++ $FLAGS -std=c++11"
--    local old_path="$PATH"
--    export PATH="/tmp/clang/bin:/tmp/cctools/bin/:$PATH"
--    export LD_LIBRARY_PATH=/usr/lib/llvm-3.6/lib/
--
--    build macosx64 "/tmp/cctools/bin/x86_64-apple-darwin10-" "--host=x86_64-apple-darwin10" "AR=/tmp/cctools/bin/x86_64-apple-darwin10-ar"
--
--    unset CC CXX LD_LIBRARY_PATH MACOSX_SDK CCTOOLS
--    export PATH="$old_path"
--}
--
--function win32()
--{
--    export LDFLAGS="-static-libgcc -static-libstdc++"
--    export CFLAGS="-D__USE_MINGW_ANSI_STDIO=1"
--    export CXXFLAGS="-D__USE_MINGW_ANSI_STDIO=1"
--    export ZLIB_DIR=/tmp/zlib-mingw
--    build win32 "i686-w64-mingw32-" "--host=i686-w64-mingw32"
--    unset LDFLAGS CFLAGS CXXFLAGS ZLIB_DIR
--}
--
--cd /tmp
--if ! test -d depot_tools; then
--  git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git
--else
--  (cd depot_tools; git pull origin master)
--fi
--export PATH=$(pwd)/depot_tools:"$PATH"
--if ! test -d breakpad; then
--    mkdir breakpad
--    pushd breakpad
--    fetch breakpad
--    popd
--else
--    pushd breakpad/src
--    git pull origin master
--    popd
--fi
--pushd breakpad/src
--git checkout "${BREAKPAD_REV}"
--gclient sync
--popd
--
--(cd breakpad/src; git rev-parse master)
--if ! test -d stackwalk-http; then
--  hg clone -u "$STACKWALK_HTTP_REV" "$STACKWALK_HTTP_REPO"
--else
--  (cd stackwalk-http && hg pull "$STACKWALK_HTTP_REPO" && hg up "$STACKWALK_HTTP_REV")
--fi
--mkdir -p stackwalker
--linux64
--linux32
--macosx64
--win32
-diff --git a/taskcluster/scripts/misc/tooltool-download.sh b/taskcluster/scripts/misc/tooltool-download.sh
-deleted file mode 100644
---- a/taskcluster/scripts/misc/tooltool-download.sh
-+++ /dev/null
-@@ -1,35 +0,0 @@
--# Fetch a tooltool manifest.
--
--cd $WORKSPACE/build/src
--
--case "`uname -s`" in
--Linux)
--    TOOLTOOL_AUTH_FILE=/builds/relengapi.tok
--    ;;
--MINGW*)
--    TOOLTOOL_AUTH_FILE=c:/builds/relengapi.tok
--    ;;
--esac
--
--TOOLTOOL_DL_FLAGS=
--
--if [ -e "$TOOLTOOL_AUTH_FILE" ]; then
--    # When the worker has the relengapi token pass it down
--    TOOLTOOL_DL_FLAGS="${TOOLTOOL_DL_FLAGS=} --authentication-file=$TOOLTOOL_AUTH_FILE"
--fi
--
--if [ -n "$RELENGAPI_PORT" ]; then
--    # When the worker has the relengapi proxy setup, use it.
--    TOOLTOOL_DL_FLAGS="${TOOLTOOL_DL_FLAGS=} --tooltool-url=http://relengapi/tooltool/"
--fi
--
--if [ -n "$UPLOAD_DIR" ]; then
--    TOOLTOOL_DL_FLAGS="${TOOLTOOL_DL_FLAGS=} --artifact-manifest $UPLOAD_DIR/toolchains.json"
--fi
--
--: TOOLTOOL_CACHE                ${TOOLTOOL_CACHE:=/builds/worker/tooltool-cache}
--export TOOLTOOL_CACHE
--
--./mach artifact toolchain -v${TOOLTOOL_DL_FLAGS}${TOOLTOOL_MANIFEST:+ --tooltool-manifest "${TOOLTOOL_MANIFEST}"}${TOOLTOOL_CACHE:+ --cache-dir ${TOOLTOOL_CACHE}} --retry 5${MOZ_TOOLCHAINS:+ ${MOZ_TOOLCHAINS}}
--
--cd $OLDPWD
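
tooltool-download.sh builds its `mach artifact toolchain` command line by appending each flag only when the corresponding input is present, then expanding the unquoted string. The same accumulate-then-expand pattern in isolation, with hypothetical paths:

    #!/bin/bash
    FLAGS=
    # Append each optional flag only when its input exists.
    [ -e /builds/relengapi.tok ] && FLAGS="$FLAGS --authentication-file=/builds/relengapi.tok"
    [ -n "$UPLOAD_DIR" ]         && FLAGS="$FLAGS --artifact-manifest $UPLOAD_DIR/toolchains.json"
    # Deliberately unquoted so the accumulated string splits into words.
    ./mach artifact toolchain -v $FLAGS --retry 5
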
-diff --git a/taskcluster/scripts/tester/run-wizard b/taskcluster/scripts/tester/run-wizard
-deleted file mode 100755
---- a/taskcluster/scripts/tester/run-wizard
-+++ /dev/null
-@@ -1,178 +0,0 @@
--#!/usr/bin/env python
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import print_function, unicode_literals
--
--import datetime
--import os
--import subprocess
--import sys
--import time
--from distutils.spawn import find_executable
--from textwrap import wrap
--
--here = os.path.dirname(os.path.abspath(__file__))
--MOZHARNESS_WORKDIR = os.path.expanduser(os.path.join('~', 'workspace', 'build'))
--
--MACH_SETUP_FINISHED = """
--Mozharness has finished downloading the build and tests to:
--{}
--
--A limited mach environment has also been set up and added to the $PATH, but
--it may be missing the command you need. To see a list of commands, run:
--    $ mach help
--""".lstrip().format(MOZHARNESS_WORKDIR)
--
--MACH_SETUP_FAILED = """
--Could not set up mach environment, no mach binary detected.
--""".lstrip()
--
--
--def call(cmd, **kwargs):
--    print(" ".join(cmd))
--    return subprocess.call(cmd, **kwargs)
--
--
--def wait_for_run_mozharness(timeout=60):
--    starttime = datetime.datetime.now()
--    while datetime.datetime.now() - starttime < datetime.timedelta(seconds=timeout):
--        if os.path.isfile(os.path.join(here, 'run-mozharness')):
--            break
--        time.sleep(0.2)
--    else:
--        print("Timed out after %d seconds waiting for the 'run-mozharness' binary" % timeout)
--        return 1
--
--
--def setup_mach_environment():
--    mach_src = os.path.join(MOZHARNESS_WORKDIR, 'tests', 'mach')
--    if not os.path.isfile(mach_src):
--        return 1
--
--    mach_dest = os.path.expanduser(os.path.join('~', 'bin', 'mach'))
--    if os.path.exists(mach_dest):
--        os.remove(mach_dest)
--    os.symlink(mach_src, mach_dest)
--    return 0
--
--
--def run_mozharness(*args):
--    wait_for_run_mozharness()
--    try:
--        return call(['run-mozharness'] + list(args))
--    finally:
--        setup_mach_environment()
--
--
--def setup():
--    """Run the mozharness script without the 'run-tests' action.
--
--    This will do all the necessary setup steps like creating a virtualenv and
--    downloading the tests and firefox binary. But it stops before running the
--    tests.
--    """
--    status = run_mozharness('--no-run-tests')
--
--    if find_executable('mach'):
--        print(MACH_SETUP_FINISHED)
--    else:
--        print(MACH_SETUP_FAILED)
--
--    return status
--
--
--def clone():
--    """Clone the correct gecko repository and update to the proper revision."""
--    base_repo = os.environ['GECKO_HEAD_REPOSITORY']
--    dest = os.path.expanduser(os.path.join('~', 'gecko'))
--
--    # Specify method to check out a revision. This defaults to revisions as
--    # SHA-1 strings, but also supports symbolic revisions like `tip` via the
--    # branch flag.
--    if os.environ.get('GECKO_HEAD_REV'):
--        revision_flag = b'--revision'
--        revision = os.environ['GECKO_HEAD_REV']
--    elif os.environ.get('GECKO_HEAD_REF'):
--        revision_flag = b'--branch'
--        revision = os.environ['GECKO_HEAD_REF']
--    else:
--        print('revision is not specified for checkout')
--        return 1
--
--    # TODO Bug 1301382 - pin hg.mozilla.org fingerprint.
--    call([
--        b'/usr/bin/hg', b'robustcheckout',
--        b'--sharebase', os.environ['HG_STORE_PATH'],
--        b'--purge',
--        b'--upstream', b'https://hg.mozilla.org/mozilla-unified',
--        revision_flag, revision,
--        base_repo, dest
--    ])
--    print("Finished cloning to {} at revision {}.".format(dest, revision))
--
--
--def exit():
--    pass
--
--
--OPTIONS = [
--    ('Resume task', run_mozharness,
--     "Resume the original task without modification. This can be useful for "
--     "passively monitoring it from another shell."),
--    ('Setup task', setup,
--     "Setup the task (download the application and tests) but don't run the "
--     "tests just yet. The tests can be run with a custom configuration later. "
--     "This will provide a mach environment (experimental)."),
--    ('Clone gecko', clone,
--     "Perform a clone of gecko using the task's repo and update it to the "
--     "task's revision."),
--    ('Exit', exit, "Exit this wizard and return to the shell.")
--]
--
--
--def _fmt_options():
--    max_line_len = 60
--    max_name_len = max(len(o[0]) for o in OPTIONS)
--
--    # TODO Pad will be off if there are more than 9 options.
--    pad = ' ' * (max_name_len+6)
--
--    msg = []
--    for i, (name, _, desc) in enumerate(OPTIONS):
--        desc = wrap(desc, width=max_line_len)
--        desc = [desc[0]] + [pad + l for l in desc[1:]]
--
--        optstr = '{}) {} - {}\n'.format(
--            i+1, name.ljust(max_name_len), '\n'.join(desc))
--        msg.append(optstr)
--    msg.append("Select one of the above options: ")
--    return '\n'.join(msg)
--
--
--def wizard():
--    print("This wizard can help you get started with some common debugging "
--          "workflows.\nWhat would you like to do?\n")
--    print(_fmt_options(), end="")
--    choice = None
--    while True:
--        choice = raw_input().decode('utf8')
--        try:
--            choice = int(choice)-1
--            if 0 <= choice < len(OPTIONS):
--                break
--        except ValueError:
--            pass
--
--        print("Must provide an integer from 1-{}:".format(len(OPTIONS)))
--
--    func = OPTIONS[choice][1]
--    ret = func()
--
--    print("Use the 'run-wizard' command to start this wizard again.")
--    return ret
--
--
--if __name__ == '__main__':
--    sys.exit(wizard())
-diff --git a/taskcluster/scripts/tester/test-linux.sh b/taskcluster/scripts/tester/test-linux.sh
-deleted file mode 100644
---- a/taskcluster/scripts/tester/test-linux.sh
-+++ /dev/null
-@@ -1,202 +0,0 @@
--#! /bin/bash -xe
--
--set -x -e
--
--echo "running as" $(id)
--
--# Detect release version.
--. /etc/lsb-release
--if [ "${DISTRIB_RELEASE}" == "12.04" ]; then
--    UBUNTU_1204=1
--elif [ "${DISTRIB_RELEASE}" == "16.04" ]; then
--    UBUNTU_1604=1
--fi
--
--####
--# Taskcluster friendly wrapper for performing fx desktop tests via mozharness.
--####
--
--# Inputs, with defaults
--
--: MOZHARNESS_PATH               ${MOZHARNESS_PATH}
--: MOZHARNESS_URL                ${MOZHARNESS_URL}
--: MOZHARNESS_SCRIPT             ${MOZHARNESS_SCRIPT}
--: MOZHARNESS_CONFIG             ${MOZHARNESS_CONFIG}
--: NEED_XVFB                     ${NEED_XVFB:=true}
--: NEED_WINDOW_MANAGER           ${NEED_WINDOW_MANAGER:=false}
--: NEED_PULSEAUDIO               ${NEED_PULSEAUDIO:=false}
--: START_VNC                     ${START_VNC:=false}
--: TASKCLUSTER_INTERACTIVE       ${TASKCLUSTER_INTERACTIVE:=false}
--: WORKSPACE                     ${WORKSPACE:=$HOME/workspace}
--: mozharness args               "${@}"
--
--set -v
--mkdir -p $WORKSPACE
--cd $WORKSPACE
--
--fail() {
--    echo # make sure error message is on a new line
--    echo "[test-linux.sh:error]" "${@}"
--    exit 1
--}
--
--maybe_start_pulse() {
--    if $NEED_PULSEAUDIO; then
--        pulseaudio --fail --daemonize --start
--        pactl load-module module-null-sink
--    fi
--}
--
--# test required parameters are supplied
--if [ -z "${MOZHARNESS_PATH}" -a -z "${MOZHARNESS_URL}" ]; then
--    fail "MOZHARNESS_PATH or MOZHARNESS_URL must be defined";
--fi
--
--if [[ -z ${MOZHARNESS_SCRIPT} ]]; then fail "MOZHARNESS_SCRIPT is not set"; fi
--if [[ -z ${MOZHARNESS_CONFIG} ]]; then fail "MOZHARNESS_CONFIG is not set"; fi
--
--# make sure artifact directories exist
--mkdir -p $WORKSPACE/build/upload/logs
--mkdir -p ~/artifacts/public
--mkdir -p $WORKSPACE/build/blobber_upload_dir
--
--cleanup() {
--    local rv=$?
--    if [[ -s $HOME/.xsession-errors ]]; then
--      # To share X issues
--      cp $HOME/.xsession-errors ~/artifacts/public/xsession-errors.log
--    fi
--    if $NEED_XVFB; then
--        cleanup_xvfb
--    fi
--    exit $rv
--}
--trap cleanup EXIT INT
--
--# Download mozharness with exponential backoff
--# curl already applies exponential backoff, but not for all
--# failed cases, apparently, as we keep getting failed downloads
--# with a 404 code.
--download_mozharness() {
--    local max_attempts=10
--    local timeout=1
--    local attempt=0
--
--    echo "Downloading mozharness"
--
--    while [[ $attempt -lt $max_attempts ]]; do
--        if curl --fail -o mozharness.zip --retry 10 -L $MOZHARNESS_URL; then
--            rm -rf mozharness
--            if unzip -q mozharness.zip; then
--                return 0
--            fi
--            echo "error unzipping mozharness.zip" >&2
--        else
--            echo "failed to download mozharness zip" >&2
--        fi
--        echo "Download failed, retrying in $timeout seconds..." >&2
--        sleep $timeout
--        timeout=$((timeout*2))
--        attempt=$((attempt+1))
--    done
--
--    fail "Failed to download and unzip mozharness"
--}
--
--# Download mozharness if we're told to.
--if [ ${MOZHARNESS_URL} ]; then
--    download_mozharness
--    rm mozharness.zip
--
--    if ! [ -d mozharness ]; then
--        fail "mozharness zip did not contain mozharness/"
--    fi
--
--    MOZHARNESS_PATH=`pwd`/mozharness
--fi
--
--# pulseaudio daemon must be started before xvfb on Ubuntu 12.04.
--if [ "${UBUNTU_1204}" ]; then
--    maybe_start_pulse
--fi
--
--# run Xvfb in the background, if necessary
--if $NEED_XVFB; then
--    # note that this file is not available when run under native-worker
--    . $HOME/scripts/xvfb.sh
--    start_xvfb '1600x1200x24' 0
--fi
--
--if $START_VNC; then
--    x11vnc > ~/artifacts/public/x11vnc.log 2>&1 &
--fi
--
--if $NEED_WINDOW_MANAGER; then
--    # This is read by xsession to select the window manager
--    echo DESKTOP_SESSION=ubuntu > $HOME/.xsessionrc
--
--    # note that doing anything with this display before running Xsession will cause sadness (like,
--    # crashes in compiz). Make sure that X has enough time to start
--    sleep 15
--    # DISPLAY has already been set above
--    # XXX: it would be ideal to add semaphore logic to make sure that the
--    # window manager is ready
--    /etc/X11/Xsession 2>&1 &
--
--    # Turn off the screen saver and screen locking
--    gsettings set org.gnome.desktop.screensaver idle-activation-enabled false
--    gsettings set org.gnome.desktop.screensaver lock-enabled false
--    gsettings set org.gnome.desktop.screensaver lock-delay 3600
--    # Disable the screen saver
--    xset s off s reset
--
--    if [ "${UBUNTU_1604}" ]; then
--        # start compiz for our window manager
--        compiz 2>&1 &
--        #TODO: how to determine if compiz starts correctly?
--    fi
--fi
--
--if [ "${UBUNTU_1604}" ]; then
--    maybe_start_pulse
--fi
--
--# For telemetry purposes, the build process wants information about the
--# source it is running; tc-vcs obscures this a little, but we can provide
--# it directly.
--export MOZ_SOURCE_REPO="${GECKO_HEAD_REPOSITORY}"
--export MOZ_SOURCE_CHANGESET="${GECKO_HEAD_REV}"
--
--# support multiple, space delimited, config files
--config_cmds=""
--for cfg in $MOZHARNESS_CONFIG; do
--  config_cmds="${config_cmds} --config-file ${MOZHARNESS_PATH}/configs/${cfg}"
--done
--
--mozharness_bin="$HOME/bin/run-mozharness"
--mkdir -p $(dirname $mozharness_bin)
--
--# Save the computed mozharness command to a binary which is useful
--# for interactive mode.
--echo -e "#!/usr/bin/env bash
--# Some mozharness scripts assume base_work_dir is in
--# the current working directory, see bug 1279237
--cd $WORKSPACE
--cmd=\"python2.7 ${MOZHARNESS_PATH}/scripts/${MOZHARNESS_SCRIPT} ${config_cmds} ${@} \${@}\"
--echo \"Running: \${cmd}\"
--exec \${cmd}" > ${mozharness_bin}
--chmod +x ${mozharness_bin}
--
--# In interactive mode, the user will be prompted with options for what to do.
--if ! $TASKCLUSTER_INTERACTIVE; then
--  # run the given mozharness script and configs, but pass the rest of the
--  # arguments in from our own invocation
--  ${mozharness_bin};
--fi
--
--# Run a custom mach command (this is typically used by action tasks to run
--# harnesses in a particular way)
--if [ "$CUSTOM_MACH_COMMAND" ]; then
--    eval "$HOME/workspace/build/tests/mach ${CUSTOM_MACH_COMMAND}"
--    exit $?
--fi
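
The download_mozharness() retry loop doubles its sleep after every failed attempt. The backoff skeleton on its own, assuming a $URL to fetch:

    #!/bin/bash
    max_attempts=10
    timeout=1
    attempt=0
    while [ "$attempt" -lt "$max_attempts" ]; do
        if curl --fail -o out.zip -L "$URL"; then
            exit 0
        fi
        echo "Download failed, retrying in $timeout seconds..." >&2
        sleep "$timeout"
        timeout=$((timeout*2))     # exponential backoff: 1, 2, 4, 8, ...
        attempt=$((attempt+1))
    done
    echo "Failed to download $URL" >&2
    exit 1
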
-diff --git a/taskcluster/scripts/tester/test-macosx.sh b/taskcluster/scripts/tester/test-macosx.sh
-deleted file mode 100644
---- a/taskcluster/scripts/tester/test-macosx.sh
-+++ /dev/null
-@@ -1,77 +0,0 @@
--#! /bin/bash -xe
--
--set -x -e
--
--echo "running as" $(id)
--
--####
--# Taskcluster friendly wrapper for performing fx Mac OS X tests via mozharness.
--####
--
--# Inputs, with defaults
--
--: MOZHARNESS_URL                ${MOZHARNESS_URL}
--: MOZHARNESS_SCRIPT             ${MOZHARNESS_SCRIPT}
--: MOZHARNESS_CONFIG             ${MOZHARNESS_CONFIG}
--
--WORKSPACE=$HOME
--cd $WORKSPACE
--
--rm -rf artifacts
--mkdir artifacts
--
--# test required parameters are supplied
--if [[ -z ${MOZHARNESS_URL} ]]; then fail "MOZHARNESS_URL is not set"; fi
--if [[ -z ${MOZHARNESS_SCRIPT} ]]; then fail "MOZHARNESS_SCRIPT is not set"; fi
--if [[ -z ${MOZHARNESS_CONFIG} ]]; then fail "MOZHARNESS_CONFIG is not set"; fi
--
--# Download mozharness with exponential backoff
--# curl already applies exponential backoff, but not for all
--# failed cases, apparently, as we keep getting failed downloads
--# with a 404 code.
--download_mozharness() {
--    local max_attempts=10
--    local timeout=1
--    local attempt=0
--
--    echo "Downloading mozharness"
--
--    while [[ $attempt -lt $max_attempts ]]; do
--        if curl --fail -o mozharness.zip --retry 10 -L $MOZHARNESS_URL; then
--            rm -rf mozharness
--            if unzip -q mozharness.zip; then
--                return 0
--            fi
--            echo "error unzipping mozharness.zip" >&2
--        else
--            echo "failed to download mozharness zip" >&2
--        fi
--        echo "Download failed, retrying in $timeout seconds..." >&2
--        sleep $timeout
--        timeout=$((timeout*2))
--        attempt=$((attempt+1))
--    done
--
--    fail "Failed to download and unzip mozharness"
--}
--
--download_mozharness
--rm mozharness.zip
--
--# For telemetry purposes, the build process wants information about the
--# source it is running; tc-vcs obscures this a little, but we can provide
--# it directly.
--export MOZ_SOURCE_REPO="${GECKO_HEAD_REPOSITORY}"
--export MOZ_SOURCE_CHANGESET="${GECKO_HEAD_REV}"
--
--# support multiple, space delimited, config files
--config_cmds=""
--for cfg in $MOZHARNESS_CONFIG; do
--  config_cmds="${config_cmds} --config-file ${cfg}"
--done
--
--rm -rf build logs properties target.dmg
--
--# run the given mozharness script and configs, but pass the rest of the
--# arguments in from our own invocation
--python2.7 $WORKSPACE/mozharness/scripts/${MOZHARNESS_SCRIPT} ${config_cmds} "${@}"
-diff --git a/taskcluster/taskgraph/__init__.py b/taskcluster/taskgraph/__init__.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/__init__.py
-+++ /dev/null
-@@ -1,7 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--import os
--
--GECKO = os.path.realpath(os.path.join(__file__, '..', '..', '..'))
-diff --git a/taskcluster/taskgraph/action.py b/taskcluster/taskgraph/action.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/action.py
-+++ /dev/null
-@@ -1,146 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import logging
--import requests
--
--from .create import create_tasks
--from .decision import write_artifact
--from .optimize import optimize_task_graph
--from .taskgraph import TaskGraph
--from .util.taskcluster import get_artifact
--
--
--logger = logging.getLogger(__name__)
--TREEHERDER_URL = "https://treeherder.mozilla.org/api"
--
--# We set this to 5 for now because this is what SETA sets the
--# count to for every repository/job. If this is ever changed,
--# we'll need to have an API added to Treeherder to let us query
--# how far back we should look.
--MAX_BACKFILL_RESULTSETS = 5
--
--
--def add_tasks(decision_task_id, task_labels, prefix=''):
--    """
--    Run the add-tasks task.  This function implements `mach taskgraph add-tasks`,
--    and is responsible for
--
--     * creating a taskgraph of the tasks asked for in the parameters with
--     respect to a given gecko decision task and scheduling these jobs.
--    """
--    # read in the full graph for reference
--    full_task_json = get_artifact(decision_task_id, "public/full-task-graph.json")
--    decision_params = get_artifact(decision_task_id, "public/parameters.yml")
--    all_tasks, full_task_graph = TaskGraph.from_json(full_task_json)
--
--    target_tasks = set(task_labels)
--    target_graph = full_task_graph.graph.transitive_closure(target_tasks)
--    target_task_graph = TaskGraph(
--        {l: all_tasks[l] for l in target_graph.nodes},
--        target_graph)
--
--    existing_tasks = get_artifact(decision_task_id, "public/label-to-taskid.json")
--
--    # We don't want to optimize target tasks since they have been requested by the user,
--    # hence we put `target_tasks` under `do_not_optimize`.
--    optimized_graph, label_to_taskid = optimize_task_graph(target_task_graph=target_task_graph,
--                                                           params=decision_params,
--                                                           do_not_optimize=target_tasks,
--                                                           existing_tasks=existing_tasks)
--
--    # write out the optimized task graph to describe what will actually happen,
--    # and the map of labels to taskids
--    write_artifact('{}task-graph.json'.format(prefix), optimized_graph.to_json())
--    write_artifact('{}label-to-taskid.json'.format(prefix), label_to_taskid)
--    # actually create the graph
--    create_tasks(optimized_graph, label_to_taskid, decision_params)
--
--
--def backfill(project, job_id):
--    """
--    Run the backfill task.  This function implements `mach taskgraph backfill-task`,
--    and is responsible for
--
--     * Scheduling backfill jobs from a given treeherder resultset backwards until either
--     a successful job is found or `N` jobs have been scheduled.
--    """
--    s = requests.Session()
--    s.headers.update({"User-Agent": "gecko-intree-backfill-task"})
--
--    job = s.get(url="{}/project/{}/jobs/{}/".format(TREEHERDER_URL, project, job_id)).json()
--
--    job_type_name = job['job_type_name']
--
--    if job['build_system_type'] != 'taskcluster':
--        if 'Created by BBB for task' not in job['reason']:
--            logger.warning("Invalid build system type! Must be a Taskcluster job. Aborting.")
--            return
--        task_id = job['reason'].split(' ')[-1]
--        task = requests.get("https://queue.taskcluster.net/v1/task/{}".format(task_id)).json()
--        job_type_name = task['metadata']['name']
--
--    filters = dict((k, job[k]) for k in ("build_platform_id", "platform_option", "job_type_id"))
--
--    resultset_url = "{}/project/{}/resultset/".format(TREEHERDER_URL, project)
--    params = {"id__lt": job["result_set_id"], "count": MAX_BACKFILL_RESULTSETS}
--    results = s.get(url=resultset_url, params=params).json()["results"]
--    resultsets = [resultset["id"] for resultset in results]
--
--    for decision in load_decisions(s, project, resultsets, filters):
--        add_tasks(decision, [job_type_name], '{}-'.format(decision))
--
--
--def add_talos(decision_task_id, times=1):
--    """
--    Run the add-talos task.  This function implements `mach taskgraph add-talos`,
--    and is responsible for
--
--     * Adding all talos jobs to a push.
--    """
--    full_task_json = get_artifact(decision_task_id, "public/full-task-graph.json")
--    task_labels = [
--        label for label, task in full_task_json.iteritems()
--        if "talos_try_name" in task['attributes']
--    ]
--    for time in xrange(times):
--        add_tasks(decision_task_id, task_labels, '{}-'.format(time))
--
--
--def load_decisions(s, project, resultsets, filters):
--    """
--    Given a project, a list of resultset (push) ids, and a dict of filters,
--    return a list of taskIds of the corresponding decision tasks.
--    """
--    project_url = "{}/project/{}/jobs/".format(TREEHERDER_URL, project)
--    decisions = []
--    decision_ids = []
--
--    for resultset in resultsets:
--        unfiltered = []
--        offset = 0
--        jobs_per_call = 250
--        while True:
--            params = {"push_id": resultset, "count": jobs_per_call, "offset": offset}
--            results = s.get(url=project_url, params=params).json()["results"]
--            unfiltered += results
--            if len(results) < jobs_per_call:
--                break
--            offset += jobs_per_call
--        filtered = [j for j in unfiltered if all([j[k] == filters[k] for k in filters])]
--        if filtered and all([j["result"] == "success" for j in filtered]):
--            logger.info("Push found with all green jobs for this type. Continuing.")
--            break
--        decisions += [t for t in unfiltered if t["job_type_name"] == "Gecko Decision Task"]
--
--    for decision in decisions:
--        job_url = project_url + '{}/'.format(decision["id"])
--        taskcluster_metadata = s.get(url=job_url).json()["taskcluster_metadata"]
--        decision_ids.append(taskcluster_metadata["task_id"])
--
--    return decision_ids
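The paging loop in `load_decisions` is a common pattern: request fixed-size pages until a short page signals the end. In isolation, with a hypothetical `fetch_page` standing in for the Treeherder HTTP call:

    def fetch_all(fetch_page, per_page=250):
        """Collect results from a paged endpoint until a short page arrives."""
        results = []
        offset = 0
        while True:
            page = fetch_page(offset=offset, count=per_page)
            results += page
            if len(page) < per_page:  # short page: nothing left to fetch
                break
            offset += per_page
        return results

    # toy "endpoint" backed by a list, for demonstration
    data = list(range(600))
    print(len(fetch_all(lambda offset, count: data[offset:offset + count])))

Note the termination condition relies on the server honouring `count`; a defensive implementation would also cap the number of iterations.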
-diff --git a/taskcluster/taskgraph/action.yml b/taskcluster/taskgraph/action.yml
-deleted file mode 100644
---- a/taskcluster/taskgraph/action.yml
-+++ /dev/null
-@@ -1,68 +0,0 @@
-----
--created: '{{now}}'
--deadline: '{{#from_now}}1 day{{/from_now}}'
--expires: '{{#from_now}}14 day{{/from_now}}'
--metadata:
--  owner: mozilla-taskcluster-maintenance@mozilla.com
--  source: 'https://hg.mozilla.org/{{project}}/file/{{head_rev}}/taskcluster/taskgraph/action.yml'
--  name: "[tc] Action Task"
--  description: Helps schedule new jobs without a new push
--
--workerType: "gecko-decision"
--provisionerId: "aws-provisioner-v1"
--schedulerId: "gecko-level-{{level}}"
--
--tags:
--  createdForUser: {{owner}}
--
--scopes:
--  - {{repo_scope}}
--
--routes:
--  - "tc-treeherder.v2.{{project}}.{{head_rev}}.{{pushlog_id}}"
--  - "tc-treeherder-stage.v2.{{project}}.{{head_rev}}.{{pushlog_id}}"
--
--payload:
--  env:
--    GECKO_BASE_REPOSITORY: 'https://hg.mozilla.org/mozilla-unified'
--    GECKO_HEAD_REPOSITORY: '{{{head_repository}}}'
--    GECKO_HEAD_REF: '{{head_ref}}'
--    GECKO_HEAD_REV: '{{head_rev}}'
--    HG_STORE_PATH: /builds/worker/checkouts/hg-store
--
--  cache:
--    level-{{level}}-checkouts: /builds/worker/checkouts
--
--  features:
--    taskclusterProxy: true
--
--  # Note: This task is built server side, without the context or tooling that
--  # exist in-tree, so we must hard-code the version.
--  image: 'taskcluster/decision:0.1.7'
--
--  # Virtually no network or other potentially risky operations happen as part
--  # of the task aside from the initial clone, so we intentionally set this
--  # timeout to a lower value; _all_ decision tasks should use a root
--  # repository which is cached.
--  maxRunTime: 1800
--
--  command:
--    - /builds/worker/bin/run-task
--    - '--vcs-checkout=/builds/worker/checkouts/gecko'
--    - '--'
--    - bash
--    - -cx
--    - >
--        cd /builds/worker/checkouts/gecko &&
--        ln -s /builds/worker/artifacts artifacts &&
--        ./mach --log-no-times taskgraph {{action}} {{action_args}}
--
--  artifacts:
--    'public':
--      type: 'directory'
--      path: '/builds/worker/artifacts'
--      expires: '{{#from_now}}7 days{{/from_now}}'
--
--extra:
--  treeherder:
--    symbol: A
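The `{{#from_now}}1 day{{/from_now}}` sections above render relative offsets into absolute ISO-8601 timestamps when the task is instantiated. A rough sketch of that computation, assuming the same `N <unit>` phrasing (illustrative only; the real rendering happens in the template engine, not in-tree):

    from datetime import datetime, timedelta

    def from_now(offset, now=None):
        """Render an offset like '1 day' or '14 day' as an ISO-8601 stamp."""
        amount, unit = offset.split()
        delta = timedelta(**{unit.rstrip('s') + 's': int(amount)})
        when = (now or datetime.utcnow()) + delta
        return when.strftime('%Y-%m-%dT%H:%M:%S.000Z')

    print(from_now('14 day', now=datetime(2020, 1, 1)))
    # 2020-01-15T00:00:00.000Z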
-diff --git a/taskcluster/taskgraph/actions/__init__.py b/taskcluster/taskgraph/actions/__init__.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/actions/__init__.py
-+++ /dev/null
-@@ -1,18 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from .registry import (
--    register_task_action, register_callback_action, render_actions_json, trigger_action_callback,
--)
--
--__all__ = [
--    'register_task_action',
--    'register_callback_action',
--    'render_actions_json',
--    'trigger_action_callback',
--]
-diff --git a/taskcluster/taskgraph/actions/add-new-jobs.py b/taskcluster/taskgraph/actions/add-new-jobs.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/actions/add-new-jobs.py
-+++ /dev/null
-@@ -1,58 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from .registry import register_callback_action
--from slugid import nice as slugid
--
--from .util import (create_task, find_decision_task)
--from taskgraph.util.taskcluster import get_artifact
--from taskgraph.util.parameterization import resolve_task_references
--from taskgraph.taskgraph import TaskGraph
--
--
--@register_callback_action(
--    name='add-new-jobs',
--    title='Add new jobs',
--    symbol='add-new',
--    description="Add new jobs using task labels",
--    order=10000,
--    context=[{}],
--    schema={
--        'type': 'object',
--        'properties': {
--            'tasks': {
--                'type': 'array',
--                'description': 'An array of task labels',
--                'items': {
--                    'type': 'string'
--                }
--            }
--        }
--    }
--)
--def add_new_jobs_action(parameters, input, task_group_id, task_id, task):
--    decision_task_id = find_decision_task(parameters)
--
--    full_task_graph = get_artifact(decision_task_id, "public/full-task-graph.json")
--    _, full_task_graph = TaskGraph.from_json(full_task_graph)
--    label_to_taskid = get_artifact(decision_task_id, "public/label-to-taskid.json")
--
--    for elem in input['tasks']:
--        if elem in full_task_graph.tasks:
--            task = full_task_graph.tasks[elem]
--
--            # fix up the task's dependencies, similar to what optimization
--            # would have done in the decision task
--            dependencies = {name: label_to_taskid[label]
--                            for name, label in task.dependencies.iteritems()}
--            task_def = resolve_task_references(task.label, task.task, dependencies)
--            task_def.setdefault('dependencies', []).extend(dependencies.itervalues())
--            # actually create the new task
--            create_task(slugid(), task_def, parameters['level'])
--        else:
--            raise Exception('{} was not found in the task-graph'.format(elem))
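The dependency fix-up above does two things: it remaps dependency labels to concrete taskIds, then substitutes those ids into the task definition. `resolve_task_references` lives in-tree; the following is a simplified sketch of my reading of the substitution it performs on `{'task-reference': ...}` values:

    def resolve_task_references(task_def, dependencies):
        """Replace '<name>' markers inside {'task-reference': ...} values."""
        def recurse(value):
            if isinstance(value, dict):
                if set(value) == {'task-reference'}:
                    text = value['task-reference']
                    for name, task_id in dependencies.items():
                        text = text.replace('<{}>'.format(name), task_id)
                    return text
                return {k: recurse(v) for k, v in value.items()}
            if isinstance(value, list):
                return [recurse(v) for v in value]
            return value
        return recurse(task_def)

    task_def = {'payload': {'env': {'PARENT': {'task-reference': '<build>'}}}}
    print(resolve_task_references(task_def, {'build': 'abc123'}))
    # {'payload': {'env': {'PARENT': 'abc123'}}}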
-diff --git a/taskcluster/taskgraph/actions/backfill.py b/taskcluster/taskgraph/actions/backfill.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/actions/backfill.py
-+++ /dev/null
-@@ -1,91 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import logging
--
--import requests
--from slugid import nice as slugid
--
--from .registry import register_callback_action
--from .util import create_task
--from taskgraph.util.taskcluster import get_artifact_from_index
--from taskgraph.util.parameterization import resolve_task_references
--from taskgraph.taskgraph import TaskGraph
--
--PUSHLOG_TMPL = '{}json-pushes?version=2&startID={}&endID={}'
--INDEX_TMPL = 'gecko.v2.{}.pushlog-id.{}.decision'
--
--logger = logging.getLogger(__name__)
--
--
--@register_callback_action(
--    title='Backfill',
--    name='backfill',
--    symbol='Bk',
--    description=('Take the label of the current task, '
--                 'and trigger the task with that label '
--                 'on previous pushes in the same project.'),
--    order=0,
--    context=[{}],  # This will be available for all tasks
--    schema={
--        'type': 'object',
--        'properties': {
--            'depth': {
--                'type': 'integer',
--                'default': 5,
--                'minimum': 1,
--                'maximum': 10,
--                'title': 'Depth',
--                'description': ('The number of previous pushes before the current '
--                                'push to attempt to trigger this task on.')
--            }
--        },
--        'additionalProperties': False
--    },
--    available=lambda parameters: parameters.get('project', None) != 'try'
--)
--def backfill_action(parameters, input, task_group_id, task_id, task):
--    label = task['metadata']['name']
--    pushes = []
--    depth = input.get('depth', 5)
--    end_id = int(parameters['pushlog_id']) - 1
--
--    while True:
--        start_id = max(end_id - depth, 0)
--        pushlog_url = PUSHLOG_TMPL.format(parameters['head_repository'], start_id, end_id)
--        r = requests.get(pushlog_url)
--        r.raise_for_status()
--        pushes = pushes + r.json()['pushes'].keys()
--        if len(pushes) >= depth:
--            break
--
--        end_id = start_id - 1
--        start_id -= depth
--        if start_id < 0:
--            break
--
--    pushes = sorted(pushes)[-depth:]
--
--    for push in pushes:
--        full_task_graph = get_artifact_from_index(
--                INDEX_TMPL.format(parameters['project'], push),
--                'public/full-task-graph.json')
--        _, full_task_graph = TaskGraph.from_json(full_task_graph)
--        label_to_taskid = get_artifact_from_index(
--                INDEX_TMPL.format(parameters['project'], push),
--                'public/label-to-taskid.json')
--
--        if label in full_task_graph.tasks.keys():
--            task = full_task_graph.tasks[label]
--            dependencies = {name: label_to_taskid[label]
--                            for name, label in task.dependencies.iteritems()}
--            task_def = resolve_task_references(task.label, task.task, dependencies)
--            task_def.setdefault('dependencies', []).extend(dependencies.itervalues())
--            create_task(slugid(), task_def, parameters['level'])
--        else:
--            logger.info('Could not find {} on {}. Skipping.'.format(label, push))
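The pushlog walk above collects candidate pushes in windows of `depth` until it has enough. The window arithmetic, slightly simplified, with a stub in place of the pushlog request:

    def collect_pushes(get_push_ids, newest_id, depth):
        """Walk backwards in windows of `depth` until enough pushes are found."""
        pushes = []
        end_id = newest_id
        while True:
            start_id = max(end_id - depth, 0)
            pushes += get_push_ids(start_id, end_id)
            if len(pushes) >= depth:
                break
            end_id = start_id - 1
            if end_id < 0:
                break
        return sorted(pushes)[-depth:]

    # stub: pretend every id in the requested range exists
    print(collect_pushes(lambda a, b: list(range(a, b + 1)), newest_id=100, depth=5))
    # [96, 97, 98, 99, 100]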
-diff --git a/taskcluster/taskgraph/actions/registry.py b/taskcluster/taskgraph/actions/registry.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/actions/registry.py
-+++ /dev/null
-@@ -1,332 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import json
--import os
--import inspect
--import re
--from mozbuild.util import memoize
--from types import FunctionType
--from collections import namedtuple
--from taskgraph.util.docker import docker_image
--from taskgraph.parameters import Parameters
--from . import util
--
--
--GECKO = os.path.realpath(os.path.join(__file__, '..', '..', '..'))
--
--actions = []
--callbacks = {}
--
--Action = namedtuple('Action', [
--    'name', 'title', 'description', 'order', 'context', 'schema', 'task_template_builder',
--])
--
--
--def is_json(data):
--    """ Return ``True``, if ``data`` is a JSON serializable data structure. """
--    try:
--        json.dumps(data)
--    except (TypeError, ValueError):  # non-serializable objects raise TypeError
--        return False
--    return True
--
--
--def register_task_action(name, title, description, order, context, schema=None):
--    """
--    Register an action task that can be triggered from supporting
--    user interfaces, such as Treeherder.
--
--    Most actions will create intermediate action tasks that call back into
--    in-tree python code. To write such an action please use
--    :func:`register_callback_action`.
--
--    This function is to be used as a decorator for a function that returns a task
--    template, see :doc:`specification <action-spec>` for details on the
--    templating features. The decorated function will be given decision task
--    parameters, which can be embedded in the task template that is returned.
--
--    Parameters
--    ----------
--    name : str
--        An identifier for this action, used by UIs to find the action.
--    title : str
--        A human readable title for the action to be used as label on a button
--        or text on a link for triggering the action.
--    description : str
--        A human readable description of the action in **markdown**.
--        This will be displayed as a tooltip and in the dialog window when the action
--        is triggered. This is a good place to describe how to use the action.
--    order : int
--        Order of the action in menus, this is relative to the ``order`` of
--        other actions declared.
--    context : list of dict
--        List of tag-sets specifying which tasks the action can take as input.
--        If no tag-sets are specified, the action is related to the
--        entire task-group, and won't be triggered with a given task.
--
--        Otherwise, an action with ``context = [{'k': 'b', 'p': 'l'}, {'k': 't'}]``
--        will only be displayed in the context menu for tasks that have
--        ``task.tags.k == 'b' && task.tags.p == 'l'`` or ``task.tags.k == 't'``.
--        Essentially, this allows filtering on ``task.tags``.
--    schema : dict
--        JSON schema specifying input accepted by the action.
--        This is optional and can be left ``null`` if no input is taken.
--
--    Returns
--    -------
--    function
--        To be used as decorator for the function that builds the task template.
--        The decorated function will be given decision parameters and may return
--        ``None`` instead of a task template, if the action is disabled.
--    """
--    assert isinstance(name, basestring), 'name must be a string'
--    assert isinstance(title, basestring), 'title must be a string'
--    assert isinstance(description, basestring), 'description must be a string'
--    assert isinstance(order, int), 'order must be an integer'
--    assert is_json(schema), 'schema must be a JSON compatible object'
--    mem = {"registered": False}  # workaround nonlocal missing in 2.x
--
--    def register_task_template_builder(task_template_builder):
--        assert not mem['registered'], 'register_task_action must be used as decorator'
--        actions.append(Action(
--            name.strip(), title.strip(), description.strip(), order, context,
--            schema, task_template_builder,
--        ))
--        mem['registered'] = True
--    return register_task_template_builder
--
--
--def register_callback_action(name, title, symbol, description, order=10000,
--                             context=[], available=lambda parameters: True, schema=None):
--    """
--    Register an action callback that can be triggered from supporting
--    user interfaces, such as Treeherder.
--
--    This function is to be used as a decorator for a callback that takes
--    parameters as follows:
--
--    ``parameters``:
--        Decision task parameters, see ``taskgraph.parameters.Parameters``.
--    ``input``:
--        Input matching specified JSON schema, ``None`` if no ``schema``
--        parameter is given to ``register_callback_action``.
--    ``task_group_id``:
--        The id of the task-group this was triggered for.
--    ``task_id`` and ``task``:
--        task identifier and task definition for the task the action was
--        triggered for; ``None`` if no ``context`` parameter was given to
--        ``register_callback_action``.
--
--    Parameters
--    ----------
--    name : str
--        An identifier for this action, used by UIs to find the action.
--    title : str
--        A human readable title for the action to be used as label on a button
--        or text on a link for triggering the action.
--    symbol : str
--        Treeherder symbol for the action callback, this is the symbol that the
--        task calling your callback will be displayed as. This is usually 1-3
--        letters abbreviating the action title.
--    description : str
--        A human readable description of the action in **markdown**.
--        This will be displayed as a tooltip and in the dialog window when the action
--        is triggered. This is a good place to describe how to use the action.
--    order : int
--        Order of the action in menus, this is relative to the ``order`` of
--        other actions declared.
--    context : list of dict
--        List of tag-sets specifying which tasks the action can take as input.
--        If no tag-sets are specified, the action is related to the
--        entire task-group, and won't be triggered with a given task.
--
--        Otherwise, an action with ``context = [{'k': 'b', 'p': 'l'}, {'k': 't'}]``
--        will only be displayed in the context menu for tasks that have
--        ``task.tags.k == 'b' && task.tags.p == 'l'`` or ``task.tags.k == 't'``.
--        Essentially, this allows filtering on ``task.tags``.
--    available : function
--        An optional function that given decision parameters decides if the
--        action is available. Defaults to a function that always returns ``True``.
--    schema : dict
--        JSON schema specifying input accepted by the action.
--        This is optional and can be left ``null`` if no input is taken.
--
--    Returns
--    -------
--    function
--        To be used as decorator for the callback function.
--    """
--    mem = {"registered": False}  # workaround nonlocal missing in 2.x
--
--    def register_callback(cb):
--        assert isinstance(cb, FunctionType), 'callback must be a function'
--        assert isinstance(symbol, basestring), 'symbol must be a string'
--        assert 1 <= len(symbol) <= 25, 'symbol must be between 1 and 25 characters'
--        assert not mem['registered'], 'register_callback_action must be used as decorator'
--        assert cb.__name__ not in callbacks, 'callback name {} is not unique'.format(cb.__name__)
--        source_path = os.path.relpath(inspect.stack()[1][1], GECKO)
--
--        @register_task_action(name, title, description, order, context, schema)
--        def build_callback_action_task(parameters):
--            if not available(parameters):
--                return None
--
--            match = re.match(r'https://(hg.mozilla.org)/(.*?)/?$', parameters['head_repository'])
--            if not match:
--                raise Exception('Unrecognized head_repository')
--            repo_scope = 'assume:repo:{}/{}:*'.format(
--                match.group(1), match.group(2))
--
--            return {
--                'created': {'$fromNow': ''},
--                'deadline': {'$fromNow': '12 hours'},
--                'expires': {'$fromNow': '14 days'},
--                'metadata': {
--                    'owner': 'mozilla-taskcluster-maintenance@mozilla.com',
--                    'source': '{}raw-file/{}/{}'.format(
--                        parameters['head_repository'], parameters['head_rev'], source_path,
--                    ),
--                    'name': 'Action: {}'.format(title),
--                    'description': 'Task executing callback for action.\n\n---\n' + description,
--                },
--                'workerType': 'gecko-decision',
--                'provisionerId': 'aws-provisioner-v1',
--                'scopes': [
--                    repo_scope,
--                ],
--                'tags': {
--                    'createdForUser': parameters['owner'],
--                    'kind': 'action-callback',
--                },
--                'routes': [
--                    'tc-treeherder.v2.{}.{}.{}'.format(
--                        parameters['project'], parameters['head_rev'], parameters['pushlog_id']),
--                    'tc-treeherder-stage.v2.{}.{}.{}'.format(
--                        parameters['project'], parameters['head_rev'], parameters['pushlog_id']),
--                ],
--                'payload': {
--                    'env': {
--                        'GECKO_BASE_REPOSITORY': 'https://hg.mozilla.org/mozilla-unified',
--                        'GECKO_HEAD_REPOSITORY': parameters['head_repository'],
--                        'GECKO_HEAD_REF': parameters['head_ref'],
--                        'GECKO_HEAD_REV': parameters['head_rev'],
--                        'HG_STORE_PATH': '/home/worker/checkouts/hg-store',
--                        'ACTION_TASK_GROUP_ID': {'$eval': 'taskGroupId'},
--                        'ACTION_TASK_ID': {'$json': {'$eval': 'taskId'}},
--                        'ACTION_TASK': {'$json': {'$eval': 'task'}},
--                        'ACTION_INPUT': {'$json': {'$eval': 'input'}},
--                        'ACTION_CALLBACK': cb.__name__,
--                        'ACTION_PARAMETERS': {'$json': {'$eval': 'parameters'}},
--                    },
--                    'cache': {
--                        'level-{}-checkouts'.format(parameters['level']):
--                            '/home/worker/checkouts',
--                    },
--                    'features': {
--                        'taskclusterProxy': True,
--                        'chainOfTrust': True,
--                    },
--                    'image': docker_image('decision'),
--                    'maxRunTime': 1800,
--                    'command': [
--                        '/home/worker/bin/run-task', '--vcs-checkout=/home/worker/checkouts/gecko',
--                        '--', 'bash', '-cx',
--                        """\
--cd /home/worker/checkouts/gecko &&
--ln -s /home/worker/artifacts artifacts &&
--./mach --log-no-times taskgraph action-callback""",
--                    ],
--                },
--                'extra': {
--                    'treeherder': {
--                        'groupName': 'action-callback',
--                        'groupSymbol': 'AC',
--                        'symbol': symbol,
--                    },
--                },
--            }
--        mem['registered'] = True
--        callbacks[cb.__name__] = cb
--    return register_callback
--
--
--def render_actions_json(parameters):
--    """
--    Render JSON object for the ``public/actions.json`` artifact.
--
--    Parameters
--    ----------
--    parameters : taskgraph.parameters.Parameters
--        Decision task parameters.
--
--    Returns
--    -------
--    dict
--        JSON object representation of the ``public/actions.json`` artifact.
--    """
--    assert isinstance(parameters, Parameters), 'requires instance of Parameters'
--    result = []
--    for action in sorted(get_actions(), key=lambda action: action.order):
--        task = action.task_template_builder(parameters)
--        if task:
--            assert is_json(task), 'task must be a JSON compatible object'
--            res = {
--                'kind': 'task',
--                'name': action.name,
--                'title': action.title,
--                'description': action.description,
--                'context': action.context,
--                'schema': action.schema,
--                'task': task,
--            }
--            if res['schema'] is None:
--                res.pop('schema')
--            result.append(res)
--    return {
--        'version': 1,
--        'variables': {
--            'parameters': dict(**parameters),
--        },
--        'actions': result,
--    }
--
--
--def trigger_action_callback(task_group_id, task_id, task, input, callback, parameters,
--                            test=False):
--    """
--    Trigger action callback with the given inputs. If `test` is true, then run
--    the action callback in testing mode, without actually creating tasks.
--    """
--    cb = get_callbacks().get(callback, None)
--    if not cb:
--        raise Exception('Unknown callback: {}. Known callbacks: {}'.format(
--            callback, get_callbacks().keys()))
--
--    if test:
--        util.testing = True
--
--    cb(Parameters(**parameters), input, task_group_id, task_id, task)
--
--
--@memoize
--def _load():
--    # Load all modules from this folder, relying on the side-effects of register_
--    # functions to populate the action registry.
--    for f in os.listdir(os.path.dirname(__file__)):
--        if f.endswith('.py') and f not in ('__init__.py', 'registry.py', 'util.py'):
--            __import__('taskgraph.actions.' + f[:-3])
--    return callbacks, actions
--
--
--def get_callbacks():
--    return _load()[0]
--
--
--def get_actions():
--    return _load()[1]
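`register_task_action` is a decorator factory: the outer call captures the action metadata, and the returned function records the decorated template builder in a module-level registry. The `mem` dict is the usual Python 2 substitute for `nonlocal`, used to guard against the registrar being called twice. The pattern in miniature:

    actions = []

    def register_action(name):
        mem = {'registered': False}  # Python 2 stand-in for `nonlocal`

        def register(builder):
            assert not mem['registered'], 'must be used as a decorator once'
            actions.append((name, builder))
            mem['registered'] = True
            return builder  # note: the real registry returns None here

        return register

    @register_action('retrigger')
    def build_retrigger(parameters):
        return {'title': 'Retrigger'}

    print(actions)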
-diff --git a/taskcluster/taskgraph/actions/retrigger.py b/taskcluster/taskgraph/actions/retrigger.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/actions/retrigger.py
-+++ /dev/null
-@@ -1,64 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from .registry import register_task_action
--
--
--@register_task_action(
--    title='Retrigger',
--    name='retrigger',
--    description='Create a clone of the task',
--    order=1,
--    context=[{}],
--)
--def retrigger_task_builder(parameters):
--
--    new_expires = '30 days'
--
--    return {
--        '$merge': [
--            {'$eval': 'task'},
--            {'created': {'$fromNow': ''}},
--            {'deadline': {'$fromNow': '1 day'}},
--            {'expires': {'$fromNow': new_expires}},
--            {'payload': {
--                '$merge': [
--                    {'$eval': 'task.payload'},
--                    {
--                        '$if': '"artifacts" in task.payload',
--                        'then': {
--                            'artifacts': {
--                                '$if': 'typeof(task.payload.artifacts) == "object"',
--                                'then': {
--                                    '$map': {'$eval': 'task.payload.artifacts'},
--                                    'each(artifact)': {
--                                        '${artifact.key}': {
--                                            '$merge': [
--                                                {'$eval': 'artifact.val'},
--                                                {'expires': {'$fromNow': new_expires}},
--                                            ],
--                                        },
--                                    },
--                                },
--                                'else': {
--                                    '$map': {'$eval': 'task.payload.artifacts'},
--                                    'each(artifact)': {
--                                        '$merge': [
--                                            {'$eval': 'artifact'},
--                                            {'expires': {'$fromNow': new_expires}},
--                                        ],
--                                    },
--                                },
--                            },
--                        },
--                        'else': {},
--                    }
--                ]
--            }}
--        ]
--    }
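The retrigger template above is pure JSON-e: `$merge` overlays fresh timestamp fields onto the original task, and `$fromNow` renders relative offsets against `now` from the render context. A small demonstration, assuming the `json-e` package (`pip install json-e`):

    import jsone

    template = {
        '$merge': [
            {'$eval': 'task'},
            {'created': {'$fromNow': ''}},
            {'expires': {'$fromNow': '30 days'}},
        ],
    }
    context = {
        'task': {'workerType': 'gecko-decision', 'expires': 'stale'},
        'now': '2020-01-01T00:00:00.000Z',  # pin "now" for a stable result
    }
    result = jsone.render(template, context)
    print(result['created'], result['expires'])
    # 2020-01-01T00:00:00.000Z 2020-01-31T00:00:00.000Z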
-diff --git a/taskcluster/taskgraph/actions/run_missing_tests.py b/taskcluster/taskgraph/actions/run_missing_tests.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/actions/run_missing_tests.py
-+++ /dev/null
-@@ -1,67 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import logging
--from slugid import nice as slugid
--
--from .registry import register_callback_action
--from .util import create_task, find_decision_task
--from taskgraph.util.taskcluster import get_artifact
--from taskgraph.util.parameterization import resolve_task_references
--from taskgraph.taskgraph import TaskGraph
--
--logger = logging.getLogger(__name__)
--
--
--@register_callback_action(
--    name='run-missing-tests',
--    title='Run Missing Tests',
--    symbol='rmt',
--    description="""
--    Run tests in the selected push that were optimized away, usually by SETA.
--
--    This action is for use on pushes that will be merged into another branch,
--    to check that optimization hasn't hidden any failures.
--    """,
--    order=100,  # Useful for sheriffs, but not top of the list
--    context=[],  # Applies to any task
--)
--def run_missing_tests(parameters, input, task_group_id, task_id, task):
--    decision_task_id = find_decision_task(parameters)
--
--    full_task_graph = get_artifact(decision_task_id, "public/full-task-graph.json")
--    _, full_task_graph = TaskGraph.from_json(full_task_graph)
--    target_tasks = get_artifact(decision_task_id, "public/target-tasks.json")
--    label_to_taskid = get_artifact(decision_task_id, "public/label-to-taskid.json")
--
--    # The idea here is to schedule all tasks of the `test` kind that were
--    # targeted but did not appear in the final task-graph -- those were the
--    # optimized tasks.
--    to_run = []
--    already_run = 0
--    for label in target_tasks:
--        task = full_task_graph.tasks[label]
--        if task.kind != 'test':
--            continue  # not a test
--        if label in label_to_taskid:
--            already_run += 1
--            continue
--        to_run.append(task)
--
--    for task in to_run:
--
--        # fix up the task's dependencies, similar to what optimization
--        # would have done in the decision task
--        dependencies = {name: label_to_taskid[label]
--                        for name, label in task.dependencies.iteritems()}
--        task_def = resolve_task_references(task.label, task.task, dependencies)
--        task_def.setdefault('dependencies', []).extend(dependencies.itervalues())
--        create_task(slugid(), task_def, parameters['level'])
--
--    logger.info('Out of {} test tasks, {} already existed and the action created {}'.format(
--        already_run + len(to_run), already_run, len(to_run)))
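The selection above reduces to a set difference: a test-kind task that was targeted but never got a taskId was optimized away. In isolation:

    kinds = {'test-a': 'test', 'test-b': 'test', 'build': 'build'}
    target_tasks = ['test-a', 'test-b', 'build']
    label_to_taskid = {'test-a': 'abc123', 'build': 'def456'}  # what ran

    to_run = [label for label in target_tasks
              if kinds[label] == 'test' and label not in label_to_taskid]
    print(to_run)  # ['test-b']: targeted and test-kind, but optimized away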
-diff --git a/taskcluster/taskgraph/actions/test-retrigger-action.py b/taskcluster/taskgraph/actions/test-retrigger-action.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/actions/test-retrigger-action.py
-+++ /dev/null
-@@ -1,142 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import copy
--import json
--import logging
--
--import requests
--from slugid import nice as slugid
--
--from .registry import register_callback_action
--from taskgraph.create import create_task
--from taskgraph.util.time import (
--    current_json_time,
--    json_time_from_now
--)
--
--TASKCLUSTER_QUEUE_URL = "https://queue.taskcluster.net/v1/task"
--
--logger = logging.getLogger(__name__)
--
--
--@register_callback_action(
--    name='run-with-options',
--    title='Schedule test retrigger',
--    symbol='tr',
--    description="Retriggers the specified test job with additional options",
--    context=[{'test-type': 'mochitest'},
--             {'test-type': 'reftest'}],
--    order=0,
--    schema={
--        'type': 'object',
--        'properties': {
--            'path': {
--                'type': 'string',
--                'maxLength': 255,
--                'default': '',
--                'title': 'Path name',
--                'description': 'Path of test to retrigger'
--            },
--            'logLevel': {
--                'type': 'string',
--                'enum': ['debug', 'info', 'warning', 'error', 'critical'],
--                'default': 'debug',
--                'title': 'Log level',
--                'description': 'Log level for output (default is DEBUG, the most verbose)'
--            },
--            'runUntilFail': {
--                'type': 'boolean',
--                'default': True,
--                'title': 'Run until failure',
--                'description': ('Runs the specified set of tests repeatedly '
--                                'until failure (or 30 times)')
--            },
--            'repeat': {
--                'type': 'integer',
--                'default': 30,
--                'minimum': 1,
--                'title': 'Run tests N times',
--                'description': ('Run tests repeatedly (usually used in '
--                                'conjunction with runUntilFail)')
--            },
--            'environment': {
--                'type': 'object',
--                'default': {'MOZ_LOG': ''},
--                'title': 'Extra environment variables',
--                'description': 'Extra environment variables to use for this run'
--            },
--            'preferences': {
--                'type': 'object',
--                'default': {'mygeckopreferences.pref': 'myvalue2'},
--                'title': 'Extra gecko (about:config) preferences',
--                'description': 'Extra gecko (about:config) preferences to use for this run'
--            }
--        },
--        'additionalProperties': False,
--        'required': ['path']
--    }
--)
--def test_retrigger_action(parameters, input, task_group_id, task_id, task):
--    new_task_definition = copy.copy(task)
--
--    # set new created, deadline, and expiry fields
--    new_task_definition['created'] = current_json_time()
--    new_task_definition['deadline'] = json_time_from_now('1d')
--    new_task_definition['expires'] = json_time_from_now('30d')
--
--    # reset artifact expiry
--    for artifact in new_task_definition['payload'].get('artifacts', {}).values():
--        artifact['expires'] = new_task_definition['expires']
--
--    # don't want to run mozharness tests, want a custom mach command instead
--    new_task_definition['payload']['command'] += ['--no-run-tests']
--
--    custom_mach_command = [task['tags']['test-type']]
--
--    # mochitests may specify a flavor
--    if new_task_definition['payload']['env'].get('MOCHITEST_FLAVOR'):
--        custom_mach_command += [
--            '--keep-open=false',
--            '-f',
--            new_task_definition['payload']['env']['MOCHITEST_FLAVOR']
--        ]
--
--    enable_e10s = json.loads(new_task_definition['payload']['env'].get(
--        'ENABLE_E10S', 'true'))
--    if not enable_e10s:
--        custom_mach_command += ['--disable-e10s']
--
--    custom_mach_command += ['--log-tbpl=-',
--                            '--log-tbpl-level={}'.format(input['logLevel'])]
--    if input.get('runUntilFail'):
--        custom_mach_command += ['--run-until-failure']
--    if input.get('repeat'):
--        custom_mach_command += ['--repeat', str(input['repeat'])]
--
--    # add any custom gecko preferences
--    for (key, val) in input.get('preferences', {}).iteritems():
--        custom_mach_command += ['--setpref', '{}={}'.format(key, val)]
--
--    custom_mach_command += [input['path']]
--    new_task_definition['payload']['env']['CUSTOM_MACH_COMMAND'] = ' '.join(
--        custom_mach_command)
--
--    # update environment
--    new_task_definition['payload']['env'].update(input.get('environment', {}))
--
--    # tweak the treeherder symbol
--    new_task_definition['extra']['treeherder']['symbol'] += '-custom'
--
--    logging.info("New task definition: %s", new_task_definition)
--
--    # actually create the new task
--    new_task_id = slugid()
--    logger.info("Creating new mochitest task with id %s", new_task_id)
--    session = requests.Session()
--    create_task(session, new_task_id, 'test-retrigger', new_task_definition)
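One detail above worth calling out: `json.loads` on the `ENABLE_E10S` string acts as a strict boolean parser, so only the JSON spellings `true`/`false` are accepted and the result is a real bool. For example:

    import json

    print(json.loads('true'), json.loads('false'))  # True False

    # anything that is not valid JSON raises instead of being silently truthy
    try:
        json.loads('True')
    except ValueError as e:
        print('rejected:', e)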
-diff --git a/taskcluster/taskgraph/actions/util.py b/taskcluster/taskgraph/actions/util.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/actions/util.py
-+++ /dev/null
-@@ -1,39 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import json
--import sys
--
--from taskgraph import create
--from taskgraph.util.taskcluster import get_session, find_task_id
--
--# this is set to true for `mach taskgraph action-callback --test`
--testing = False
--
--
--def find_decision_task(parameters):
--    """Given the parameters for this action, find the taskId of the decision
--    task"""
--    return find_task_id('gecko.v2.{}.pushlog-id.{}.decision'.format(
--        parameters['project'],
--        parameters['pushlog_id']))
--
--
--def create_task(task_id, task_def, level):
--    """Create a new task.  The task definition will have {relative-datestamp':
--    '..'} rendered just like in a decision task.  Action callbacks should use
--    this function to create new tasks, as it has the additional advantage of
--    allowing easy debugging with `mach taskgraph action-callback --test`."""
--    task_def['schedulerId'] = 'gecko-level-{}'.format(level)
--    if testing:
--        json.dump([task_id, task_def], sys.stdout,
--                  sort_keys=True, indent=4, separators=(',', ': '))
--        return
--    label = task_def['metadata']['name']
--    session = get_session()
--    create.create_task(session, task_id, label, task_def)
-diff --git a/taskcluster/taskgraph/create.py b/taskcluster/taskgraph/create.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/create.py
-+++ /dev/null
-@@ -1,110 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import concurrent.futures as futures
--import requests
--import requests.adapters
--import json
--import os
--import logging
--
--from slugid import nice as slugid
--from taskgraph.util.parameterization import resolve_timestamps
--from taskgraph.util.time import current_json_time
--
--logger = logging.getLogger(__name__)
--
--# the maximum number of parallel createTask calls to make
--CONCURRENCY = 50
--
--
--def create_tasks(taskgraph, label_to_taskid, params):
--    taskid_to_label = {t: l for l, t in label_to_taskid.iteritems()}
--
--    session = requests.Session()
--
--    # Default HTTPAdapter uses 10 connections. Mount custom adapter to increase
--    # that limit. Connections are established as needed, so using a large value
--    # should not negatively impact performance.
--    http_adapter = requests.adapters.HTTPAdapter(pool_connections=CONCURRENCY,
--                                                 pool_maxsize=CONCURRENCY)
--    session.mount('https://', http_adapter)
--    session.mount('http://', http_adapter)
--
--    decision_task_id = os.environ.get('TASK_ID')
--
--    # when running as an actual decision task, we use the decision task's
--    # taskId as the taskGroupId.  The process that created the decision task
--    # helpfully placed it in this same taskGroup.  If there is no $TASK_ID,
--    # fall back to a slugid
--    task_group_id = decision_task_id or slugid()
--    scheduler_id = 'gecko-level-{}'.format(params['level'])
--
--    with futures.ThreadPoolExecutor(CONCURRENCY) as e:
--        fs = {}
--
--        # We can't submit a task until its dependencies have been submitted.
--        # So our strategy is to walk the graph and submit tasks once all
--        # their dependencies have been submitted.
--        #
--        # Using visit_postorder() here isn't the most efficient: we'll
--        # block waiting for dependencies of task N to submit even though
--        # dependencies for task N+1 may be finished. If we need to optimize
--        # this further, we can build a graph of task dependencies and walk
--        # that.
--        for task_id in taskgraph.graph.visit_postorder():
--            task_def = taskgraph.tasks[task_id].task
--            attributes = taskgraph.tasks[task_id].attributes
--
--            # if this task has no dependencies *within* this taskgraph, make it
--            # depend on this decision task. If it has another dependency within
--            # the taskgraph, then it already implicitly depends on the decision
--            # task.  The result is that tasks do not start immediately; if this
--            # loop fails halfway through, none of the already-created tasks run.
--            if decision_task_id:
--                if not any(t in taskgraph.tasks for t in task_def.get('dependencies', [])):
--                    task_def.setdefault('dependencies', []).append(decision_task_id)
--
--            task_def['taskGroupId'] = task_group_id
--            task_def['schedulerId'] = scheduler_id
--
--            # Wait for dependencies before submitting this.
--            deps_fs = [fs[dep] for dep in task_def.get('dependencies', [])
--                       if dep in fs]
--            for f in futures.as_completed(deps_fs):
--                f.result()
--
--            fs[task_id] = e.submit(create_task, session, task_id,
--                                   taskid_to_label[task_id], task_def)
--
--            # Schedule tasks as many times as task_duplicates indicates
--            for i in range(1, attributes.get('task_duplicates', 1)):
--                # We use slugid() since we want a distinct task id
--                fs[task_id] = e.submit(create_task, session, slugid(),
--                                       taskid_to_label[task_id], task_def)
--
--        # Wait for all futures to complete.
--        for f in futures.as_completed(fs.values()):
--            f.result()
--
--
--def create_task(session, task_id, label, task_def):
--    # create the task using 'http://taskcluster/queue', which is proxied to the queue service
--    # with credentials appropriate to this job.
--
--    # Resolve timestamps
--    now = current_json_time(datetime_format=True)
--    task_def = resolve_timestamps(now, task_def)
--
--    logger.debug("Creating task with taskId {} for {}".format(task_id, label))
--    res = session.put('http://taskcluster/queue/v1/task/{}'.format(task_id),
--                      data=json.dumps(task_def))
--    if res.status_code != 200:
--        try:
--            logger.error(res.json()['message'])
--        except Exception:
--            logger.error(res.text)
--        res.raise_for_status()
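The submission strategy above bears spelling out: tasks are walked in postorder (dependencies before dependents), and each submission first waits on its dependencies' futures, so a failure partway through leaves dependents uncreated. A self-contained sketch with a dummy `submit` in place of the createTask call:

    import concurrent.futures as futures
    import time

    graph = {'toolchain': [], 'build': ['toolchain'], 'test': ['build']}
    postorder = ['toolchain', 'build', 'test']  # deps before dependents

    def submit(task_id):
        time.sleep(0.01)  # stand-in for the createTask HTTP round trip
        print('created', task_id)

    with futures.ThreadPoolExecutor(4) as executor:
        fs = {}
        for task_id in postorder:
            # block until every dependency has been submitted successfully
            for f in futures.as_completed([fs[d] for d in graph[task_id]]):
                f.result()
            fs[task_id] = executor.submit(submit, task_id)
        for f in futures.as_completed(fs.values()):
            f.result()

As the original comment notes, this is deliberately conservative; a more aggressive scheduler would submit any task whose own dependencies are done rather than serializing on postorder position.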
-diff --git a/taskcluster/taskgraph/cron/__init__.py b/taskcluster/taskgraph/cron/__init__.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/cron/__init__.py
-+++ /dev/null
-@@ -1,165 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import datetime
--import json
--import logging
--import os
--import traceback
--import yaml
--
--from . import decision, schema
--from .util import (
--    match_utc,
--    calculate_head_rev
--)
--from ..create import create_task
--from .. import GECKO
--from taskgraph.util.attributes import match_run_on_projects
--from taskgraph.util.schema import resolve_keyed_by
--from taskgraph.util.taskcluster import get_session
--
--# Functions to handle each `job.type` in `.cron.yml`.  These are called with
--# the contents of the `job` property from `.cron.yml` and should return a
--# sequence of (taskId, task) tuples which will subsequently be fed to
--# createTask.
--JOB_TYPES = {
--    'decision-task': decision.run_decision_task,
--}
--
--logger = logging.getLogger(__name__)
--
--
--def load_jobs(params):
--    with open(os.path.join(GECKO, '.cron.yml'), 'rb') as f:
--        cron_yml = yaml.load(f)
--    schema.validate(cron_yml)
--
--    # resolve keyed_by fields in each job
--    jobs = cron_yml['jobs']
--
--    return {j['name']: j for j in jobs}
--
--
--def should_run(job, params):
--    run_on_projects = job.get('run-on-projects', ['all'])
--    if not match_run_on_projects(params['project'], run_on_projects):
--        return False
--    # Resolve the `when` key here, so we don't require it before we know
--    # that we actually want to run on this branch.
--    resolve_keyed_by(job, 'when', 'Cron job ' + job['name'],
--                     project=params['project'])
--    if not any(match_utc(params, hour=sched.get('hour'), minute=sched.get('minute'))
--               for sched in job.get('when', [])):
--        return False
--    return True
--
--
--def run_job(job_name, job, params):
--    params['job_name'] = job_name
--
--    try:
--        job_type = job['job']['type']
--        if job_type in JOB_TYPES:
--            tasks = JOB_TYPES[job_type](job['job'], params)
--        else:
--            raise Exception("job type {} not recognized".format(job_type))
--        if params['no_create']:
--            for task_id, task in tasks:
--                logger.info("Not creating task {} (--no-create):\n".format(task_id) +
--                            json.dumps(task, sort_keys=True, indent=4, separators=(',', ': ')))
--        else:
--            for task_id, task in tasks:
--                create_task(get_session(), task_id, params['job_name'], task)
--
--    except Exception:
--        # report the exception, but don't fail the whole cron task, as that
--        # would leave other jobs un-run.  NOTE: we could report job failure to
--        # a responsible person here via tc-notify
--        traceback.print_exc()
--        logger.error("cron job {} run failed; continuing to next job".format(
--            params['job_name']))
--
--
--def calculate_time(options):
--    if 'TASK_ID' not in os.environ:
--        # running in a development environment, so look for CRON_TIME or use
--        # the current time
--        if 'CRON_TIME' in os.environ:
--            logger.warning("setting params['time'] based on $CRON_TIME")
--            time = datetime.datetime.utcfromtimestamp(
--                int(os.environ['CRON_TIME']))
--        else:
--            logger.warning("using current time for params['time']; try setting $CRON_TIME "
--                           "to a timestamp")
--            time = datetime.datetime.utcnow()
--    else:
--        # fetch this task from the queue
--        res = get_session().get(
--            'http://taskcluster/queue/v1/task/' + os.environ['TASK_ID'])
--        if res.status_code != 200:
--            try:
--                logger.error(res.json()['message'])
--            except Exception:
--                logger.error(res.text)
--            res.raise_for_status()
--        # the task's `created` time is close to when the hook ran, although that
--        # may be some time ago if task execution was delayed
--        created = res.json()['created']
--        time = datetime.datetime.strptime(created, '%Y-%m-%dT%H:%M:%S.%fZ')
--
--    # round down to the nearest 15m
--    minute = time.minute - (time.minute % 15)
--    time = time.replace(minute=minute, second=0, microsecond=0)
--    logger.info("calculated cron schedule time is {}".format(time))
--    return time
--
--
--def taskgraph_cron(options):
--    params = {
--        # name of this cron job (set per job below)
--        'job_name': '..',
--
--        # repositories
--        'base_repository': options['base_repository'],
--        'head_repository': options['head_repository'],
--
--        # the symbolic ref this should run against (which happens to be what
--        # run-task checked out for us)
--        'head_ref': options['head_ref'],
--
--        # *calculated* head_rev; this is based on the current meaning of this
--        # reference in the working copy
--        'head_rev': calculate_head_rev(options),
--
--        # the project (short name for the repository) and its SCM level
--        'project': options['project'],
--        'level': options['level'],
--
--        # if true, tasks will not actually be created
--        'no_create': options['no_create'],
--
--        # the time that this cron task was created (as a UTC datetime object)
--        'time': calculate_time(options),
--    }
--
--    jobs = load_jobs(params)
--
--    if options['force_run']:
--        job_name = options['force_run']
--        logger.info("force-running cron job {}".format(job_name))
--        run_job(job_name, jobs[job_name], params)
--        return
--
--    for job_name, job in sorted(jobs.items()):
--        if should_run(job, params):
--            logger.info("running cron job {}".format(job_name))
--            run_job(job_name, job, params)
--        else:
--            logger.info("not running cron job {}".format(job_name))
-diff --git a/taskcluster/taskgraph/cron/decision.py b/taskcluster/taskgraph/cron/decision.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/cron/decision.py
-+++ /dev/null
-@@ -1,83 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import jsone
--import pipes
--import yaml
--import os
--import slugid
--
--from taskgraph.util.time import current_json_time
--
--
--def run_decision_task(job, params):
--    arguments = []
--    if 'target-tasks-method' in job:
--        arguments.append('--target-tasks-method={}'.format(job['target-tasks-method']))
--    return [
--        make_decision_task(
--            params,
--            symbol=job['treeherder-symbol'],
--            arguments=arguments),
--    ]
--
--
--def make_decision_task(params, symbol, arguments=[], head_rev=None):
--    """Generate a basic decision task, based on the root .taskcluster.yml"""
--    with open('.taskcluster.yml') as f:
--        taskcluster_yml = yaml.load(f)
--
--    if not head_rev:
--        head_rev = params['head_rev']
--
--    slugids = {}
--
--    def as_slugid(name):
--        # https://github.com/taskcluster/json-e/issues/164
--        name = name[0]
--        if name not in slugids:
--            slugids[name] = slugid.nice()
--        return slugids[name]
--
--    # provide a similar JSON-e context to what mozilla-taskcluster provides:
--    # https://docs.taskcluster.net/reference/integrations/mozilla-taskcluster/docs/taskcluster-yml
--    # but with a different tasks_for and an extra `cron` section
--    context = {
--        'tasks_for': 'cron',
--        'repository': {
--            'url': params['head_repository'],
--            'project': params['project'],
--            'level': params['level'],
--        },
--        'push': {
--            'revision': params['head_rev'],
--            # remainder are fake values, but the decision task expects them anyway
--            'pushlog_id': -1,
--            'pushdate': 0,
--            'owner': 'nobody',
--            'comment': '',
--        },
--        'cron': {
--            'task_id': os.environ.get('TASK_ID', '<cron task id>'),
--            'job_name': params['job_name'],
--            'job_symbol': symbol,
--            # args are shell-quoted since they are given to `bash -c`
--            'quoted_args': ' '.join(pipes.quote(a) for a in arguments),
--        },
--        'now': current_json_time(),
--        'as_slugid': as_slugid,
--    }
--
--    rendered = jsone.render(taskcluster_yml, context)
--    if len(rendered['tasks']) != 1:
--        raise Exception("Expected .taskcluster.yml to only produce one cron task")
--    task = rendered['tasks'][0]
--
--    task_id = task.pop('taskId')
--    return (task_id, task)
-diff --git a/taskcluster/taskgraph/cron/schema.py b/taskcluster/taskgraph/cron/schema.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/cron/schema.py
-+++ /dev/null
-@@ -1,61 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from voluptuous import Any, Required, All
--from taskgraph.util.schema import (
--    optionally_keyed_by,
--    validate_schema,
--    Schema,
--)
--
--
--def even_15_minutes(minutes):
--    if minutes % 15 != 0:
--        raise ValueError("minutes must be evenly divisible by 15")
--
--
--cron_yml_schema = Schema({
--    'jobs': [{
--        # Name of the crontask (must be unique)
--        Required('name'): basestring,
--
--        # what to run
--
--        # Description of the job to run, keyed by 'type'
--        Required('job'): Any({
--            Required('type'): 'decision-task',
--
--            # Treeherder symbol for the cron task
--            Required('treeherder-symbol'): basestring,
--
--            # --target-tasks-method './mach taskgraph decision' argument
--            'target-tasks-method': basestring,
--        }),
--
--        # when to run it
--
--        # Optional set of projects on which this job should run; if omitted, this will
--        # run on all projects for which cron tasks are set up.  This works just like the
--        # `run_on_projects` attribute, where strings like "release" and "integration" are
--        # expanded to cover multiple repositories.  (taskcluster/docs/attributes.rst)
--        'run-on-projects': [basestring],
--
--        # Array of times at which this task should run.  These *must* be a
--        # multiple of 15 minutes, the minimum scheduling interval.  This field
--        # can be keyed by project so that each project has a different schedule
--        # for the same job.
--        'when': optionally_keyed_by(
--            'project',
--            [{'hour': int, 'minute': All(int, even_15_minutes)}]),
--    }],
--})
--
--
--def validate(cron_yml):
--    validate_schema(cron_yml_schema, cron_yml, "Invalid .cron.yml:")
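The schema leans on a voluptuous convention: a plain function used as a validator rejects a value by raising `ValueError`, and its return value becomes the validated value. A standalone example, assuming the `voluptuous` package; note that the in-tree `even_15_minutes` returns `None`, so this sketch returns the value explicitly:

    from voluptuous import All, MultipleInvalid, Schema

    def even_15_minutes(minutes):
        if minutes % 15 != 0:
            raise ValueError('minutes must be evenly divisible by 15')
        return minutes  # hand the value back so All() keeps it

    schema = Schema({'when': [{'hour': int, 'minute': All(int, even_15_minutes)}]})

    print(schema({'when': [{'hour': 4, 'minute': 45}]}))   # passes
    try:
        schema({'when': [{'minute': 7}]})
    except MultipleInvalid as e:
        print('rejected:', e)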
-diff --git a/taskcluster/taskgraph/cron/util.py b/taskcluster/taskgraph/cron/util.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/cron/util.py
-+++ /dev/null
-@@ -1,32 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import subprocess
--
--
--def match_utc(params, hour=None, minute=None):
--    """ Return True if params['time'] matches the given hour and minute.
--    If hour is not specified, any hour will match.  If minute is not
--    specified, then every multiple of fifteen minutes will match.  Times
--    not an even multiple of fifteen minutes will result in an exception
--    (since they would never run)."""
--    if minute is not None and minute % 15 != 0:
--        raise Exception("cron jobs only run on multiples of 15 minutes past the hour")
--    if hour is not None and params['time'].hour != hour:
--        return False
--    if minute is not None and params['time'].minute != minute:
--        return False
--    return True
--
--
--def calculate_head_rev(options):
--    # we assume that run-task has correctly checked out the revision indicated by
--    # GECKO_HEAD_REF, so all that remains is to see what the current revision is.
--    # Mercurial refers to that as `.`.
--    return subprocess.check_output(['hg', 'log', '-r', '.', '-T', '{node}'])
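
To illustrate the matching rules in `match_utc` (function copied from above;
the params dict is a stand-in for the real decision parameters):

    import datetime

    def match_utc(params, hour=None, minute=None):
        if minute is not None and minute % 15 != 0:
            raise Exception("cron jobs only run on multiples of 15 minutes "
                            "past the hour")
        if hour is not None and params['time'].hour != hour:
            return False
        if minute is not None and params['time'].minute != minute:
            return False
        return True

    params = {'time': datetime.datetime(2020, 1, 1, 10, 45)}
    print(match_utc(params, hour=10, minute=45))   # True
    print(match_utc(params, hour=11, minute=45))   # False: hour differs
    print(match_utc(params, minute=45))            # True: any hour matches
    # match_utc(params, minute=10) would raise: 10 is not a multiple of 15
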
-diff --git a/taskcluster/taskgraph/decision.py b/taskcluster/taskgraph/decision.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/decision.py
-+++ /dev/null
-@@ -1,229 +0,0 @@
--# -*- coding: utf-8 -*-
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import os
--import json
--import logging
--import re
--
--import time
--import yaml
--
--from .generator import TaskGraphGenerator
--from .create import create_tasks
--from .parameters import Parameters
--from .taskgraph import TaskGraph
--from .actions import render_actions_json
--from . import GECKO
--
--from taskgraph.util.templates import Templates
--from taskgraph.util.time import (
--    json_time_from_now,
--    current_json_time,
--)
--
--logger = logging.getLogger(__name__)
--
--ARTIFACTS_DIR = 'artifacts'
--
--# For each project, this gives a set of parameters specific to the project.
--# See `taskcluster/docs/parameters.rst` for information on parameters.
--PER_PROJECT_PARAMETERS = {
--    'try': {
--        'target_tasks_method': 'try_tasks',
--        # Always perform optimization.  This makes it difficult to use try
--        # pushes to run a task that would otherwise be optimized, but is a
--        # compromise to avoid essentially disabling optimization in try.
--        'optimize_target_tasks': True,
--        # By default, the `try_option_syntax` `target_task_method` ignores this
--        # parameter, and enables/disables nightlies depending on whether
--        # `--include-nightly` is specified in the commit message.
--        # We're setting the `include_nightly` parameter to True here for when
--        # we submit decision tasks against Try that use other
--        # `target_task_method`s, like `nightly_fennec` or `mozilla_beta_tasks`,
--        # which reference the `include_nightly` parameter.
--        'include_nightly': True,
--    },
--
--    'ash': {
--        'target_tasks_method': 'ash_tasks',
--        'optimize_target_tasks': True,
--        'include_nightly': False,
--    },
--
--    'cedar': {
--        'target_tasks_method': 'cedar_tasks',
--        'optimize_target_tasks': True,
--        'include_nightly': False,
--    },
--
--    'graphics': {
--        'target_tasks_method': 'graphics_tasks',
--        'optimize_target_tasks': True,
--        'include_nightly': False,
--    },
--
--    'mozilla-beta': {
--        'target_tasks_method': 'mozilla_beta_tasks',
--        'optimize_target_tasks': False,
--        'include_nightly': True,
--    },
--
--    'mozilla-release': {
--        'target_tasks_method': 'mozilla_release_tasks',
--        'optimize_target_tasks': False,
--        'include_nightly': True,
--    },
--
--    'pine': {
--        'target_tasks_method': 'pine_tasks',
--        'optimize_target_tasks': True,
--        'include_nightly': False,
--    },
--
--    # the default parameters are used for projects that do not match above.
--    'default': {
--        'target_tasks_method': 'default',
--        'optimize_target_tasks': True,
--        'include_nightly': False,
--    }
--}
--
--
--def taskgraph_decision(options):
--    """
--    Run the decision task.  This function implements `mach taskgraph decision`,
--    and is responsible for
--
--     * processing decision task command-line options into parameters
--     * running task-graph generation exactly the same way the other `mach
--       taskgraph` commands do
--     * generating a set of artifacts to memorialize the graph
--     * calling TaskCluster APIs to create the graph
--    """
--
--    parameters = get_decision_parameters(options)
--    # create a TaskGraphGenerator instance
--    tgg = TaskGraphGenerator(
--        root_dir=options['root'],
--        parameters=parameters)
--
--    # write out the parameters used to generate this graph
--    write_artifact('parameters.yml', dict(**parameters))
--
--    # write out the yml file for action tasks
--    write_artifact('action.yml', get_action_yml(parameters))
--
--    # write out the public/actions.json file
--    write_artifact('actions.json', render_actions_json(parameters))
--
--    # write out the full graph for reference
--    full_task_json = tgg.full_task_graph.to_json()
--    write_artifact('full-task-graph.json', full_task_json)
--
--    # this is just a test to check whether the from_json() function is working
--    _, _ = TaskGraph.from_json(full_task_json)
--
--    # write out the target task set to allow reproducing this as input
--    write_artifact('target-tasks.json', tgg.target_task_set.tasks.keys())
--
--    # write out the optimized task graph to describe what will actually happen,
--    # and the map of labels to taskids
--    write_artifact('task-graph.json', tgg.morphed_task_graph.to_json())
--    write_artifact('label-to-taskid.json', tgg.label_to_taskid)
--
--    # actually create the graph
--    create_tasks(tgg.morphed_task_graph, tgg.label_to_taskid, parameters)
--
--
--def get_decision_parameters(options):
--    """
--    Load parameters from the command-line options for 'taskgraph decision'.
--    This also applies per-project parameters, based on the given project.
--
--    """
--    parameters = {n: options[n] for n in [
--        'base_repository',
--        'head_repository',
--        'head_rev',
--        'head_ref',
--        'message',
--        'project',
--        'pushlog_id',
--        'pushdate',
--        'owner',
--        'level',
--        'target_tasks_method',
--    ] if n in options}
--
--    # Define default filter list, as most configurations shouldn't need
--    # custom filters.
--    parameters['filters'] = [
--        'check_servo',
--        'target_tasks_method',
--    ]
--
--    # owner must be an email address, but sometimes (e.g., for ffxbld) it is
--    # not, in which case we fake one
--    if '@' not in parameters['owner']:
--        parameters['owner'] += '@noreply.mozilla.org'
--
--    # use the pushdate as build_date if given, else use current time
--    parameters['build_date'] = parameters['pushdate'] or int(time.time())
--    # moz_build_date is the build identifier based on build_date
--    parameters['moz_build_date'] = time.strftime("%Y%m%d%H%M%S",
--                                                 time.gmtime(parameters['build_date']))
--
--    project = parameters['project']
--    try:
--        parameters.update(PER_PROJECT_PARAMETERS[project])
--    except KeyError:
--        logger.warning("using default project parameters; add {} to "
--                       "PER_PROJECT_PARAMETERS in {} to customize behavior "
--                       "for this project".format(project, __file__))
--        parameters.update(PER_PROJECT_PARAMETERS['default'])
--
--    # `target_tasks_method` has higher precedence than `project` parameters
--    if options.get('target_tasks_method'):
--        parameters['target_tasks_method'] = options['target_tasks_method']
--
--    return Parameters(parameters)
--
--
--def write_artifact(filename, data):
--    logger.info('writing artifact file `{}`'.format(filename))
--    if not os.path.isdir(ARTIFACTS_DIR):
--        os.mkdir(ARTIFACTS_DIR)
--    path = os.path.join(ARTIFACTS_DIR, filename)
--    if filename.endswith('.yml'):
--        with open(path, 'w') as f:
--            yaml.safe_dump(data, f, allow_unicode=True, default_flow_style=False)
--    elif filename.endswith('.json'):
--        with open(path, 'w') as f:
--            json.dump(data, f, sort_keys=True, indent=2, separators=(',', ': '))
--    else:
--        raise TypeError("Don't know how to write to {}".format(filename))
--
--
--def get_action_yml(parameters):
--    templates = Templates(os.path.join(GECKO, "taskcluster/taskgraph"))
--    action_parameters = parameters.copy()
--
--    match = re.match(r'https://(hg.mozilla.org)/(.*?)/?$', action_parameters['head_repository'])
--    if not match:
--        raise Exception('Unrecognized head_repository')
--    repo_scope = 'assume:repo:{}/{}:*'.format(
--        match.group(1), match.group(2))
--
--    action_parameters.update({
--        "action": "{{action}}",
--        "action_args": "{{action_args}}",
--        "repo_scope": repo_scope,
--        "from_now": json_time_from_now,
--        "now": current_json_time()
--    })
--    return templates.load('action.yml', action_parameters)
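
The parameter precedence in `get_decision_parameters` is: command-line options
first, then per-project defaults, with an explicit `target_tasks_method` option
overriding both. A condensed sketch of that lookup (`resolve` is a hypothetical
helper; the table is trimmed to two entries):

    PER_PROJECT_PARAMETERS = {
        'mozilla-beta': {'target_tasks_method': 'mozilla_beta_tasks',
                         'optimize_target_tasks': False},
        'default': {'target_tasks_method': 'default',
                    'optimize_target_tasks': True},
    }

    def resolve(project, options):
        parameters = {}
        # per-project defaults (falling back to 'default' for unknown projects)
        parameters.update(PER_PROJECT_PARAMETERS.get(
            project, PER_PROJECT_PARAMETERS['default']))
        # an explicit CLI choice has higher precedence than project defaults
        if options.get('target_tasks_method'):
            parameters['target_tasks_method'] = options['target_tasks_method']
        return parameters

    print(resolve('mozilla-beta', {}))
    # {'target_tasks_method': 'mozilla_beta_tasks', 'optimize_target_tasks': False}
    print(resolve('try', {'target_tasks_method': 'try_tasks'}))
    # {'target_tasks_method': 'try_tasks', 'optimize_target_tasks': True}
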
-diff --git a/taskcluster/taskgraph/docker.py b/taskcluster/taskgraph/docker.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/docker.py
-+++ /dev/null
-@@ -1,202 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import json
--import os
--import sys
--import subprocess
--import tarfile
--import tempfile
--import which
--from subprocess import Popen, PIPE
--from io import BytesIO
--
--from taskgraph.util import docker
--from taskgraph.util.taskcluster import (
--    find_task_id,
--    get_artifact_url,
--)
--from . import GECKO
--
--DOCKER_INDEX = docker.INDEX_PREFIX + '.{}.{}.hash.{}'
--
--
--def load_image_by_name(image_name, tag=None):
--    context_path = os.path.join(GECKO, 'taskcluster', 'docker', image_name)
--    context_hash = docker.generate_context_hash(GECKO, context_path, image_name)
--
--    index_path = DOCKER_INDEX.format('level-3', image_name, context_hash)
--    task_id = find_task_id(index_path)
--
--    return load_image_by_task_id(task_id, tag)
--
--
--def load_image_by_task_id(task_id, tag=None):
--    artifact_url = get_artifact_url(task_id, 'public/image.tar.zst')
--    result = load_image(artifact_url, tag)
--    print("Found docker image: {}:{}".format(result['image'], result['tag']))
--    if tag:
--        print("Re-tagged as: {}".format(tag))
--    else:
--        tag = '{}:{}'.format(result['image'], result['tag'])
--    print("Try: docker run -ti --rm {} bash".format(tag))
--    return True
--
--
--def build_context(name, outputFile):
--    """Build a context.tar for image with specified name.
--    """
--    if not name:
--        raise ValueError('must provide a Docker image name')
--    if not outputFile:
--        raise ValueError('must provide an outputFile')
--
--    image_dir = os.path.join(docker.IMAGE_DIR, name)
--    if not os.path.isdir(image_dir):
--        raise Exception('image directory does not exist: %s' % image_dir)
--
--    docker.create_context_tar(GECKO, image_dir, outputFile, "")
--
--
--def build_image(name):
--    """Build a Docker image of specified name.
--
--    Output from image building process will be printed to stdout.
--    """
--    if not name:
--        raise ValueError('must provide a Docker image name')
--
--    image_dir = os.path.join(docker.IMAGE_DIR, name)
--    if not os.path.isdir(image_dir):
--        raise Exception('image directory does not exist: %s' % image_dir)
--
--    tag = docker.docker_image(name, by_tag=True)
--
--    docker_bin = which.which('docker')
--
--    # Verify that Docker is working.
--    try:
--        subprocess.check_output([docker_bin, '--version'])
--    except subprocess.CalledProcessError:
--        raise Exception('Docker server is unresponsive. Run `docker ps` and '
--                        'check that Docker is running')
--
--    # We obtain a context archive and build from that. Going through the
--    # archive creation is important: it normalizes things like file owners
--    # and mtimes to increase the chances that image generation is
--    # deterministic.
--    fd, context_path = tempfile.mkstemp()
--    os.close(fd)
--    try:
--        docker.create_context_tar(GECKO, image_dir, context_path, name)
--        docker.build_from_context(docker_bin, context_path, name, tag)
--    finally:
--        os.unlink(context_path)
--
--    print('Successfully built %s and tagged with %s' % (name, tag))
--
--    if tag.endswith(':latest'):
--        print('*' * 50)
--        print('WARNING: no VERSION file found in image directory.')
--        print('Image is not suitable for deploying/pushing.')
--        print('Create an image suitable for deploying/pushing by creating')
--        print('a VERSION file in the image directory.')
--        print('*' * 50)
--
--
--def load_image(url, imageName=None, imageTag=None):
--    """
--    Load a docker image from a URL as imageName:tag; if no imageName or tag
--    is given, whatever is inside the zstd-compressed tarball is used.
--
--    Returns an object with properties 'image', 'tag' and 'layer'.
--    """
--    # If imageName is given and we don't have an imageTag,
--    # we parse the imageTag out of imageName, or default it to 'latest'.
--    # If neither imageName nor imageTag is given, 'repositories' won't be rewritten.
--    if imageName and not imageTag:
--        if ':' in imageName:
--            imageName, imageTag = imageName.split(':', 1)
--        else:
--            imageTag = 'latest'
--
--    curl, zstd, docker = None, None, None
--    image, tag, layer = None, None, None
--    error = None
--    try:
--        # Setup piping: curl | zstd | tarin
--        curl = Popen(['curl', '-#', '--fail', '-L', '--retry', '8', url], stdout=PIPE)
--        zstd = Popen(['zstd', '-d'], stdin=curl.stdout, stdout=PIPE)
--        tarin = tarfile.open(mode='r|', fileobj=zstd.stdout)
--        # Setup piping: tarout | docker
--        docker = Popen(['docker', 'load'], stdin=PIPE)
--        tarout = tarfile.open(mode='w|', fileobj=docker.stdin, format=tarfile.GNU_FORMAT)
--
--        # Read from tarin and write to tarout
--        for member in tarin:
--            # Write non-file members directly (don't use extractfile on links)
--            if not member.isfile():
--                tarout.addfile(member)
--                continue
--
--            # Open reader for the member
--            reader = tarin.extractfile(member)
--
--            # If member is repository, we parse and possibly rewrite the image tags
--            if member.name == 'repositories':
--                # Read and parse repositories
--                repos = json.loads(reader.read())
--                reader.close()
--
--                # If there is more than one image or tag, we can't handle it here
--                if len(repos.keys()) > 1:
--                    raise Exception('file contains more than one image')
--                image = repos.keys()[0]
--                if len(repos[image].keys()) > 1:
--                    raise Exception('file contains more than one tag')
--                tag = repos[image].keys()[0]
--                layer = repos[image][tag]
--
--                # Rewrite the repositories file
--                data = json.dumps({imageName or image: {imageTag or tag: layer}})
--                reader = BytesIO(data)
--                member.size = len(data)
--
--            # Add member and reader
--            tarout.addfile(member, reader)
--            reader.close()
--        tarout.close()
--    except Exception:
--        error = sys.exc_info()[0]
--    finally:
--        def trykill(proc):
--            try:
--                proc.kill()
--            except:
--                pass
--
--        # Check that all subprocesses finished correctly
--        if curl and curl.wait() != 0:
--            trykill(zstd)
--            trykill(docker)
--            raise Exception('failed to download from url: {}'.format(url))
--        if zstd and zstd.wait() != 0:
--            trykill(docker)
--            raise Exception('zstd decompression failed')
--        if docker:
--            docker.stdin.close()
--        if docker and docker.wait() != 0:
--            raise Exception('loading into docker failed')
--        if error:
--            raise error
--
--    # Check that we found a repositories file
--    if not image or not tag or not layer:
--        raise Exception('No repositories file found!')
--
--    return {'image': image, 'tag': tag, 'layer': layer}
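
`load_image` above streams `curl | zstd | tar` without ever touching disk. A
minimal sketch of that piping pattern in isolation (assumes `curl` and `zstd`
are on PATH; the URL is hypothetical):

    from subprocess import Popen, PIPE
    import tarfile

    url = 'https://example.com/image.tar.zst'      # hypothetical artifact URL
    curl = Popen(['curl', '-L', '--fail', url], stdout=PIPE)
    zstd = Popen(['zstd', '-d'], stdin=curl.stdout, stdout=PIPE)
    # tarfile reads the decompressed stream member by member ('r|' = pipe mode)
    tar = tarfile.open(mode='r|', fileobj=zstd.stdout)
    for member in tar:
        print(member.name)
    tar.close()
    zstd.wait()
    curl.wait()
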
-diff --git a/taskcluster/taskgraph/files_changed.py b/taskcluster/taskgraph/files_changed.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/files_changed.py
-+++ /dev/null
-@@ -1,65 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--"""
--Support for optimizing tasks based on the set of files that have changed.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import logging
--import requests
--from redo import retry
--from mozpack.path import match as mozpackmatch
--
--logger = logging.getLogger(__name__)
--_cache = {}
--
--
--def get_changed_files(repository, revision):
--    """
--    Get the set of files changed in the push headed by the given revision.
--    Responses are cached, so multiple calls with the same arguments are OK.
--    """
--    key = repository, revision
--    if key not in _cache:
--        url = '%s/json-automationrelevance/%s' % (repository.rstrip('/'), revision)
--        logger.debug("Querying version control for metadata: %s", url)
--
--        def get_automationrelevance():
--            response = requests.get(url, timeout=5)
--            return response.json()
--        contents = retry(get_automationrelevance, attempts=2, sleeptime=10)
--
--        logger.debug('{} commits influencing task scheduling:'
--                     .format(len(contents['changesets'])))
--        changed_files = set()
--        for c in contents['changesets']:
--            logger.debug(" {cset} {desc}".format(
--                cset=c['node'][0:12],
--                desc=c['desc'].splitlines()[0].encode('ascii', 'ignore')))
--            changed_files |= set(c['files'])
--
--        _cache[key] = changed_files
--    return _cache[key]
--
--
--def check(params, file_patterns):
--    """Determine whether any of the files changed in the indicated push to
--    https://hg.mozilla.org match any of the given file patterns."""
--    repository = params.get('head_repository')
--    revision = params.get('head_rev')
--    if not repository or not revision:
--        logger.warning("Missing `head_repository` or `head_rev` parameters; "
--                       "assuming all files have changed")
--        return True
--
--    changed_files = get_changed_files(repository, revision)
--
--    for pattern in file_patterns:
--        for path in changed_files:
--            if mozpackmatch(path, pattern):
--                return True
--
--    return False
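
The patterns handed to `check` are mozpack match patterns. A self-contained
sketch of the inner matching loop, with `fnmatch` standing in for
`mozpack.path.match` and an invented set of changed files:

    from fnmatch import fnmatch   # stand-in for mozpack.path.match here

    changed_files = {'taskcluster/ci/test/tests.yml',
                     'browser/base/content/browser.js'}
    file_patterns = ['taskcluster/**']

    def check(changed_files, file_patterns):
        for pattern in file_patterns:
            for path in changed_files:
                if fnmatch(path, pattern):
                    return True
        return False

    print(check(changed_files, file_patterns))   # True
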
-diff --git a/taskcluster/taskgraph/filter_tasks.py b/taskcluster/taskgraph/filter_tasks.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/filter_tasks.py
-+++ /dev/null
-@@ -1,67 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, unicode_literals
--
--import logging
--
--from . import (
--    target_tasks,
--)
--
--logger = logging.getLogger(__name__)
--
--filter_task_functions = {}
--
--
--def filter_task(name):
--    """Generator to declare a task filter function."""
--    def wrap(func):
--        filter_task_functions[name] = func
--        return func
--    return wrap
--
--
--@filter_task('target_tasks_method')
--def filter_target_tasks(graph, parameters):
--    """Proxy filter to use legacy target tasks code.
--
--    This should go away once target_tasks are converted to filters.
--    """
--
--    attr = parameters.get('target_tasks_method', 'all_tasks')
--    fn = target_tasks.get_method(attr)
--    return fn(graph, parameters)
--
--
--@filter_task('check_servo')
--def filter_servo(graph, parameters):
--    """Filter out tasks for Servo vendoring changesets.
--
--    If the triggering change is related to Servo vendoring, impact is minimal
--    because not all platforms use Servo code.
--
--    We filter out tests on platforms that don't run Servo tests because running
--    tests will accomplish little for these changes.
--    """
--
--    SERVO_TEST_PLATFORMS = {
--        'linux64',
--        'linux64-stylo',
--    }
--
--    def fltr(task):
--        if parameters.get('owner') != "servo-vcs-sync@mozilla.com":
--            return True
--
--        # This is a Servo vendor change.
--
--        # Servo code is compiled. So we at least need to build. Resource
--        # savings come from pruning tests. So that's where we filter.
--        if task.attributes.get('kind') != 'test':
--            return True
--
--        return task.attributes.get('build_platform') in SERVO_TEST_PLATFORMS
--
--    return [l for l, t in graph.tasks.iteritems() if fltr(t)]
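
Any function registered through `filter_task` can then be named in
`parameters['filters']`. A sketch of registering a hypothetical filter that
keeps only build tasks (registry copied from above):

    filter_task_functions = {}

    def filter_task(name):
        """Decorator to declare a task filter function."""
        def wrap(func):
            filter_task_functions[name] = func
            return func
        return wrap

    @filter_task('builds_only')        # hypothetical filter name
    def filter_builds_only(graph, parameters):
        # keep only the labels of tasks whose kind is 'build'
        return [label for label, task in graph.tasks.items()
                if task.attributes.get('kind') == 'build']
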
-diff --git a/taskcluster/taskgraph/generator.py b/taskcluster/taskgraph/generator.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/generator.py
-+++ /dev/null
-@@ -1,328 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--import logging
--import os
--import yaml
--import copy
--
--from . import filter_tasks
--from .graph import Graph
--from .taskgraph import TaskGraph
--from .task import Task
--from .optimize import optimize_task_graph
--from .morph import morph
--from .util.python_path import find_object
--from .transforms.base import TransformSequence, TransformConfig
--from .util.verify import (
--    verify_docs,
--    verify_task_graph_symbol,
--    verify_gecko_v2_routes,
--)
--
--logger = logging.getLogger(__name__)
--
--
--class Kind(object):
--
--    def __init__(self, name, path, config):
--        self.name = name
--        self.path = path
--        self.config = config
--
--    def _get_loader(self):
--        try:
--            loader = self.config['loader']
--        except KeyError:
--            raise KeyError("{!r} does not define `loader`".format(self.path))
--        return find_object(loader)
--
--    def load_tasks(self, parameters, loaded_tasks):
--        loader = self._get_loader()
--        config = copy.deepcopy(self.config)
--
--        if 'parse-commit' in self.config:
--            parse_commit = find_object(config['parse-commit'])
--            config['args'] = parse_commit(parameters['message'])
--        else:
--            config['args'] = None
--
--        kind_dependencies = config.get('kind-dependencies', [])
--        kind_dependencies_tasks = [task for task in loaded_tasks
--                                   if task.kind in kind_dependencies]
--
--        inputs = loader(self.name, self.path, config, parameters, loaded_tasks)
--
--        transforms = TransformSequence()
--        for xform_path in config['transforms']:
--            transform = find_object(xform_path)
--            transforms.add(transform)
--
--        # perform the transformations on the loaded inputs
--        trans_config = TransformConfig(self.name, self.path, config, parameters,
--                                       kind_dependencies_tasks)
--        tasks = [Task(self.name,
--                      label=task_dict['label'],
--                      attributes=task_dict['attributes'],
--                      task=task_dict['task'],
--                      optimizations=task_dict.get('optimizations'),
--                      dependencies=task_dict.get('dependencies'))
--                 for task_dict in transforms(trans_config, inputs)]
--        return tasks
--
--
--class TaskGraphGenerator(object):
--    """
--    The central controller for taskgraph.  This handles all phases of graph
--    generation.  The task is generated from all of the kinds defined in
--    subdirectories of the generator's root directory.
--
--    Access to the results of this generation, as well as intermediate values at
--    various phases of generation, is available via properties.  This encourages
--    the provision of all generation inputs at instance construction time.
--    """
--
--    # Task-graph generation is implemented as a Python generator that yields
--    # each "phase" of generation.  This allows some mach subcommands to short-
--    # circuit generation of the entire graph by never completing the generator.
--
--    def __init__(self, root_dir, parameters):
--        """
--        @param root_dir: root directory, with subdirectories for each kind
--        @param parameters: parameters for this task-graph generation
--        @type parameters: dict
--        """
--        self.root_dir = root_dir
--        self.parameters = parameters
--
--        self.verify_parameters(self.parameters)
--
--        filters = parameters.get('filters', [])
--
--        # Always add legacy target tasks method until we deprecate that API.
--        if 'target_tasks_method' not in filters:
--            filters.insert(0, 'target_tasks_method')
--
--        self.filters = [filter_tasks.filter_task_functions[f] for f in filters]
--
--        # this can be set up until the time the target task set is generated;
--        # it defaults to parameters['target_tasks']
--        self._target_tasks = parameters.get('target_tasks')
--
--        # start the generator
--        self._run = self._run()
--        self._run_results = {}
--
--    @property
--    def full_task_set(self):
--        """
--        The full task set: all tasks defined by any kind (a graph without edges)
--
--        @type: TaskGraph
--        """
--        return self._run_until('full_task_set')
--
--    @property
--    def full_task_graph(self):
--        """
--        The full task graph: the full task set, with edges representing
--        dependencies.
--
--        @type: TaskGraph
--        """
--        return self._run_until('full_task_graph')
--
--    @property
--    def target_task_set(self):
--        """
--        The set of targeted tasks (a graph without edges)
--
--        @type: TaskGraph
--        """
--        return self._run_until('target_task_set')
--
--    @property
--    def target_task_graph(self):
--        """
--        The set of targeted tasks and all of their dependencies
--
--        @type: TaskGraph
--        """
--        return self._run_until('target_task_graph')
--
--    @property
--    def optimized_task_graph(self):
--        """
--        The set of targeted tasks and all of their dependencies; tasks that
--        have been optimized out are either omitted or replaced with a Task
--        instance containing only a task_id.
--
--        @type: TaskGraph
--        """
--        return self._run_until('optimized_task_graph')
--
--    @property
--    def label_to_taskid(self):
--        """
--        A dictionary mapping task label to assigned taskId.  This property helps
--        in interpreting `optimized_task_graph`.
--
--        @type: dictionary
--        """
--        return self._run_until('label_to_taskid')
--
--    @property
--    def morphed_task_graph(self):
--        """
--        The optimized task graph, with any subsequent morphs applied. This graph
--        will have the same meaning as the optimized task graph, but be in a form
--        more palatable to TaskCluster.
--
--        @type: TaskGraph
--        """
--        return self._run_until('morphed_task_graph')
--
--    def _load_kinds(self):
--        for path in os.listdir(self.root_dir):
--            path = os.path.join(self.root_dir, path)
--            if not os.path.isdir(path):
--                continue
--            kind_name = os.path.basename(path)
--
--            kind_yml = os.path.join(path, 'kind.yml')
--            if not os.path.exists(kind_yml):
--                continue
--
--            logger.debug("loading kind `{}` from `{}`".format(kind_name, path))
--            with open(kind_yml) as f:
--                config = yaml.load(f)
--
--            yield Kind(kind_name, path, config)
--
--    def _run(self):
--        logger.info("Loading kinds")
--        # put the kinds into a graph and sort topologically so that kinds are loaded
--        # in post-order
--        kinds = {kind.name: kind for kind in self._load_kinds()}
--        self.verify_kinds(kinds)
--
--        edges = set()
--        for kind in kinds.itervalues():
--            for dep in kind.config.get('kind-dependencies', []):
--                edges.add((kind.name, dep, 'kind-dependency'))
--        kind_graph = Graph(set(kinds), edges)
--
--        logger.info("Generating full task set")
--        all_tasks = {}
--        for kind_name in kind_graph.visit_postorder():
--            logger.debug("Loading tasks for kind {}".format(kind_name))
--            kind = kinds[kind_name]
--            new_tasks = kind.load_tasks(self.parameters, list(all_tasks.values()))
--            for task in new_tasks:
--                if task.label in all_tasks:
--                    raise Exception("duplicate tasks with label " + task.label)
--                all_tasks[task.label] = task
--            logger.info("Generated {} tasks for kind {}".format(len(new_tasks), kind_name))
--        full_task_set = TaskGraph(all_tasks, Graph(set(all_tasks), set()))
--        self.verify_attributes(all_tasks)
--        self.verify_run_using()
--        yield 'full_task_set', full_task_set
--
--        logger.info("Generating full task graph")
--        edges = set()
--        for t in full_task_set:
--            for depname, dep in t.dependencies.iteritems():
--                edges.add((t.label, dep, depname))
--
--        full_task_graph = TaskGraph(all_tasks,
--                                    Graph(full_task_set.graph.nodes, edges))
--        full_task_graph.for_each_task(verify_task_graph_symbol, scratch_pad={})
--        full_task_graph.for_each_task(verify_gecko_v2_routes, scratch_pad={})
--        logger.info("Full task graph contains %d tasks and %d dependencies" % (
--            len(full_task_set.graph.nodes), len(edges)))
--        yield 'full_task_graph', full_task_graph
--
--        logger.info("Generating target task set")
--        target_task_set = TaskGraph(dict(all_tasks),
--                                    Graph(set(all_tasks.keys()), set()))
--        for fltr in self.filters:
--            old_len = len(target_task_set.graph.nodes)
--            target_tasks = set(fltr(target_task_set, self.parameters))
--            target_task_set = TaskGraph(
--                {l: all_tasks[l] for l in target_tasks},
--                Graph(target_tasks, set()))
--            logger.info('Filter %s pruned %d tasks (%d remain)' % (
--                fltr.__name__,
--                old_len - len(target_tasks),
--                len(target_tasks)))
--
--        yield 'target_task_set', target_task_set
--
--        logger.info("Generating target task graph")
--        # include all docker-image build tasks here, in case they are needed for a graph morph
--        docker_image_tasks = set(t.label for t in full_task_graph.tasks.itervalues()
--                                 if t.attributes['kind'] == 'docker-image')
--        target_graph = full_task_graph.graph.transitive_closure(target_tasks | docker_image_tasks)
--        target_task_graph = TaskGraph(
--            {l: all_tasks[l] for l in target_graph.nodes},
--            target_graph)
--        yield 'target_task_graph', target_task_graph
--
--        logger.info("Generating optimized task graph")
--        do_not_optimize = set()
--        if not self.parameters.get('optimize_target_tasks', True):
--            do_not_optimize = target_task_set.graph.nodes
--        optimized_task_graph, label_to_taskid = optimize_task_graph(target_task_graph,
--                                                                    self.parameters,
--                                                                    do_not_optimize)
--
--        yield 'optimized_task_graph', optimized_task_graph
--
--        morphed_task_graph, label_to_taskid = morph(optimized_task_graph, label_to_taskid)
--
--        yield 'label_to_taskid', label_to_taskid
--        yield 'morphed_task_graph', morphed_task_graph
--
--    def _run_until(self, name):
--        while name not in self._run_results:
--            try:
--                k, v = self._run.next()
--            except StopIteration:
--                raise AttributeError("No such run result {}".format(name))
--            self._run_results[k] = v
--        return self._run_results[name]
--
--    def verify_parameters(self, parameters):
--        parameters_dict = dict(**parameters)
--        verify_docs(
--            filename="parameters.rst",
--            identifiers=parameters_dict.keys(),
--            appearing_as="inline-literal"
--         )
--
--    def verify_kinds(self, kinds):
--        verify_docs(
--            filename="kinds.rst",
--            identifiers=kinds.keys(),
--            appearing_as="heading"
--         )
--
--    def verify_attributes(self, all_tasks):
--        attribute_set = set()
--        for label, task in all_tasks.iteritems():
--            attribute_set.update(task.attributes.keys())
--        verify_docs(
--            filename="attributes.rst",
--            identifiers=list(attribute_set),
--            appearing_as="heading"
--         )
--
--    def verify_run_using(self):
--        from .transforms.job import registry
--        verify_docs(
--            filename="transforms.rst",
--            identifiers=registry.keys(),
--            appearing_as="inline-literal"
--         )
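
The phase pipeline above relies on a plain Python generator: each property
pulls phases out of `_run` only as far as needed, so asking for an early phase
never pays for a later one. A stripped-down sketch of the same pattern (phase
names and values are invented):

    class Phases(object):
        def __init__(self):
            self._gen = self._phases()
            self._results = {}

        def _phases(self):
            yield 'full_task_set', {'a', 'b'}        # cheap, early phase
            yield 'optimized_task_graph', {'a'}      # expensive, later phase

        def _run_until(self, name):
            # pull phases out of the generator until the requested one appears
            while name not in self._results:
                try:
                    k, v = next(self._gen)
                except StopIteration:
                    raise AttributeError("No such run result {}".format(name))
                self._results[k] = v
            return self._results[name]

    p = Phases()
    print(p._run_until('full_task_set'))   # only the first phase has run
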
-diff --git a/taskcluster/taskgraph/graph.py b/taskcluster/taskgraph/graph.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/graph.py
-+++ /dev/null
-@@ -1,117 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import collections
--
--
--class Graph(object):
--    """
--    Generic representation of a directed acyclic graph with labeled edges
--    connecting the nodes.  Graph operations are implemented in a functional
--    manner, so the data structure is immutable.
--
--    It permits at most one edge of a given name between any set of nodes.  The
--    graph is not checked for cycles, and methods may hang or otherwise fail if
--    given a cyclic graph.
--
--    The `nodes` and `edges` attributes may be accessed in a read-only fashion.
--    The `nodes` attribute is a set of node names, while `edges` is a set of
--    `(left, right, name)` tuples representing an edge named `name` going from
--    node `left` to node `right`.
--    """
--
--    def __init__(self, nodes, edges):
--        """
--        Create a graph.  Nodes and edges are both as described in the class
--        documentation.  Both values are used by reference, and should not be
--        modified after building a graph.
--        """
--        assert isinstance(nodes, set)
--        assert isinstance(edges, set)
--        self.nodes = nodes
--        self.edges = edges
--
--    def __eq__(self, other):
--        return self.nodes == other.nodes and self.edges == other.edges
--
--    def __repr__(self):
--        return "<Graph nodes={!r} edges={!r}>".format(self.nodes, self.edges)
--
--    def transitive_closure(self, nodes):
--        """
--        Return the transitive closure of <nodes>: the graph containing all
--        specified nodes as well as any nodes reachable from them, and any
--        intervening edges.
--        """
--        assert isinstance(nodes, set)
--        assert nodes <= self.nodes
--
--        # generate a new graph by expanding along edges until reaching a fixed
--        # point
--        new_nodes, new_edges = nodes, set()
--        nodes, edges = set(), set()
--        while (new_nodes, new_edges) != (nodes, edges):
--            nodes, edges = new_nodes, new_edges
--            add_edges = set((left, right, name)
--                            for (left, right, name) in self.edges
--                            if left in nodes)
--            add_nodes = set(right for (_, right, _) in add_edges)
--            new_nodes = nodes | add_nodes
--            new_edges = edges | add_edges
--        return Graph(new_nodes, new_edges)
--
--    def visit_postorder(self):
--        """
--        Generate a sequence of nodes in postorder, such that every node is
--        visited *after* any nodes it links to.
--
--        Behavior is undefined (read: it will hang) if the graph contains a
--        cycle.
--        """
--        queue = collections.deque(sorted(self.nodes))
--        links_by_node = self.links_dict()
--        seen = set()
--        while queue:
--            node = queue.popleft()
--            if node in seen:
--                continue
--            links = links_by_node[node]
--            if all((n in seen) for n in links):
--                seen.add(node)
--                yield node
--            else:
--                queue.extend(n for n in links if n not in seen)
--                queue.append(node)
--
--    def links_dict(self):
--        """
--        Return a dictionary mapping each node to a set of the nodes it links to
--        (omitting edge names)
--        """
--        links = collections.defaultdict(set)
--        for left, right, _ in self.edges:
--            links[left].add(right)
--        return links
--
--    def named_links_dict(self):
--        """
--        Return a two-level dictionary mapping each node to a dictionary mapping
--        edge names to labels.
--        """
--        links = collections.defaultdict(dict)
--        for left, right, name in self.edges:
--            links[left][name] = right
--        return links
--
--    def reverse_links_dict(self):
--        """
--        Return a dictionary mapping each node to a set of the nodes linking to
--        it (omitting edge names)
--        """
--        links = collections.defaultdict(set)
--        for left, right, _ in self.edges:
--            links[right].add(left)
--        return links
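
A small worked example of the `Graph` class defined above (labels invented):

    g = Graph(nodes={'build', 'test', 'upload'},
              edges={('test', 'build', 'dep'), ('upload', 'test', 'dep')})

    # transitive_closure pulls in everything reachable from {'test'}
    print(sorted(g.transitive_closure({'test'}).nodes))   # ['build', 'test']

    # postorder guarantees a node's dependencies come out before it does
    print(list(g.visit_postorder()))   # ['build', 'test', 'upload']
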
-diff --git a/taskcluster/taskgraph/loader/__init__.py b/taskcluster/taskgraph/loader/__init__.py
-deleted file mode 100644
-diff --git a/taskcluster/taskgraph/loader/build_signing.py b/taskcluster/taskgraph/loader/build_signing.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/loader/build_signing.py
-+++ /dev/null
-@@ -1,25 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.loader.single_dep import loader as base_loader
--
--# XXX: This logic should live in kind.yml. This hasn't been done in the original
--# patch because it required some heavy changes in single_dep.
--LABELS_WHICH_SHOULD_SIGN_CI_BUILDS = (
--    'build-win32/debug', 'build-win32/opt', 'build-win32/pgo',
--    'build-win64/debug', 'build-win64/opt', 'build-win64/pgo',
--    'build-win32-devedition/opt', 'build-win64-devedition/opt',
--)
--
--
--def loader(kind, path, config, params, loaded_tasks):
--    jobs = base_loader(kind, path, config, params, loaded_tasks)
--
--    for job in jobs:
--        dependent_task = job['dependent-task']
--        if dependent_task.attributes.get('nightly') or \
--                dependent_task.label in LABELS_WHICH_SHOULD_SIGN_CI_BUILDS:
--            yield job
-diff --git a/taskcluster/taskgraph/loader/push_apk.py b/taskcluster/taskgraph/loader/push_apk.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/loader/push_apk.py
-+++ /dev/null
-@@ -1,48 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from .transform import loader as base_loader
--
--
--def loader(kind, path, config, params, loaded_tasks):
--    """
--    Generate inputs implementing PushApk jobs. These depend on signed multi-locales nightly builds.
--    """
--    jobs = base_loader(kind, path, config, params, loaded_tasks)
--
--    for job in jobs:
--        dependent_tasks = get_dependent_loaded_tasks(config, loaded_tasks)
--        if not dependent_tasks:
--            # PushApk must depend on signed APK. If no dependent task was found,
--            # this means another platform (like windows) is being processed
--            continue
--
--        job['dependent-tasks'] = dependent_tasks
--        yield job
--
--
--def get_dependent_loaded_tasks(config, loaded_tasks):
--    nightly_tasks = (
--        task for task in loaded_tasks if task.attributes.get('nightly')
--    )
--    tasks_with_matching_kind = (
--        task for task in nightly_tasks if task.kind in config.get('kind-dependencies')
--    )
--    android_tasks = [
--        task for task in tasks_with_matching_kind
--        # old-id builds are not shipped through the Play store, so we don't
--        # want them as dependencies.
--        if task.attributes.get('build_platform', '').startswith('android') \
--        and 'old-id' not in task.attributes.get('build_platform', '')
--    ]
--
--    # TODO Bug 1368484: Activate aarch64 once ready
--    non_aarch64_tasks = [
--        task for task in android_tasks
--        if 'aarch64' not in task.attributes.get('build_platform', '')
--    ]
--
--    return non_aarch64_tasks
-diff --git a/taskcluster/taskgraph/loader/single_dep.py b/taskcluster/taskgraph/loader/single_dep.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/loader/single_dep.py
-+++ /dev/null
-@@ -1,57 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import copy
--
--
--def loader(kind, path, config, params, loaded_tasks):
--    """
--    Load tasks based on the jobs' dependent kinds.
--
--    The `only-for-build-platforms` kind configuration, if specified, will limit
--    the build platforms for which a job will be created. Alternatively, the
--    `not-for-build-platforms` kind configuration, consulted only after
--    `only-for-build-platforms` is checked (if present), will omit any jobs
--    whose build platform matches.
--
--    The optional `only-for-attributes` kind configuration, if specified, will
--    limit the jobs chosen to ones which define each of the listed attributes.
--
--    Optional `job-template` kind configuration value, if specified, will be used to
--    pass configuration down to the specified transforms used.
--    """
--    only_platforms = config.get('only-for-build-platforms')
--    not_platforms = config.get('not-for-build-platforms')
--    only_attributes = config.get('only-for-attributes')
--    job_template = config.get('job-template')
--
--    for task in loaded_tasks:
--        if task.kind not in config.get('kind-dependencies', []):
--            continue
--
--        if only_platforms or not_platforms:
--            build_platform = task.attributes.get('build_platform')
--            build_type = task.attributes.get('build_type')
--            if not build_platform or not build_type:
--                continue
--            platform = "{}/{}".format(build_platform, build_type)
--            if only_platforms and platform not in only_platforms:
--                continue
--            elif not_platforms and platform in not_platforms:
--                continue
--
--        if only_attributes:
--            config_attrs = set(only_attributes)
--            if config_attrs - set(task.attributes):
--                # make sure all attributes exist
--                continue
--
--        job = {'dependent-task': task}
--        if job_template:
--            job.update(copy.deepcopy(job_template))
--
--        yield job
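
In practice these knobs arrive via kind.yml. A sketch of a config mapping this
loader would consume (all values illustrative):

    config = {
        'kind-dependencies': ['build'],
        'only-for-build-platforms': ['win32/opt', 'win64/opt'],
        'only-for-attributes': ['nightly'],            # attributes that must exist
        'job-template': {'treeherder-group': 'cs'},    # merged into every job
    }
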
-diff --git a/taskcluster/taskgraph/loader/test.py b/taskcluster/taskgraph/loader/test.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/loader/test.py
-+++ /dev/null
-@@ -1,132 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import copy
--import logging
--
--from ..util.yaml import load_yaml
--
--logger = logging.getLogger(__name__)
--
--
--def loader(kind, path, config, params, loaded_tasks):
--    """
--    Generate tasks implementing Gecko tests.
--    """
--
--    # the kind on which this one depends
--    if len(config.get('kind-dependencies', [])) != 2:
--        raise Exception(
--            'Test kinds must have exactly 2 items in kind-dependencies'
--        )
--
--    builds_by_platform = get_builds_by_platform(dep_kind='build', loaded_tasks=loaded_tasks)
--    signed_builds_by_platform = get_builds_by_platform(
--        dep_kind='build-signing', loaded_tasks=loaded_tasks
--    )
--
--    # get the test platforms for those build tasks
--    test_platforms_cfg = load_yaml(path, 'test-platforms.yml')
--    test_platforms = get_test_platforms(
--        test_platforms_cfg, builds_by_platform, signed_builds_by_platform
--    )
--
--    # expand the test sets for each of those platforms
--    test_sets_cfg = load_yaml(path, 'test-sets.yml')
--    test_platforms = expand_tests(test_sets_cfg, test_platforms)
--
--    # load the test descriptions
--    test_descriptions = load_yaml(path, 'tests.yml', enforce_order=True)
--
--    # generate all tests for all test platforms
--    for test_platform_name, test_platform in test_platforms.iteritems():
--        for test_name in test_platform['test-names']:
--            test = copy.deepcopy(test_descriptions[test_name])
--            test['build-platform'] = test_platform['build-platform']
--            test['test-platform'] = test_platform_name
--            test['build-label'] = test_platform['build-label']
--            if test_platform.get('build-signing-label', None):
--                test['build-signing-label'] = test_platform['build-signing-label']
--
--            test['build-attributes'] = test_platform['build-attributes']
--            test['test-name'] = test_name
--            if test_platform.get('nightly'):
--                test.setdefault('attributes', {})['nightly'] = True
--
--            logger.debug("Generating tasks for test {} on platform {}".format(
--                test_name, test['test-platform']))
--            yield test
--
--
--def get_builds_by_platform(dep_kind, loaded_tasks):
--    """Find the build tasks on which tests will depend, keyed by
--    platform/type.  Returns a dictionary mapping build platform to task."""
--    builds_by_platform = {}
--    for task in loaded_tasks:
--        if task.kind != dep_kind:
--            continue
--
--        build_platform = task.attributes.get('build_platform')
--        build_type = task.attributes.get('build_type')
--        if not build_platform or not build_type:
--            continue
--        platform = "{}/{}".format(build_platform, build_type)
--        if platform in builds_by_platform:
--            raise Exception("multiple build jobs for " + platform)
--        builds_by_platform[platform] = task
--    return builds_by_platform
--
--
--def get_test_platforms(test_platforms_cfg, builds_by_platform, signed_builds_by_platform={}):
--    """Get the test platforms for which test tasks should be generated,
--    based on the available build platforms.  Returns a dictionary mapping
--    test platform to {test-set, build-platform, build-label}."""
--    test_platforms = {}
--    for test_platform, cfg in test_platforms_cfg.iteritems():
--        build_platform = cfg['build-platform']
--        if build_platform not in builds_by_platform:
--            logger.warning(
--                "No build task with platform {}; ignoring test platform {}".format(
--                    build_platform, test_platform))
--            continue
--        test_platforms[test_platform] = {
--            'build-platform': build_platform,
--            'build-label': builds_by_platform[build_platform].label,
--            'build-attributes': builds_by_platform[build_platform].attributes,
--        }
--
--        if builds_by_platform[build_platform].attributes.get('nightly'):
--            test_platforms[test_platform]['nightly'] = \
--                builds_by_platform[build_platform].attributes['nightly']
--
--        test_platforms[test_platform].update(cfg)
--
--        if build_platform in signed_builds_by_platform:
--            # Context: Signed builds are only used by Windows
--            test_platforms[test_platform]['build-signing-label'] = \
--                signed_builds_by_platform[build_platform].label
--
--    return test_platforms
--
--
--def expand_tests(test_sets_cfg, test_platforms):
--    """Expand the test sets in `test_platforms` out to sets of test names.
--    Returns a dictionary like `get_test_platforms`, with an additional
--    `test-names` key for each test platform, containing a set of test
--    names."""
--    rv = {}
--    for test_platform, cfg in test_platforms.iteritems():
--        test_sets = cfg['test-sets']
--        if not set(test_sets) <= set(test_sets_cfg):
--            raise Exception(
--                "Test sets {} for test platform {} are not defined".format(
--                    ', '.join(test_sets), test_platform))
--        test_names = set()
--        for test_set in test_sets:
--            test_names.update(test_sets_cfg[test_set])
--        rv[test_platform] = cfg.copy()
--        rv[test_platform]['test-names'] = test_names
--    return rv
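
A sketch of `expand_tests` as defined above resolving test sets into concrete
test names (set and test names invented):

    test_sets_cfg = {'common': ['mochitest', 'xpcshell'],
                     'web-platform': ['web-platform-tests']}
    test_platforms = {'linux64/opt': {'build-platform': 'linux64/opt',
                                      'test-sets': ['common']}}

    expanded = expand_tests(test_sets_cfg, test_platforms)
    print(expanded['linux64/opt']['test-names'])
    # {'mochitest', 'xpcshell'} (a set; order may vary)
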
-diff --git a/taskcluster/taskgraph/loader/transform.py b/taskcluster/taskgraph/loader/transform.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/loader/transform.py
-+++ /dev/null
-@@ -1,48 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import logging
--import itertools
--
--from ..util.templates import merge
--from ..util.yaml import load_yaml
--
--logger = logging.getLogger(__name__)
--
--
--def loader(kind, path, config, params, loaded_tasks):
--    """
--    Get the input elements that will be transformed into tasks in a generic
--    way.  The elements themselves are free-form, and become the input to the
--    first transform.
--
--    By default, this reads jobs from the `jobs` key, or from yaml files
--    named by `jobs-from`.  The entities are read from mappings, and the
--    keys to those mappings are added as the `name` key of each entity.
--
--    If there is a `job-defaults` config, then every job is merged with it.
--    This provides a simple way to set default values for all jobs of a
--    kind.  More complex defaults should be implemented with custom
--    transforms.
--
--    Other kind implementations can use a different loader function to
--    produce inputs and hand them to `transform_inputs`.
--    """
--    def jobs():
--        defaults = config.get('job-defaults')
--        jobs = config.get('jobs', {}).iteritems()
--        jobs_from = itertools.chain.from_iterable(
--            load_yaml(path, filename).iteritems()
--            for filename in config.get('jobs-from', {}))
--        for name, job in itertools.chain(jobs, jobs_from):
--            if defaults:
--                job = merge(defaults, job)
--            yield name, job
--
--    for name, job in jobs():
--        job['name'] = name
--        logger.debug("Generating tasks for {} {}".format(kind, name))
--        yield job
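
A sketch of what this loader yields for a kind using `job-defaults` (kind keys
invented; the deep merge comes from `taskgraph.util.templates.merge`):

    config = {
        'job-defaults': {'worker-type': 'b-linux', 'run': {'using': 'mach'}},
        'jobs': {
            'lint': {'run': {'mach': 'lint'}},
            'docs': {'run': {'mach': 'doc'}},
        },
    }
    # each yielded job is job-defaults deep-merged with the per-job entry,
    # plus a 'name' key, e.g. for 'lint':
    #   {'name': 'lint', 'worker-type': 'b-linux',
    #    'run': {'using': 'mach', 'mach': 'lint'}}
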
-diff --git a/taskcluster/taskgraph/morph.py b/taskcluster/taskgraph/morph.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/morph.py
-+++ /dev/null
-@@ -1,252 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--"""
--Graph morphs are modifications to task-graphs that take place *after* the
--optimization phase.
--
--These graph morphs are largely invisible to developers running `./mach`
--locally, so they should be limited to changes that do not modify the meaning of
--the graph.
--"""
--
--# Note that the translation of `{'task-reference': '..'}` is handled in the
--# optimization phase (since optimization involves dealing with taskIds
--# directly).  Similarly, `{'relative-datestamp': '..'}` is handled at the last
--# possible moment during task creation.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import logging
--import re
--
--from slugid import nice as slugid
--from .task import Task
--from .graph import Graph
--from .taskgraph import TaskGraph
--
--logger = logging.getLogger(__name__)
--MAX_ROUTES = 10
--
--
--def amend_taskgraph(taskgraph, label_to_taskid, to_add):
--    """Add the given tasks to the taskgraph, returning a new taskgraph"""
--    new_tasks = taskgraph.tasks.copy()
--    new_edges = taskgraph.graph.edges.copy()
--    for task in to_add:
--        new_tasks[task.task_id] = task
--        assert task.label not in label_to_taskid
--        label_to_taskid[task.label] = task.task_id
--        for depname, dep in task.dependencies.iteritems():
--            new_edges.add((task.task_id, dep, depname))
--
--    taskgraph = TaskGraph(new_tasks, Graph(set(new_tasks), new_edges))
--    return taskgraph, label_to_taskid
--
--
--def derive_misc_task(task, purpose, image, taskgraph, label_to_taskid):
--    """Create the shell of a task that depends on `task` and on the given docker
--    image."""
--    label = '{}-{}'.format(purpose, task.label)
--
--    # this is why all docker image tasks are included in the target task graph: we
--    # need to find them in label_to_taskid, even if nothing else required them
--    image_taskid = label_to_taskid['build-docker-image-' + image]
--
--    task_def = {
--        'provisionerId': 'aws-provisioner-v1',
--        'workerType': 'gecko-misc',
--        'dependencies': [task.task_id, image_taskid],
--        'created': {'relative-datestamp': '0 seconds'},
--        'deadline': task.task['deadline'],
--        # no point existing past the parent task's deadline
--        'expires': task.task['deadline'],
--        'metadata': {
--            'name': label,
--            'description': '{} for {}'.format(purpose, task.task['metadata']['description']),
--            'owner': task.task['metadata']['owner'],
--            'source': task.task['metadata']['source'],
--        },
--        'scopes': [],
--        'payload': {
--            'image': {
--                'path': 'public/image.tar.zst',
--                'taskId': image_taskid,
--                'type': 'task-image',
--            },
--            'features': {
--                'taskclusterProxy': True,
--            },
--            'maxRunTime': 600,
--        }
--    }
--
--    # only include the docker-image dependency here if it is actually in the
--    # taskgraph (has not been optimized).  It is included in
--    # task_def['dependencies'] unconditionally.
--    dependencies = {'parent': task.task_id}
--    if image_taskid in taskgraph.tasks:
--        dependencies['docker-image'] = image_taskid
--
--    task = Task(kind='misc', label=label, attributes={}, task=task_def,
--                dependencies=dependencies)
--    task.task_id = slugid()
--    return task
--
--
--# these regular expressions capture route prefixes for which we have a star
--# scope, allowing them to be summarized.  Each should correspond to a star scope
--# in each Gecko `assume:repo:hg.mozilla.org/...` role.
--SCOPE_SUMMARY_REGEXPS = [
--    re.compile(r'(index:insert-task:buildbot\.branches\.[^.]*\.).*'),
--    re.compile(r'(index:insert-task:buildbot\.revisions\.).*'),
--    re.compile(r'(index:insert-task:docker\.images\.v1\.[^.]*\.).*'),
--    re.compile(r'(index:insert-task:gecko\.v2\.[^.]*\.).*'),
--]
--
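--# For example (illustrative scope only), with the regexps above a scope such as
--#   index:insert-task:gecko.v2.mozilla-central.latest.firefox.decision
--# is matched by the gecko.v2 pattern and summarized to the star scope
--#   index:insert-task:gecko.v2.mozilla-central.*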
--
--def make_index_task(parent_task, taskgraph, label_to_taskid):
--    index_paths = [r.split('.', 1)[1] for r in parent_task.task['routes']
--                   if r.startswith('index.')]
--    parent_task.task['routes'] = [r for r in parent_task.task['routes']
--                                  if not r.startswith('index.')]
--
--    task = derive_misc_task(parent_task, 'index-task', 'index-task',
--                            taskgraph, label_to_taskid)
--
--    # we need to "summarize" the scopes, otherwise a particularly
--    # namespace-heavy index task might have more scopes than can fit in a
--    # temporary credential.
--    scopes = set()
--    for path in index_paths:
--        scope = 'index:insert-task:{}'.format(path)
--        for summ_re in SCOPE_SUMMARY_REGEXPS:
--            match = summ_re.match(scope)
--            if match:
--                scope = match.group(1) + '*'
--                break
--        scopes.add(scope)
--    task.task['scopes'] = sorted(scopes)
--
--    task.task['payload']['command'] = ['insert-indexes.js'] + index_paths
--    task.task['payload']['env'] = {"TARGET_TASKID": parent_task.task_id}
--    return task
--
--
--def add_index_tasks(taskgraph, label_to_taskid):
--    """
--    The TaskCluster queue only allows 10 routes on a task, but we have tasks
--    with many more routes, for purposes of indexing. This graph morph adds
--    "index tasks" that depend on such tasks and do the index insertions
--    directly, avoiding the limits on task.routes.
--    """
--    logger.debug('Morphing: adding index tasks')
--
--    added = []
--    for label, task in taskgraph.tasks.iteritems():
--        if len(task.task.get('routes', [])) <= MAX_ROUTES:
--            continue
--        added.append(make_index_task(task, taskgraph, label_to_taskid))
--
--    if added:
--        taskgraph, label_to_taskid = amend_taskgraph(
--            taskgraph, label_to_taskid, added)
--        logger.info('Added {} index tasks'.format(len(added)))
--
--    return taskgraph, label_to_taskid
--
--
--def make_s3_uploader_task(parent_task):
--    if parent_task.task['payload']['sourcestamp']['branch'] == 'try':
--        worker_type = 'buildbot-try'
--    else:
--        worker_type = 'buildbot'
--
--    task_def = {
--        # The null-provisioner and buildbot worker type don't actually exist.
--        # So this task doesn't actually run - we just need to create the task so
--        # we have something to attach artifacts to.
--        "provisionerId": "null-provisioner",
--        "workerType": worker_type,
--        "created": {'relative-datestamp': '0 seconds'},
--        "deadline": parent_task.task['deadline'],
--        "routes": parent_task.task['routes'],
--        "payload": {},
--        "extra": {
--            "index": {
--                "rank": 1493912914,
--            }
--        },
--        "metadata": {
--            "name": "Buildbot/mozharness S3 uploader",
--            "description": "Upload outputs of buildbot/mozharness builds to S3",
--            "owner": "mshal@mozilla.com",
--            "source": "http://hg.mozilla.org/build/mozharness/",
--        }
--    }
--    parent_task.task['routes'] = []
--    label = 's3-uploader-{}'.format(parent_task.label)
--    dependencies = {}
--    task = Task(kind='misc', label=label, attributes={}, task=task_def,
--                dependencies=dependencies)
--    task.task_id = parent_task.task['payload']['properties']['upload_to_task_id']
--    return task
--
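--# Note on the code above: the uploader deliberately reuses the taskId that the
--# buildbot job advertised in payload.properties.upload_to_task_id, so the
--# build is already pointing at the task its artifacts will land on.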
--
--def update_test_tasks(taskid, build_taskid, taskgraph):
--    """Tests task must download artifacts from uploader task."""
--    # Notice we handle buildbot-bridge, native, and generic-worker payloads
--    # We could do better here in terms of graph searching: a post-order
--    # search could stop as soon as it reaches the build task. Not worrying
--    # about that because this is (supposed to be) a temporary solution.
--    for task in taskgraph.tasks.itervalues():
--        if build_taskid in task.task.get('dependencies', []):
--            payload = task.task['payload']
--            task.task['dependencies'].append(taskid)
--            taskgraph.graph.edges.add((task.task_id, taskid, 'uploader'))
--            if 'command' in payload:
--                try:
--                    payload['command'] = [
--                        cmd.replace(build_taskid, taskid) for cmd in payload['command']
--                    ]
--                except AttributeError:
--                    # generic-worker command attribute is a list of lists
--                    payload['command'] = [
--                        [cmd.replace(build_taskid, taskid) for cmd in x]
--                        for x in payload['command']
--                    ]
--            if 'mounts' in payload:
--                for mount in payload['mounts']:
--                    if mount.get('content', {}).get('taskId', '') == build_taskid:
--                        mount['content']['taskId'] = taskid
--            if 'env' in payload:
--                payload['env'] = {
--                    k: v.replace(build_taskid, taskid) for k, v in payload['env'].iteritems()
--                }
--            if 'properties' in payload:
--                payload['properties']['parent_task_id'] = taskid
--
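--# Rewriting sketch for update_test_tasks (taskIds and URL are hypothetical):
--# if a test payload contained
--#   ['wget https://queue.taskcluster.net/v1/task/BUILDID/artifacts/target.tar.bz2']
--# and the uploader task is UPLOADID, the command becomes
--#   ['wget https://queue.taskcluster.net/v1/task/UPLOADID/artifacts/target.tar.bz2']
--# and the same substitution is applied to env values and mount taskIds.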
--
--def add_s3_uploader_task(taskgraph, label_to_taskid):
--    """The S3 uploader task is used by mozharness to upload buildbot artifacts."""
--    for task in taskgraph.tasks.itervalues():
--        if 'upload_to_task_id' in task.task.get('payload', {}).get('properties', {}):
--            added = make_s3_uploader_task(task)
--            taskgraph, label_to_taskid = amend_taskgraph(
--                taskgraph, label_to_taskid, [added])
--            update_test_tasks(added.task_id, task.task_id, taskgraph)
--            logger.info('Added s3-uploader task')
--    return taskgraph, label_to_taskid
--
--
--def morph(taskgraph, label_to_taskid):
--    """Apply all morphs"""
--    morphs = [
--        add_index_tasks,
--        add_s3_uploader_task,
--    ]
--    for m in morphs:
--        taskgraph, label_to_taskid = m(taskgraph, label_to_taskid)
--    return taskgraph, label_to_taskid
-diff --git a/taskcluster/taskgraph/optimize.py b/taskcluster/taskgraph/optimize.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/optimize.py
-+++ /dev/null
-@@ -1,219 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import logging
--import os
--import requests
--
--from .graph import Graph
--from . import files_changed
--from .taskgraph import TaskGraph
--from .util.seta import is_low_value_task
--from .util.taskcluster import find_task_id
--from .util.parameterization import resolve_task_references
--from slugid import nice as slugid
--
--logger = logging.getLogger(__name__)
--
--_optimizations = {}
--
--
--def optimize_task_graph(target_task_graph, params, do_not_optimize, existing_tasks=None):
--    """
--    Perform task optimization, without optimizing tasks named in
--    do_not_optimize.
--    """
--    named_links_dict = target_task_graph.graph.named_links_dict()
--    label_to_taskid = {}
--
--    # This proceeds in two phases.  First, mark all optimized tasks (those
--    # which will be removed from the graph) as such, including a replacement
--    # taskId where applicable.  Second, generate a new task graph containing
--    # only the non-optimized tasks, with all task labels resolved to taskIds
--    # and with task['dependencies'] populated.
--    annotate_task_graph(target_task_graph=target_task_graph,
--                        params=params,
--                        do_not_optimize=do_not_optimize,
--                        named_links_dict=named_links_dict,
--                        label_to_taskid=label_to_taskid,
--                        existing_tasks=existing_tasks)
--    return get_subgraph(target_task_graph, named_links_dict, label_to_taskid), label_to_taskid
--
--
--def optimize_task(task, params):
--    """
--    Optimize a single task by running its optimizations in order until one
--    succeeds.
--    """
--    for opt in task.optimizations:
--        opt_type, args = opt[0], opt[1:]
--        opt_fn = _optimizations[opt_type]
--        opt_result = opt_fn(task, params, *args)
--        if opt_result:
--            return opt_result
--
--    return False
--
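--# Shape sketch (entries are hypothetical): task.optimizations is a list whose
--# entries name a registered optimization plus its arguments, e.g.
--#   [['skip-unless-changed', ['browser/**']],
--#    ['seta']]
--# The first optimization returning a truthy value wins: a taskId string means
--# "replace this task with that one", True means "optimize it away entirely".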
--
--def annotate_task_graph(target_task_graph, params, do_not_optimize,
--                        named_links_dict, label_to_taskid, existing_tasks):
--    """
--    Annotate each task in the graph with .optimized (boolean) and .task_id
--    (possibly None), following the rules for optimization and calling the task
--    kinds' `optimize_task` method.
--
--    As a side effect, label_to_taskid is updated with labels for all optimized
--    tasks that are replaced with existing tasks.
--    """
--
--    # set .optimized for all tasks, and .task_id for optimized tasks
--    # with replacements
--    for label in target_task_graph.graph.visit_postorder():
--        task = target_task_graph.tasks[label]
--        named_task_dependencies = named_links_dict.get(label, {})
--
--        # check whether any dependencies have been optimized away
--        dependencies = [target_task_graph.tasks[l] for l in named_task_dependencies.itervalues()]
--        for t in dependencies:
--            if t.optimized and not t.task_id:
--                raise Exception(
--                    "task {} was optimized away, but {} depends on it".format(
--                        t.label, label))
--
--        # if this task is blacklisted, don't even consider optimizing
--        replacement_task_id = None
--        if label in do_not_optimize:
--            optimized = False
--        # Let's check whether this task has been created before
--        elif existing_tasks is not None and label in existing_tasks:
--            optimized = True
--            replacement_task_id = existing_tasks[label]
--        # otherwise, examine the task itself (which may be an expensive operation)
--        else:
--            opt_result = optimize_task(task, params)
--
--            # use opt_result to determine values for optimized, replacement_task_id
--            optimized = bool(opt_result)
--            replacement_task_id = opt_result if opt_result and opt_result is not True else None
--
--        task.optimized = optimized
--        task.task_id = replacement_task_id
--        if replacement_task_id:
--            label_to_taskid[label] = replacement_task_id
--
--        if optimized:
--            if replacement_task_id:
--                logger.debug("optimizing `{}`, replacing with task `{}`"
--                             .format(label, replacement_task_id))
--            else:
--                logger.debug("optimizing `{}` away".format(label))
--                # note: any dependent tasks will fail when they see this
--        else:
--            if replacement_task_id:
--                raise Exception("{}: optimize_task returned False with a taskId".format(label))
--
--
--def get_subgraph(annotated_task_graph, named_links_dict, label_to_taskid):
--    """
--    Return the subgraph of annotated_task_graph consisting only of
--    non-optimized tasks and edges between them.
--
--    To avoid losing track of taskIds for tasks optimized away, this method
--    simultaneously substitutes real taskIds for task labels in the graph, and
--    populates each task definition's `dependencies` key with the appropriate
--    taskIds.  Task references are resolved in the process.
--    """
--
--    # resolve labels to taskIds and populate task['dependencies']
--    tasks_by_taskid = {}
--    for label in annotated_task_graph.graph.visit_postorder():
--        task = annotated_task_graph.tasks[label]
--        if task.optimized:
--            continue
--        task.task_id = label_to_taskid[label] = slugid()
--        named_task_dependencies = {
--                name: label_to_taskid[label]
--                for name, label in named_links_dict.get(label, {}).iteritems()}
--        task.task = resolve_task_references(task.label, task.task, named_task_dependencies)
--        task.task.setdefault('dependencies', []).extend(named_task_dependencies.itervalues())
--        tasks_by_taskid[task.task_id] = task
--
--    # resolve edges to taskIds
--    edges_by_taskid = (
--        (label_to_taskid.get(left), label_to_taskid.get(right), name)
--        for (left, right, name) in annotated_task_graph.graph.edges
--        )
--    # ..and drop edges that are no longer in the task graph
--    edges_by_taskid = set(
--        (left, right, name)
--        for (left, right, name) in edges_by_taskid
--        if left in tasks_by_taskid and right in tasks_by_taskid
--        )
--
--    return TaskGraph(
--        tasks_by_taskid,
--        Graph(set(tasks_by_taskid), edges_by_taskid))
--
--
--def optimization(name):
--    def wrap(func):
--        if name in _optimizations:
--            raise Exception("multiple optimizations with name {}".format(name))
--        _optimizations[name] = func
--        return func
--    return wrap
--
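--# Registration sketch (the 'never' optimization is hypothetical):
--#
--#   @optimization('never')
--#   def opt_never(task, params):
--#       return False  # never optimize anything away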
--
--@optimization('index-search')
--def opt_index_search(task, params, index_path):
--    try:
--        task_id = find_task_id(
--            index_path,
--            use_proxy=bool(os.environ.get('TASK_ID')))
--
--        return task_id or True
--    except requests.exceptions.HTTPError:
--        pass
--
--    return False
--
--
--@optimization('seta')
--def opt_seta(task, params):
--    bbb_task = False
--
--    # for bbb tasks we need to send in the buildbot buildername
--    if task.task.get('provisionerId', '') == 'buildbot-bridge':
--        label = task.task.get('payload').get('buildername')
--        bbb_task = True
--    else:
--        label = task.label
--
--    # Return True to optimize a low-value task away; return False for a
--    # high-value task so that it is kept and actually runs.
--    if is_low_value_task(label,
--                         params.get('project'),
--                         params.get('pushlog_id'),
--                         params.get('pushdate'),
--                         bbb_task):
--        # Always optimize away low-value tasks
--        return True
--    else:
--        return False
--
--
--@optimization('skip-unless-changed')
--def opt_files_changed(task, params, file_patterns):
--    # pushlog_id == -1 - this is the case when run from a cron.yml job
--    if params.get('pushlog_id') == -1:
--        return True
--
--    changed = files_changed.check(params, file_patterns)
--    if not changed:
--        logger.debug('no files found matching a pattern in `skip-unless-changed` for ' +
--                     task.label)
--        return True
--    return False
-diff --git a/taskcluster/taskgraph/parameters.py b/taskcluster/taskgraph/parameters.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/parameters.py
-+++ /dev/null
-@@ -1,95 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import json
--import yaml
--from mozbuild.util import ReadOnlyDict
--
--# Please keep this list sorted and in sync with taskcluster/docs/parameters.rst
--PARAMETER_NAMES = set([
--    'base_repository',
--    'build_date',
--    'filters',
--    'head_ref',
--    'head_repository',
--    'head_rev',
--    'include_nightly',
--    'level',
--    'message',
--    'moz_build_date',
--    'optimize_target_tasks',
--    'owner',
--    'project',
--    'pushdate',
--    'pushlog_id',
--    'target_tasks_method',
--])
--
--
--class Parameters(ReadOnlyDict):
--    """An immutable dictionary with nicer KeyError messages on failure"""
--    def check(self):
--        names = set(self)
--        msg = []
--
--        missing = PARAMETER_NAMES - names
--        if missing:
--            msg.append("missing parameters: " + ", ".join(missing))
--
--        extra = names - PARAMETER_NAMES
--        if extra:
--            msg.append("extra parameters: " + ", ".join(extra))
--
--        if msg:
--            raise Exception("; ".join(msg))
--
--    def __getitem__(self, k):
--        if k not in PARAMETER_NAMES:
--            raise KeyError("no such parameter {!r}".format(k))
--        try:
--            return super(Parameters, self).__getitem__(k)
--        except KeyError:
--            raise KeyError("taskgraph parameter {!r} not found".format(k))
--
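--# Usage sketch (values are hypothetical): Parameters is a read-only dict that
--# validates keys on access (and the full expected set via check()).
--#
--#   params = Parameters(project='mozilla-central', level='3', ...)
--#   params['project']   # -> 'mozilla-central'
--#   params['typo-key']  # -> KeyError: no such parameter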
--
--def load_parameters_file(filename):
--    """
--    Load parameters from a path, url, decision task-id or project.
--
--    Examples:
--        task-id=fdtgsD5DQUmAQZEaGMvQ4Q
--        project=mozilla-central
--    """
--    import urllib
--    from taskgraph.util.taskcluster import get_artifact_url, find_task_id
--
--    if not filename:
--        return Parameters()
--
--    try:
--        # reading parameters from a local parameters.yml file
--        f = open(filename)
--    except IOError:
--        # fetching parameters.yml using a task-id, project or supplied url
--        task_id = None
--        if filename.startswith("task-id="):
--            task_id = filename.split("=")[1]
--        elif filename.startswith("project="):
--            index = "gecko.v2.{}.latest.firefox.decision".format(filename.split("=")[1])
--            task_id = find_task_id(index)
--
--        if task_id:
--            filename = get_artifact_url(task_id, 'public/parameters.yml')
--        f = urllib.urlopen(filename)
--
--    if filename.endswith('.yml'):
--        return Parameters(**yaml.safe_load(f))
--    elif filename.endswith('.json'):
--        return Parameters(**json.load(f))
--    else:
--        raise TypeError("Parameters file `{}` is not JSON or YAML".format(filename))
-diff --git a/taskcluster/taskgraph/target_tasks.py b/taskcluster/taskgraph/target_tasks.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/target_tasks.py
-+++ /dev/null
-@@ -1,386 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import os
--import json
--
--from taskgraph import try_option_syntax
--from taskgraph.util.attributes import match_run_on_projects
--
--_target_task_methods = {}
--
--
--def _target_task(name):
--    def wrap(func):
--        _target_task_methods[name] = func
--        return func
--    return wrap
--
--
--def get_method(method):
--    """Get a target_task_method to pass to a TaskGraphGenerator."""
--    return _target_task_methods[method]
--
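--# Registration sketch (the 'everything' method is hypothetical):
--#
--#   @_target_task('everything')
--#   def target_tasks_everything(full_task_graph, parameters):
--#       return full_task_graph.tasks.keys()
--#
--#   get_method('everything')  # -> target_tasks_everything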
--
--def filter_on_nightly(task, parameters):
--    return not task.attributes.get('nightly') or parameters.get('include_nightly')
--
--
--def filter_for_project(task, parameters):
--    """Filter tasks by project.  Optionally enable nightlies."""
--    run_on_projects = set(task.attributes.get('run_on_projects', []))
--    return match_run_on_projects(parameters['project'], run_on_projects)
--
--
--def filter_upload_symbols(task, parameters):
--    # Filters out upload-symbols tasks when they are not part of a nightly or a release build
--    # TODO: Remove this overly specific filter (bug 1353296)
--    return '-upload-symbols' not in task.label or \
--        task.attributes.get('nightly') or \
--        parameters.get('project') in ('mozilla-beta', 'mozilla-release')
--
--
--def standard_filter(task, parameters):
--    return all(
--        filter_func(task, parameters) for filter_func in
--        (filter_on_nightly, filter_for_project, filter_upload_symbols)
--    )
--
--
--def _try_task_config(full_task_graph, parameters):
--    task_config_file = os.path.join(os.getcwd(), 'try_task_config.json')
--
--    if not os.path.isfile(task_config_file):
--        return []
--
--    with open(task_config_file, 'r') as fh:
--        task_config = json.load(fh)
--
--    target_task_labels = []
--    for task in full_task_graph.tasks.itervalues():
--        if task.label in task_config:
--            target_task_labels.append(task.label)
--
--    return target_task_labels
--
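--# File shape sketch (the label is hypothetical): try_task_config.json is read
--# as a JSON object keyed by task label, e.g.
--#   {"test-linux64/opt-mochitest-e10s-1": {}}
--# any label present in it (and in the full graph) becomes a target task.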
--
--def _try_option_syntax(full_task_graph, parameters):
--    """Generate a list of target tasks based on try syntax in
--    parameters['message'] and, for context, the full task graph."""
--    options = try_option_syntax.TryOptionSyntax(parameters['message'], full_task_graph)
--    target_tasks_labels = [t.label for t in full_task_graph.tasks.itervalues()
--                           if options.task_matches(t)]
--
--    attributes = {
--        k: getattr(options, k) for k in [
--            'env',
--            'no_retry',
--            'tag',
--        ]
--    }
--
--    for l in target_tasks_labels:
--        task = full_task_graph[l]
--        if 'unittest_suite' in task.attributes:
--            task.attributes['task_duplicates'] = options.trigger_tests
--
--    for l in target_tasks_labels:
--        task = full_task_graph[l]
--        # If the developer wants test jobs to be rebuilt N times we add that value here
--        if options.trigger_tests > 1 and 'unittest_suite' in task.attributes:
--            task.attributes['task_duplicates'] = options.trigger_tests
--            task.attributes['profile'] = False
--
--        # If the developer wants test talos jobs to be rebuilt N times we add that value here
--        if options.talos_trigger_tests > 1 and task.attributes.get('unittest_suite') == 'talos':
--            task.attributes['task_duplicates'] = options.talos_trigger_tests
--            task.attributes['profile'] = options.profile
--
--        task.attributes.update(attributes)
--
--    # Add notifications here as well
--    if options.notifications:
--        for task in full_task_graph:
--            owner = parameters.get('owner')
--            routes = task.task.setdefault('routes', [])
--            if options.notifications == 'all':
--                routes.append("notify.email.{}.on-any".format(owner))
--            elif options.notifications == 'failure':
--                routes.append("notify.email.{}.on-failed".format(owner))
--                routes.append("notify.email.{}.on-exception".format(owner))
--
--    return target_tasks_labels
--
--
--@_target_task('try_tasks')
--def target_tasks_try(full_task_graph, parameters):
--    labels = _try_task_config(full_task_graph, parameters)
--
--    if 'try:' in parameters['message'] or not labels:
--        labels.extend(_try_option_syntax(full_task_graph, parameters))
--
--    return labels
--
--
--@_target_task('default')
--def target_tasks_default(full_task_graph, parameters):
--    """Target the tasks which have indicated they should be run on this project
--    via the `run_on_projects` attribute."""
--
--    return [l for l, t in full_task_graph.tasks.iteritems()
--            if standard_filter(t, parameters)]
--
--
--@_target_task('ash_tasks')
--def target_tasks_ash(full_task_graph, parameters):
--    """Target tasks that only run on the ash branch."""
--    def filter(task):
--        platform = task.attributes.get('build_platform')
--        # Early return if platform is None
--        if not platform:
--            return False
--        # Only on Linux platforms
--        if 'linux' not in platform:
--            return False
--        # No random non-build jobs either. This is being purposely done as a
--        # blacklist so newly-added jobs aren't missed by default.
--        for p in ('nightly', 'haz', 'artifact', 'cov', 'add-on'):
--            if p in platform:
--                return False
--        for k in ('toolchain', 'l10n', 'static-analysis'):
--            if k in task.attributes['kind']:
--                return False
--        # and none of this linux64-asan/debug stuff
--        if platform == 'linux64-asan' and task.attributes['build_type'] == 'debug':
--            return False
--        # no non-e10s tests
--        if task.attributes.get('unittest_suite'):
--            if not task.attributes.get('e10s'):
--                return False
--            # don't run talos on ash
--            if task.attributes.get('unittest_suite') == 'talos':
--                return False
--        # don't upload symbols
--        if task.attributes['kind'] == 'upload-symbols':
--            return False
--        return True
--    return [l for l, t in full_task_graph.tasks.iteritems() if filter(t)]
--
--
--@_target_task('cedar_tasks')
--def target_tasks_cedar(full_task_graph, parameters):
--    """Target tasks that only run on the cedar branch."""
--    def filter(task):
--        platform = task.attributes.get('build_platform')
--        # only select platforms
--        if platform not in ('linux64', 'macosx64'):
--            return False
--        if task.attributes.get('unittest_suite'):
--            if not (task.attributes['unittest_suite'].startswith('mochitest') or
--                    'xpcshell' in task.attributes['unittest_suite']):
--                return False
--        return True
--    return [l for l, t in full_task_graph.tasks.iteritems() if filter(t)]
--
--
--@_target_task('graphics_tasks')
--def target_tasks_graphics(full_task_graph, parameters):
--    """In addition to doing the filtering by project that the 'default'
--       filter does, also remove artifact builds because we have csets on
--       the graphics branch that aren't on the candidate branches of artifact
--       builds."""
--    filtered_for_project = target_tasks_default(full_task_graph, parameters)
--
--    def filter(task):
--        if task.attributes['kind'] == 'artifact-build':
--            return False
--        return True
--    return [l for l in filtered_for_project if filter(full_task_graph[l])]
--
--
--@_target_task('mochitest_valgrind')
--def target_tasks_valgrind(full_task_graph, parameters):
--    """Target tasks that only run on the cedar branch."""
--    def filter(task):
--        platform = task.attributes.get('test_platform', '').split('/')[0]
--        if platform not in ['linux64']:
--            return False
--
--        if task.attributes.get('unittest_suite', '').startswith('mochitest') and \
--           task.attributes.get('unittest_flavor', '').startswith('valgrind-plain'):
--            return True
--        return False
--
--    return [l for l, t in full_task_graph.tasks.iteritems() if filter(t)]
--
--
--@_target_task('nightly_fennec')
--def target_tasks_nightly_fennec(full_task_graph, parameters):
--    """Select the set of tasks required for a nightly build of fennec. The
--    nightly build process involves a pipeline of builds, signing,
--    and, eventually, uploading the tasks to balrog."""
--    def filter(task):
--        platform = task.attributes.get('build_platform')
--        if platform in ('android-aarch64-nightly',
--                        'android-api-16-nightly',
--                        'android-api-16-old-id-nightly',
--                        'android-nightly',
--                        'android-x86-nightly',
--                        'android-x86-old-id-nightly'):
--            if not task.attributes.get('nightly', False):
--                return False
--            return filter_for_project(task, parameters)
--    return [l for l, t in full_task_graph.tasks.iteritems() if filter(t)]
--
--
--@_target_task('nightly_linux')
--def target_tasks_nightly_linux(full_task_graph, parameters):
--    """Select the set of tasks required for a nightly build of linux. The
--    nightly build process involves a pipeline of builds, signing,
--    and, eventually, uploading the tasks to balrog."""
--    def filter(task):
--        platform = task.attributes.get('build_platform')
--        if platform in ('linux64-nightly', 'linux-nightly'):
--            return task.attributes.get('nightly', False)
--    return [l for l, t in full_task_graph.tasks.iteritems() if filter(t)]
--
--
--@_target_task('mozilla_beta_tasks')
--def target_tasks_mozilla_beta(full_task_graph, parameters):
--    """Select the set of tasks required for a promotable beta or release build
--    of linux, plus android CI. The candidates build process involves a pipeline
--    of builds and signing, but does not include beetmover or balrog jobs."""
--
--    def filter(task):
--        if not standard_filter(task, parameters):
--            return False
--        platform = task.attributes.get('build_platform')
--        if platform in (
--                # On beta, Nightly builds are already PGOs
--                'linux-pgo', 'linux64-pgo',
--                'win32-pgo', 'win64-pgo',
--                'android-api-16-nightly', 'android-x86-nightly'
--                ):
--            return False
--
--        if platform in (
--                'linux', 'linux64',
--                'macosx64',
--                'win32', 'win64',
--                ):
--            if task.attributes['build_type'] == 'opt' and \
--               task.attributes.get('unittest_suite') != 'talos':
--                return False
--
--        # skip l10n, beetmover, balrog
--        if task.kind in [
--            'balrog',
--            'beetmover', 'beetmover-checksums', 'beetmover-l10n',
--            'beetmover-repackage', 'beetmover-repackage-signing',
--            'checksums-signing',
--            'nightly-l10n', 'nightly-l10n-signing',
--            'push-apk', 'push-apk-breakpoint',
--            'repackage-l10n',
--        ]:
--            return False
--
--        # No l10n repacks per push. They may be triggered by kinds which depend
--        # on l10n builds/repacks. For instance: "repackage-signing"
--        if task.attributes.get('locale', '') != '':
--            return False
--
--        return True
--
--    return [l for l, t in full_task_graph.tasks.iteritems() if filter(t)]
--
--
--@_target_task('mozilla_release_tasks')
--def target_tasks_mozilla_release(full_task_graph, parameters):
--    """Select the set of tasks required for a promotable beta or release build
--    of linux, plus android CI. The candidates build process involves a pipeline
--    of builds and signing, but does not include beetmover or balrog jobs."""
--    return target_tasks_mozilla_beta(full_task_graph, parameters)
--
--
--@_target_task('candidates_fennec')
--def target_tasks_candidates_fennec(full_task_graph, parameters):
--    """Select the set of tasks required for a candidates build of fennec. The
--    nightly build process involves a pipeline of builds, signing,
--    and, eventually, uploading the tasks to balrog."""
--    filtered_for_project = target_tasks_nightly_fennec(full_task_graph, parameters)
--
--    def filter(task):
--        if task.kind not in ['balrog']:
--            return task.attributes.get('nightly', False)
--
--    return [l for l in filtered_for_project if filter(full_task_graph[l])]
--
--
--@_target_task('pine_tasks')
--def target_tasks_pine(full_task_graph, parameters):
--    """Bug 1339179 - no mobile automation needed on pine"""
--    def filter(task):
--        platform = task.attributes.get('build_platform')
--        # disable mobile jobs
--        if str(platform).startswith('android'):
--            return False
--        # disable asan
--        if platform == 'linux64-asan':
--            return False
--        # disable non-pine and nightly tasks
--        if standard_filter(task, parameters):
--            return True
--    return [l for l, t in full_task_graph.tasks.iteritems() if filter(t)]
--
--
--@_target_task('nightly_macosx')
--def target_tasks_nightly_macosx(full_task_graph, parameters):
--    """Select the set of tasks required for a nightly build of macosx. The
--    nightly build process involves a pipeline of builds, signing,
--    and, eventually, uploading the tasks to balrog."""
--    def filter(task):
--        platform = task.attributes.get('build_platform')
--        if platform in ('macosx64-nightly', ):
--            return task.attributes.get('nightly', False)
--    return [l for l, t in full_task_graph.tasks.iteritems() if filter(t)]
--
--
--@_target_task('nightly_win')
--def target_tasks_nightly_win(full_task_graph, parameters):
--    """Select the set of tasks required for a nightly build of win32 and win64.
--    The nightly build process involves a pipeline of builds, signing,
--    and, eventually, uploading the tasks to balrog."""
--    def filter(task):
--        platform = task.attributes.get('build_platform')
--        if not filter_for_project(task, parameters):
--            return False
--        if platform in ('win32-nightly', 'win64-nightly'):
--            return task.attributes.get('nightly', False)
--    return [l for l, t in full_task_graph.tasks.iteritems() if filter(t)]
--
--
--@_target_task('nightly_desktop')
--def target_tasks_nightly_desktop(full_task_graph, parameters):
--    """Select the set of tasks required for a nightly build of linux, mac,
--    windows."""
--    # Avoid duplicate tasks.
--    return list(
--        set(target_tasks_nightly_win(full_task_graph, parameters))
--        | set(target_tasks_nightly_macosx(full_task_graph, parameters))
--        | set(target_tasks_nightly_linux(full_task_graph, parameters))
--    )
--
--
--# Opt DMD builds should only run nightly
--@_target_task('nightly_dmd')
--def target_tasks_dmd(full_task_graph, parameters):
--    """Target DMD that run nightly on the m-c branch."""
--    def filter(task):
--        platform = task.attributes.get('build_platform', '')
--        return platform.endswith('-dmd')
--    return [l for l, t in full_task_graph.tasks.iteritems() if filter(t)]
-diff --git a/taskcluster/taskgraph/task.py b/taskcluster/taskgraph/task.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/task.py
-+++ /dev/null
-@@ -1,86 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--
--class Task(object):
--    """
--    Representation of a task in a TaskGraph.  Each Task has, at creation:
--
--    - kind: the name of the task kind
--    - label: the label for this task
--    - attributes: a dictionary of attributes for this task (used for filtering)
--    - task: the task definition (JSON-able dictionary)
--    - optimizations: optimizations to apply to the task (see taskgraph.optimize)
--    - dependencies: tasks this one depends on, in the form {name: label}, for example
--      {'build': 'build-linux64/opt', 'docker-image': 'build-docker-image-desktop-test'}
--
--    And later, as the task-graph processing proceeds:
--
--    - task_id -- TaskCluster taskId under which this task will be created
--    - optimized -- true if this task need not be performed
--
--    This class is just a convenience wrapper for the data type, managing
--    display, comparison, serialization, etc. It has no functionality of its own.
--    """
--    def __init__(self, kind, label, attributes, task,
--                 optimizations=None, dependencies=None):
--        self.kind = kind
--        self.label = label
--        self.attributes = attributes
--        self.task = task
--
--        self.task_id = None
--        self.optimized = False
--
--        self.attributes['kind'] = kind
--
--        self.optimizations = optimizations or []
--        self.dependencies = dependencies or {}
--
--    def __eq__(self, other):
--        return self.kind == other.kind and \
--            self.label == other.label and \
--            self.attributes == other.attributes and \
--            self.task == other.task and \
--            self.task_id == other.task_id and \
--            self.optimizations == other.optimizations and \
--            self.dependencies == other.dependencies
--
--    def __repr__(self):
--        return ('Task({kind!r}, {label!r}, {attributes!r}, {task!r}, '
--                'optimizations={optimizations!r}, '
--                'dependencies={dependencies!r})'.format(**self.__dict__))
--
--    def to_json(self):
--        rv = {
--            'kind': self.kind,
--            'label': self.label,
--            'attributes': self.attributes,
--            'dependencies': self.dependencies,
--            'optimizations': self.optimizations,
--            'task': self.task,
--        }
--        if self.task_id:
--            rv['task_id'] = self.task_id
--        return rv
--
--    @classmethod
--    def from_json(cls, task_dict):
--        """
--        Given a data structure as produced by taskgraph.to_json, re-construct
--        the original Task object.  This is used to "resume" the task-graph
--        generation process, for example in Action tasks.
--        """
--        rv = cls(
--            kind=task_dict['kind'],
--            label=task_dict['label'],
--            attributes=task_dict['attributes'],
--            task=task_dict['task'],
--            optimizations=task_dict['optimizations'],
--            dependencies=task_dict.get('dependencies'))
--        if 'task_id' in task_dict:
--            rv.task_id = task_dict['task_id']
--        return rv
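--
--# Construction sketch (values are hypothetical):
--#
--#   t = Task(kind='build', label='build-linux64/opt', attributes={},
--#            task={'payload': {}},
--#            dependencies={'docker-image': 'build-docker-image-desktop-build'})
--#   t.attributes['kind']  # -> 'build', filled in by the constructor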
-diff --git a/taskcluster/taskgraph/taskgraph.py b/taskcluster/taskgraph/taskgraph.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/taskgraph.py
-+++ /dev/null
-@@ -1,69 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from .graph import Graph
--from .task import Task
--
--
--class TaskGraph(object):
--    """
--    Representation of a task graph.
--
--    A task graph is a combination of a Graph and a dictionary of tasks indexed
--    by label.  TaskGraph instances should be treated as immutable.
--    """
--
--    def __init__(self, tasks, graph):
--        assert set(tasks) == graph.nodes
--        self.tasks = tasks
--        self.graph = graph
--
--    def for_each_task(self, f, *args, **kwargs):
--        for task_label in self.graph.visit_postorder():
--            task = self.tasks[task_label]
--            f(task, self, *args, **kwargs)
--
--    def __getitem__(self, label):
--        "Get a task by label"
--        return self.tasks[label]
--
--    def __iter__(self):
--        "Iterate over tasks in undefined order"
--        return self.tasks.itervalues()
--
--    def __repr__(self):
--        return "<TaskGraph graph={!r} tasks={!r}>".format(self.graph, self.tasks)
--
--    def __eq__(self, other):
--        return self.tasks == other.tasks and self.graph == other.graph
--
--    def to_json(self):
--        "Return a JSON-able object representing the task graph, as documented"
--        named_links_dict = self.graph.named_links_dict()
--        # this dictionary may be keyed by label or by taskid, so let's just call it 'key'
--        tasks = {}
--        for key in self.graph.visit_postorder():
--            tasks[key] = self.tasks[key].to_json()
--            # overwrite dependencies with the information in the taskgraph's edges.
--            tasks[key]['dependencies'] = named_links_dict.get(key, {})
--        return tasks
--
--    @classmethod
--    def from_json(cls, tasks_dict):
--        """
--        Generate a TaskGraph from a dictionary representation of the
--        TaskGraph, as produced by to_json.
--        """
--        tasks = {}
--        edges = set()
--        for key, value in tasks_dict.iteritems():
--            tasks[key] = Task.from_json(value)
--            if 'task_id' in value:
--                tasks[key].task_id = value['task_id']
--            for depname, dep in value['dependencies'].iteritems():
--                edges.add((key, dep, depname))
--        task_graph = cls(tasks, Graph(set(tasks), edges))
--        return tasks, task_graph
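--
--# Round-trip sketch (task_a is a hypothetical Task instance with no
--# dependencies):
--#
--#   g = TaskGraph({'a': task_a}, Graph({'a'}, set()))
--#   _, g2 = TaskGraph.from_json(g.to_json())
--#   g2 == g  # -> True; labels, tasks and edges survive the round trip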
-diff --git a/taskcluster/taskgraph/test/__init__.py b/taskcluster/taskgraph/test/__init__.py
-deleted file mode 100644
-diff --git a/taskcluster/taskgraph/test/automationrelevance.json b/taskcluster/taskgraph/test/automationrelevance.json
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/automationrelevance.json
-+++ /dev/null
-@@ -1,425 +0,0 @@
--{
--    "changesets": [
--        {
--            "author": "James Long <longster@gmail.com>",
--            "backsoutnodes": [],
--            "bugs": [
--                {
--                    "no": "1300866",
--                    "url": "https://bugzilla.mozilla.org/show_bug.cgi?id=1300866"
--                }
--            ],
--            "date": [
--                1473196655.0,
--                14400
--            ],
--            "desc": "Bug 1300866 - expose devtools require to new debugger r=jlast,bgrins",
--            "extra": {
--                "branch": "default"
--            },
--            "files": [
--                "devtools/client/debugger/new/index.html"
--            ],
--            "node": "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
--            "parents": [
--                "37c9349b4e8167a61b08b7e119c21ea177b98942"
--            ],
--            "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
--            "pushdate": [
--                1473261248,
--                0
--            ],
--            "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
--            "pushid": 30664,
--            "pushnodes": [
--                "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
--                "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
--                "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
--                "99c542fa43a72ee863c813b5624048d1b443549b",
--                "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
--                "541c9086c0f27fba60beecc9bc94543103895c86",
--                "041a925171e431bf51fb50193ab19d156088c89a",
--                "a14f88a9af7a59e677478694bafd9375ac53683e"
--            ],
--            "pushuser": "cbook@mozilla.com",
--            "rev": 312890,
--            "reviewers": [
--                {
--                    "name": "jlast",
--                    "revset": "reviewer(jlast)"
--                },
--                {
--                    "name": "bgrins",
--                    "revset": "reviewer(bgrins)"
--                }
--            ],
--            "treeherderrepo": "mozilla-central",
--            "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
--        },
--        {
--            "author": "Wes Kocher <wkocher@mozilla.com>",
--            "backsoutnodes": [],
--            "bugs": [],
--            "date": [
--                1473208638.0,
--                25200
--            ],
--            "desc": "Merge m-c to fx-team, a=merge",
--            "extra": {
--                "branch": "default"
--            },
--            "files": [
--                "taskcluster/scripts/builder/build-l10n.sh"
--            ],
--            "node": "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
--            "parents": [
--                "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
--                "91c2b9d5c1354ca79e5b174591dbb03b32b15bbf"
--            ],
--            "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
--            "pushdate": [
--                1473261248,
--                0
--            ],
--            "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
--            "pushid": 30664,
--            "pushnodes": [
--                "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
--                "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
--                "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
--                "99c542fa43a72ee863c813b5624048d1b443549b",
--                "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
--                "541c9086c0f27fba60beecc9bc94543103895c86",
--                "041a925171e431bf51fb50193ab19d156088c89a",
--                "a14f88a9af7a59e677478694bafd9375ac53683e"
--            ],
--            "pushuser": "cbook@mozilla.com",
--            "rev": 312891,
--            "reviewers": [
--                {
--                    "name": "merge",
--                    "revset": "reviewer(merge)"
--                }
--            ],
--            "treeherderrepo": "mozilla-central",
--            "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
--        },
--        {
--            "author": "Towkir Ahmed <towkir17@gmail.com>",
--            "backsoutnodes": [],
--            "bugs": [
--                {
--                    "no": "1296648",
--                    "url": "https://bugzilla.mozilla.org/show_bug.cgi?id=1296648"
--                }
--            ],
--            "date": [
--                1472957580.0,
--                14400
--            ],
--            "desc": "Bug 1296648 - Fix direction of .ruleview-expander.theme-twisty in RTL locales. r=ntim",
--            "extra": {
--                "branch": "default"
--            },
--            "files": [
--                "devtools/client/themes/rules.css"
--            ],
--            "node": "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
--            "parents": [
--                "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6"
--            ],
--            "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
--            "pushdate": [
--                1473261248,
--                0
--            ],
--            "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
--            "pushid": 30664,
--            "pushnodes": [
--                "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
--                "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
--                "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
--                "99c542fa43a72ee863c813b5624048d1b443549b",
--                "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
--                "541c9086c0f27fba60beecc9bc94543103895c86",
--                "041a925171e431bf51fb50193ab19d156088c89a",
--                "a14f88a9af7a59e677478694bafd9375ac53683e"
--            ],
--            "pushuser": "cbook@mozilla.com",
--            "rev": 312892,
--            "reviewers": [
--                {
--                    "name": "ntim",
--                    "revset": "reviewer(ntim)"
--                }
--            ],
--            "treeherderrepo": "mozilla-central",
--            "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
--        },
--        {
--            "author": "Oriol <oriol-bugzilla@hotmail.com>",
--            "backsoutnodes": [],
--            "bugs": [
--                {
--                    "no": "1300336",
--                    "url": "https://bugzilla.mozilla.org/show_bug.cgi?id=1300336"
--                }
--            ],
--            "date": [
--                1472921160.0,
--                14400
--            ],
--            "desc": "Bug 1300336 - Allow pseudo-arrays to have a length property. r=fitzgen",
--            "extra": {
--                "branch": "default"
--            },
--            "files": [
--                "devtools/client/webconsole/test/browser_webconsole_output_06.js",
--                "devtools/server/actors/object.js"
--            ],
--            "node": "99c542fa43a72ee863c813b5624048d1b443549b",
--            "parents": [
--                "16a1a91f9269ab95dd83eb29dc5d0227665f7d94"
--            ],
--            "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
--            "pushdate": [
--                1473261248,
--                0
--            ],
--            "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
--            "pushid": 30664,
--            "pushnodes": [
--                "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
--                "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
--                "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
--                "99c542fa43a72ee863c813b5624048d1b443549b",
--                "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
--                "541c9086c0f27fba60beecc9bc94543103895c86",
--                "041a925171e431bf51fb50193ab19d156088c89a",
--                "a14f88a9af7a59e677478694bafd9375ac53683e"
--            ],
--            "pushuser": "cbook@mozilla.com",
--            "rev": 312893,
--            "reviewers": [
--                {
--                    "name": "fitzgen",
--                    "revset": "reviewer(fitzgen)"
--                }
--            ],
--            "treeherderrepo": "mozilla-central",
--            "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
--        },
--        {
--            "author": "Ruturaj Vartak <ruturaj@gmail.com>",
--            "backsoutnodes": [],
--            "bugs": [
--                {
--                    "no": "1295010",
--                    "url": "https://bugzilla.mozilla.org/show_bug.cgi?id=1295010"
--                }
--            ],
--            "date": [
--                1472854020.0,
--                -7200
--            ],
--            "desc": "Bug 1295010 - Don't move the eyedropper to the out of browser window by keyboard navigation. r=pbro\n\nMozReview-Commit-ID: vBwmSxVNXK",
--            "extra": {
--                "amend_source": "6885024ef00cfa33d73c59dc03c48ebcda9ccbdd",
--                "branch": "default",
--                "histedit_source": "c43167f0a7cbe9f4c733b15da726e5150a9529ba",
--                "rebase_source": "b74df421630fc46dab6b6cc026bf3e0ae6b4a651"
--            },
--            "files": [
--                "devtools/client/inspector/test/browser_inspector_highlighter-eyedropper-events.js",
--                "devtools/client/inspector/test/head.js",
--                "devtools/server/actors/highlighters/eye-dropper.js"
--            ],
--            "node": "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
--            "parents": [
--                "99c542fa43a72ee863c813b5624048d1b443549b"
--            ],
--            "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
--            "pushdate": [
--                1473261248,
--                0
--            ],
--            "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
--            "pushid": 30664,
--            "pushnodes": [
--                "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
--                "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
--                "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
--                "99c542fa43a72ee863c813b5624048d1b443549b",
--                "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
--                "541c9086c0f27fba60beecc9bc94543103895c86",
--                "041a925171e431bf51fb50193ab19d156088c89a",
--                "a14f88a9af7a59e677478694bafd9375ac53683e"
--            ],
--            "pushuser": "cbook@mozilla.com",
--            "rev": 312894,
--            "reviewers": [
--                {
--                    "name": "pbro",
--                    "revset": "reviewer(pbro)"
--                }
--            ],
--            "treeherderrepo": "mozilla-central",
--            "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
--        },
--        {
--            "author": "Matteo Ferretti <mferretti@mozilla.com>",
--            "backsoutnodes": [],
--            "bugs": [
--                {
--                    "no": "1299154",
--                    "url": "https://bugzilla.mozilla.org/show_bug.cgi?id=1299154"
--                }
--            ],
--            "date": [
--                1472629906.0,
--                -7200
--            ],
--            "desc": "Bug 1299154 - added Set/GetOverrideDPPX to restorefromHistory; r=mstange\n\nMozReview-Commit-ID: AsyAcG3Igbn\n",
--            "extra": {
--                "branch": "default",
--                "committer": "Matteo Ferretti <mferretti@mozilla.com> 1473236511 -7200"
--            },
--            "files": [
--                "docshell/base/nsDocShell.cpp",
--                "dom/tests/mochitest/general/test_contentViewer_overrideDPPX.html"
--            ],
--            "node": "541c9086c0f27fba60beecc9bc94543103895c86",
--            "parents": [
--                "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac"
--            ],
--            "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
--            "pushdate": [
--                1473261248,
--                0
--            ],
--            "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
--            "pushid": 30664,
--            "pushnodes": [
--                "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
--                "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
--                "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
--                "99c542fa43a72ee863c813b5624048d1b443549b",
--                "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
--                "541c9086c0f27fba60beecc9bc94543103895c86",
--                "041a925171e431bf51fb50193ab19d156088c89a",
--                "a14f88a9af7a59e677478694bafd9375ac53683e"
--            ],
--            "pushuser": "cbook@mozilla.com",
--            "rev": 312895,
--            "reviewers": [
--                {
--                    "name": "mstange",
--                    "revset": "reviewer(mstange)"
--                }
--            ],
--            "treeherderrepo": "mozilla-central",
--            "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
--        },
--        {
--            "author": "Patrick Brosset <pbrosset@mozilla.com>",
--            "backsoutnodes": [],
--            "bugs": [
--                {
--                    "no": "1295010",
--                    "url": "https://bugzilla.mozilla.org/show_bug.cgi?id=1295010"
--                }
--            ],
--            "date": [
--                1473239449.0,
--                -7200
--            ],
--            "desc": "Bug 1295010 - Removed testActor from highlighterHelper in inspector tests; r=me\n\nMozReview-Commit-ID: GMksl81iGcp",
--            "extra": {
--                "branch": "default"
--            },
--            "files": [
--                "devtools/client/inspector/test/browser_inspector_highlighter-eyedropper-events.js",
--                "devtools/client/inspector/test/head.js"
--            ],
--            "node": "041a925171e431bf51fb50193ab19d156088c89a",
--            "parents": [
--                "541c9086c0f27fba60beecc9bc94543103895c86"
--            ],
--            "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
--            "pushdate": [
--                1473261248,
--                0
--            ],
--            "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
--            "pushid": 30664,
--            "pushnodes": [
--                "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
--                "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
--                "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
--                "99c542fa43a72ee863c813b5624048d1b443549b",
--                "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
--                "541c9086c0f27fba60beecc9bc94543103895c86",
--                "041a925171e431bf51fb50193ab19d156088c89a",
--                "a14f88a9af7a59e677478694bafd9375ac53683e"
--            ],
--            "pushuser": "cbook@mozilla.com",
--            "rev": 312896,
--            "reviewers": [
--                {
--                    "name": "me",
--                    "revset": "reviewer(me)"
--                }
--            ],
--            "treeherderrepo": "mozilla-central",
--            "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
--        },
--        {
--            "author": "Carsten \"Tomcat\" Book <cbook@mozilla.com>",
--            "backsoutnodes": [],
--            "bugs": [],
--            "date": [
--                1473261233.0,
--                -7200
--            ],
--            "desc": "merge fx-team to mozilla-central a=merge",
--            "extra": {
--                "branch": "default"
--            },
--            "files": [],
--            "node": "a14f88a9af7a59e677478694bafd9375ac53683e",
--            "parents": [
--                "3d0b41fdd93bd8233745eadb4e0358e385bf2cb9",
--                "041a925171e431bf51fb50193ab19d156088c89a"
--            ],
--            "perfherderurl": "https://treeherder.mozilla.org/perf.html#/compare?originalProject=mozilla-central&originalRevision=a14f88a9af7a59e677478694bafd9375ac53683e&newProject=mozilla-central&newRevision=ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
--            "pushdate": [
--                1473261248,
--                0
--            ],
--            "pushhead": "a14f88a9af7a59e677478694bafd9375ac53683e",
--            "pushid": 30664,
--            "pushnodes": [
--                "ae2144aa4356b65c2f8c0de8c9082dcb7e330e24",
--                "73a6a267a50a0e1c41e689b265ad3eebe43d7ac6",
--                "16a1a91f9269ab95dd83eb29dc5d0227665f7d94",
--                "99c542fa43a72ee863c813b5624048d1b443549b",
--                "a6b6a93eb41a05e310a11f0172f01ba9b21d3eac",
--                "541c9086c0f27fba60beecc9bc94543103895c86",
--                "041a925171e431bf51fb50193ab19d156088c89a",
--                "a14f88a9af7a59e677478694bafd9375ac53683e"
--            ],
--            "pushuser": "cbook@mozilla.com",
--            "rev": 312897,
--            "reviewers": [
--                {
--                    "name": "merge",
--                    "revset": "reviewer(merge)"
--                }
--            ],
--            "treeherderrepo": "mozilla-central",
--            "treeherderrepourl": "https://treeherder.mozilla.org/#/jobs?repo=mozilla-central"
--        }
--    ],
--    "visible": true
--}
--
-diff --git a/taskcluster/taskgraph/test/python.ini b/taskcluster/taskgraph/test/python.ini
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/python.ini
-+++ /dev/null
-@@ -1,25 +0,0 @@
--[DEFAULT]
--subsuite = taskgraph
--
--[test_create.py]
--[test_cron_util.py]
--[test_decision.py]
--[test_files_changed.py]
--[test_generator.py]
--[test_graph.py]
--[test_morph.py]
--[test_optimize.py]
--[test_parameters.py]
--[test_target_tasks.py]
--[test_taskgraph.py]
--[test_transforms_base.py]
--[test_try_option_syntax.py]
--[test_util_attributes.py]
--[test_util_docker.py]
--[test_util_parameterization.py]
--[test_util_python_path.py]
--[test_util_schema.py]
--[test_util_templates.py]
--[test_util_time.py]
--[test_util_treeherder.py]
--[test_util_yaml.py]
-diff --git a/taskcluster/taskgraph/test/test_create.py b/taskcluster/taskgraph/test/test_create.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/test_create.py
-+++ /dev/null
-@@ -1,76 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import unittest
--import os
--
--from taskgraph import create
--from taskgraph.graph import Graph
--from taskgraph.taskgraph import TaskGraph
--from taskgraph.task import Task
--
--from mozunit import main
--
--
--class TestCreate(unittest.TestCase):
--
--    def setUp(self):
--        self.old_task_id = os.environ.get('TASK_ID')
--        if 'TASK_ID' in os.environ:
--            del os.environ['TASK_ID']
--        self.created_tasks = {}
--        self.old_create_task = create.create_task
--        create.create_task = self.fake_create_task
--
--    def tearDown(self):
--        create.create_task = self.old_create_task
--        if self.old_task_id:
--            os.environ['TASK_ID'] = self.old_task_id
--        elif 'TASK_ID' in os.environ:
--            del os.environ['TASK_ID']
--
--    def fake_create_task(self, session, task_id, label, task_def):
--        self.created_tasks[task_id] = task_def
--
--    def test_create_tasks(self):
--        tasks = {
--            'tid-a': Task(kind='test', label='a', attributes={}, task={'payload': 'hello world'}),
--            'tid-b': Task(kind='test', label='b', attributes={}, task={'payload': 'hello world'}),
--        }
--        label_to_taskid = {'a': 'tid-a', 'b': 'tid-b'}
--        graph = Graph(nodes={'tid-a', 'tid-b'}, edges={('tid-a', 'tid-b', 'edge')})
--        taskgraph = TaskGraph(tasks, graph)
--
--        create.create_tasks(taskgraph, label_to_taskid, {'level': '4'})
--
--        for tid, task in self.created_tasks.iteritems():
--            self.assertEqual(task['payload'], 'hello world')
--            self.assertEqual(task['schedulerId'], 'gecko-level-4')
--            # make sure the dependencies exist, at least
--            for depid in task.get('dependencies', []):
--                if depid == 'decisiontask':
--                    # Don't look for decisiontask here
--                    continue
--                self.assertIn(depid, self.created_tasks)
--
--    def test_create_task_without_dependencies(self):
--        "a task with no dependencies depends on the decision task"
--        os.environ['TASK_ID'] = 'decisiontask'
--        tasks = {
--            'tid-a': Task(kind='test', label='a', attributes={}, task={'payload': 'hello world'}),
--        }
--        label_to_taskid = {'a': 'tid-a'}
--        graph = Graph(nodes={'tid-a'}, edges=set())
--        taskgraph = TaskGraph(tasks, graph)
--
--        create.create_tasks(taskgraph, label_to_taskid, {'level': '4'})
--
--        for tid, task in self.created_tasks.iteritems():
--            self.assertEqual(task.get('dependencies'), [os.environ['TASK_ID']])
--
--
--if __name__ == '__main__':
--    main()
-diff --git a/taskcluster/taskgraph/test/test_cron_util.py b/taskcluster/taskgraph/test/test_cron_util.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/test_cron_util.py
-+++ /dev/null
-@@ -1,66 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, unicode_literals
--
--import datetime
--import unittest
--
--from mozunit import main
--
--from taskgraph.cron.util import (
--    match_utc,
--)
--
--
--class TestMatchUtc(unittest.TestCase):
--
--    def test_hour_minute(self):
--        params = {'time': datetime.datetime(2017, 01, 26, 16, 30, 0)}
--        self.assertFalse(match_utc(params, hour=4, minute=30))
--        self.assertTrue(match_utc(params, hour=16, minute=30))
--        self.assertFalse(match_utc(params, hour=16, minute=0))
--
--    def test_hour_only(self):
--        params = {'time': datetime.datetime(2017, 01, 26, 16, 0, 0)}
--        self.assertFalse(match_utc(params, hour=0))
--        self.assertFalse(match_utc(params, hour=4))
--        self.assertTrue(match_utc(params, hour=16))
--        params = {'time': datetime.datetime(2017, 01, 26, 16, 15, 0)}
--        self.assertFalse(match_utc(params, hour=0))
--        self.assertFalse(match_utc(params, hour=4))
--        self.assertTrue(match_utc(params, hour=16))
--        params = {'time': datetime.datetime(2017, 01, 26, 16, 30, 0)}
--        self.assertFalse(match_utc(params, hour=0))
--        self.assertFalse(match_utc(params, hour=4))
--        self.assertTrue(match_utc(params, hour=16))
--        params = {'time': datetime.datetime(2017, 01, 26, 16, 45, 0)}
--        self.assertFalse(match_utc(params, hour=0))
--        self.assertFalse(match_utc(params, hour=4))
--        self.assertTrue(match_utc(params, hour=16))
--
--    def test_minute_only(self):
--        params = {'time': datetime.datetime(2017, 01, 26, 13, 0, 0)}
--        self.assertTrue(match_utc(params, minute=0))
--        self.assertFalse(match_utc(params, minute=15))
--        self.assertFalse(match_utc(params, minute=30))
--        self.assertFalse(match_utc(params, minute=45))
--
--    def test_zeroes(self):
--        params = {'time': datetime.datetime(2017, 01, 26, 0, 0, 0)}
--        self.assertTrue(match_utc(params, minute=0))
--        self.assertTrue(match_utc(params, hour=0))
--        self.assertFalse(match_utc(params, hour=1))
--        self.assertFalse(match_utc(params, minute=15))
--        self.assertFalse(match_utc(params, minute=30))
--        self.assertFalse(match_utc(params, minute=45))
--
--    def test_invalid_minute(self):
--        params = {'time': datetime.datetime(2017, 01, 26, 13, 0, 0)}
--        self.assertRaises(Exception, lambda:
--                          match_utc(params, minute=1))
--
--
--if __name__ == '__main__':
--    main()
-diff --git a/taskcluster/taskgraph/test/test_decision.py b/taskcluster/taskgraph/test/test_decision.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/test_decision.py
-+++ /dev/null
-@@ -1,48 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import os
--import json
--import yaml
--import shutil
--import unittest
--import tempfile
--
--from taskgraph import decision
--from mozunit import main
--
--
--class TestDecision(unittest.TestCase):
--
--    def test_write_artifact_json(self):
--        data = [{'some': 'data'}]
--        tmpdir = tempfile.mkdtemp()
--        try:
--            decision.ARTIFACTS_DIR = os.path.join(tmpdir, "artifacts")
--            decision.write_artifact("artifact.json", data)
--            with open(os.path.join(decision.ARTIFACTS_DIR, "artifact.json")) as f:
--                self.assertEqual(json.load(f), data)
--        finally:
--            if os.path.exists(tmpdir):
--                shutil.rmtree(tmpdir)
--            decision.ARTIFACTS_DIR = 'artifacts'
--
--    def test_write_artifact_yml(self):
--        data = [{'some': 'data'}]
--        tmpdir = tempfile.mkdtemp()
--        try:
--            decision.ARTIFACTS_DIR = os.path.join(tmpdir, "artifacts")
--            decision.write_artifact("artifact.yml", data)
--            with open(os.path.join(decision.ARTIFACTS_DIR, "artifact.yml")) as f:
--                self.assertEqual(yaml.safe_load(f), data)
--        finally:
--            if os.path.exists(tmpdir):
--                shutil.rmtree(tmpdir)
--            decision.ARTIFACTS_DIR = 'artifacts'
--
--
--if __name__ == '__main__':
--    main()
-diff --git a/taskcluster/taskgraph/test/test_files_changed.py b/taskcluster/taskgraph/test/test_files_changed.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/test_files_changed.py
-+++ /dev/null
-@@ -1,78 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import json
--import os
--import unittest
--
--from taskgraph import files_changed
--from mozunit import main
--
--PARAMS = {
--    'head_repository': 'https://hg.mozilla.org/mozilla-central',
--    'head_rev': 'a14f88a9af7a',
--}
--
--FILES_CHANGED = [
--    'devtools/client/debugger/new/index.html',
--    'devtools/client/inspector/test/browser_inspector_highlighter-eyedropper-events.js',
--    'devtools/client/inspector/test/head.js',
--    'devtools/client/themes/rules.css',
--    'devtools/client/webconsole/test/browser_webconsole_output_06.js',
--    'devtools/server/actors/highlighters/eye-dropper.js',
--    'devtools/server/actors/object.js',
--    'docshell/base/nsDocShell.cpp',
--    'dom/tests/mochitest/general/test_contentViewer_overrideDPPX.html',
--    'taskcluster/scripts/builder/build-l10n.sh',
--]
--
--
--class FakeResponse:
--
--    def json(self):
--        with open(os.path.join(os.path.dirname(__file__), 'automationrelevance.json')) as f:
--            return json.load(f)
--
--
--class TestGetChangedFiles(unittest.TestCase):
--
--    def setUp(self):
--        files_changed._cache.clear()
--        self.old_get = files_changed.requests.get
--
--        def fake_get(url, **kwargs):
--            return FakeResponse()
--        files_changed.requests.get = fake_get
--
--    def tearDown(self):
--        files_changed.requests.get = self.old_get
--
--    def test_get_changed_files(self):
--        """Get_changed_files correctly gets the list of changed files in a push.
--        This tests against the production hg.mozilla.org so that it will detect
--        any changes in the format of the returned data."""
--        self.assertEqual(
--            sorted(files_changed.get_changed_files(PARAMS['head_repository'], PARAMS['head_rev'])),
--            FILES_CHANGED)
--
--
--class TestCheck(unittest.TestCase):
--
--    def setUp(self):
--        files_changed._cache[PARAMS['head_repository'], PARAMS['head_rev']] = FILES_CHANGED
--
--    def test_check_no_params(self):
--        self.assertTrue(files_changed.check({}, ["ignored"]))
--
--    def test_check_no_match(self):
--        self.assertFalse(files_changed.check(PARAMS, ["nosuch/**"]))
--
--    def test_check_match(self):
--        self.assertTrue(files_changed.check(PARAMS, ["devtools/**"]))
--
--
--if __name__ == '__main__':
--    main()
-diff --git a/taskcluster/taskgraph/test/test_generator.py b/taskcluster/taskgraph/test/test_generator.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/test_generator.py
-+++ /dev/null
-@@ -1,126 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import unittest
--
--from taskgraph.generator import TaskGraphGenerator, Kind
--from taskgraph import graph, target_tasks as target_tasks_mod
--from mozunit import main
--
--
--def fake_loader(kind, path, config, parameters, loaded_tasks):
--    for i in range(3):
--        dependencies = {}
--        if i >= 1:
--            dependencies['prev'] = '{}-t-{}'.format(kind, i-1)
--        yield {'kind': kind,
--               'label': '{}-t-{}'.format(kind, i),
--               'attributes': {'_tasknum': str(i)},
--               'task': {'i': i},
--               'dependencies': dependencies}
--
--
--class FakeKind(Kind):
--
--    def _get_loader(self):
--        return fake_loader
--
--    def load_tasks(self, parameters, loaded_tasks):
--        FakeKind.loaded_kinds.append(self.name)
--        return super(FakeKind, self).load_tasks(parameters, loaded_tasks)
--
--
--class WithFakeKind(TaskGraphGenerator):
--
--    def _load_kinds(self):
--        for kind_name, deps in self.parameters['_kinds']:
--            config = {
--                'transforms': [],
--            }
--            if deps:
--                config['kind-dependencies'] = deps
--            yield FakeKind(kind_name, '/fake', config)
--
--
--class TestGenerator(unittest.TestCase):
--
--    def maketgg(self, target_tasks=None, kinds=[('_fake', [])]):
--        FakeKind.loaded_kinds = []
--        self.target_tasks = target_tasks or []
--
--        def target_tasks_method(full_task_graph, parameters):
--            return self.target_tasks
--
--        target_tasks_mod._target_task_methods['test_method'] = target_tasks_method
--
--        parameters = {
--            '_kinds': kinds,
--            'target_tasks_method': 'test_method',
--        }
--
--        return WithFakeKind('/root', parameters)
--
--    def test_kind_ordering(self):
--        "When task kinds depend on each other, they are loaded in postorder"
--        self.tgg = self.maketgg(kinds=[
--            ('_fake3', ['_fake2', '_fake1']),
--            ('_fake2', ['_fake1']),
--            ('_fake1', []),
--        ])
--        self.tgg._run_until('full_task_set')
--        self.assertEqual(FakeKind.loaded_kinds, ['_fake1', '_fake2', '_fake3'])
--
--    def test_full_task_set(self):
--        "The full_task_set property has all tasks"
--        self.tgg = self.maketgg()
--        self.assertEqual(self.tgg.full_task_set.graph,
--                         graph.Graph({'_fake-t-0', '_fake-t-1', '_fake-t-2'}, set()))
--        self.assertEqual(sorted(self.tgg.full_task_set.tasks.keys()),
--                         sorted(['_fake-t-0', '_fake-t-1', '_fake-t-2']))
--
--    def test_full_task_graph(self):
--        "The full_task_graph property has all tasks, and links"
--        self.tgg = self.maketgg()
--        self.assertEqual(self.tgg.full_task_graph.graph,
--                         graph.Graph({'_fake-t-0', '_fake-t-1', '_fake-t-2'},
--                                     {
--                                         ('_fake-t-1', '_fake-t-0', 'prev'),
--                                         ('_fake-t-2', '_fake-t-1', 'prev'),
--                         }))
--        self.assertEqual(sorted(self.tgg.full_task_graph.tasks.keys()),
--                         sorted(['_fake-t-0', '_fake-t-1', '_fake-t-2']))
--
--    def test_target_task_set(self):
--        "The target_task_set property has the targeted tasks"
--        self.tgg = self.maketgg(['_fake-t-1'])
--        self.assertEqual(self.tgg.target_task_set.graph,
--                         graph.Graph({'_fake-t-1'}, set()))
--        self.assertEqual(self.tgg.target_task_set.tasks.keys(),
--                         ['_fake-t-1'])
--
--    def test_target_task_graph(self):
--        "The target_task_graph property has the targeted tasks and deps"
--        self.tgg = self.maketgg(['_fake-t-1'])
--        self.assertEqual(self.tgg.target_task_graph.graph,
--                         graph.Graph({'_fake-t-0', '_fake-t-1'},
--                                     {('_fake-t-1', '_fake-t-0', 'prev')}))
--        self.assertEqual(sorted(self.tgg.target_task_graph.tasks.keys()),
--                         sorted(['_fake-t-0', '_fake-t-1']))
--
--    def test_optimized_task_graph(self):
--        "The optimized task graph contains task ids"
--        self.tgg = self.maketgg(['_fake-t-2'])
--        tid = self.tgg.label_to_taskid
--        self.assertEqual(
--            self.tgg.optimized_task_graph.graph,
--            graph.Graph({tid['_fake-t-0'], tid['_fake-t-1'], tid['_fake-t-2']}, {
--                (tid['_fake-t-1'], tid['_fake-t-0'], 'prev'),
--                (tid['_fake-t-2'], tid['_fake-t-1'], 'prev'),
--            }))
--
--
--if __name__ == '__main__':
--    main()
-diff --git a/taskcluster/taskgraph/test/test_graph.py b/taskcluster/taskgraph/test/test_graph.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/test_graph.py
-+++ /dev/null
-@@ -1,158 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import unittest
--
--from taskgraph.graph import Graph
--from mozunit import main
--
--
--class TestGraph(unittest.TestCase):
--
--    tree = Graph(set(['a', 'b', 'c', 'd', 'e', 'f', 'g']), {
--        ('a', 'b', 'L'),
--        ('a', 'c', 'L'),
--        ('b', 'd', 'K'),
--        ('b', 'e', 'K'),
--        ('c', 'f', 'N'),
--        ('c', 'g', 'N'),
--    })
--
--    linear = Graph(set(['1', '2', '3', '4']), {
--        ('1', '2', 'L'),
--        ('2', '3', 'L'),
--        ('3', '4', 'L'),
--    })
--
--    diamonds = Graph(set(['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J']),
--                     set(tuple(x) for x in
--                         'AFL ADL BDL BEL CEL CHL DFL DGL EGL EHL FIL GIL GJL HJL'.split()
--                         ))
--
--    multi_edges = Graph(set(['1', '2', '3', '4']), {
--        ('2', '1', 'red'),
--        ('2', '1', 'blue'),
--        ('3', '1', 'red'),
--        ('3', '2', 'blue'),
--        ('3', '2', 'green'),
--        ('4', '3', 'green'),
--    })
--
--    disjoint = Graph(set(['1', '2', '3', '4', 'α', 'β', 'γ']), {
--        ('2', '1', 'red'),
--        ('3', '1', 'red'),
--        ('3', '2', 'green'),
--        ('4', '3', 'green'),
--        ('α', 'β', 'πράσινο'),
--        ('β', 'γ', 'κόκκινο'),
--        ('α', 'γ', 'μπλε'),
--    })
--
--    def test_transitive_closure_empty(self):
--        "transitive closure of an empty set is an empty graph"
--        g = Graph(set(['a', 'b', 'c']), {('a', 'b', 'L'), ('a', 'c', 'L')})
--        self.assertEqual(g.transitive_closure(set()),
--                         Graph(set(), set()))
--
--    def test_transitive_closure_disjoint(self):
--        "transitive closure of a disjoint set is a subset"
--        g = Graph(set(['a', 'b', 'c']), set())
--        self.assertEqual(g.transitive_closure(set(['a', 'c'])),
--                         Graph(set(['a', 'c']), set()))
--
--    def test_transitive_closure_trees(self):
--        "transitive closure of a tree, at two non-root nodes, is the two subtrees"
--        self.assertEqual(self.tree.transitive_closure(set(['b', 'c'])),
--                         Graph(set(['b', 'c', 'd', 'e', 'f', 'g']), {
--                             ('b', 'd', 'K'),
--                             ('b', 'e', 'K'),
--                             ('c', 'f', 'N'),
--                             ('c', 'g', 'N'),
--                         }))
--
--    def test_transitive_closure_multi_edges(self):
--        "transitive closure of a tree with multiple edges between nodes keeps those edges"
--        self.assertEqual(self.multi_edges.transitive_closure(set(['3'])),
--                         Graph(set(['1', '2', '3']), {
--                             ('2', '1', 'red'),
--                             ('2', '1', 'blue'),
--                             ('3', '1', 'red'),
--                             ('3', '2', 'blue'),
--                             ('3', '2', 'green'),
--                         }))
--
--    def test_transitive_closure_disjoint_edges(self):
--        "transitive closure of a disjoint graph keeps those edges"
--        self.assertEqual(self.disjoint.transitive_closure(set(['3', 'β'])),
--                         Graph(set(['1', '2', '3', 'β', 'γ']), {
--                             ('2', '1', 'red'),
--                             ('3', '1', 'red'),
--                             ('3', '2', 'green'),
--                             ('β', 'γ', 'κόκκινο'),
--                         }))
--
--    def test_transitive_closure_linear(self):
--        "transitive closure of a linear graph includes all nodes in the line"
--        self.assertEqual(self.linear.transitive_closure(set(['1'])), self.linear)
--
--    def test_visit_postorder_empty(self):
--        "postorder visit of an empty graph is empty"
--        self.assertEqual(list(Graph(set(), set()).visit_postorder()), [])
--
--    def assert_postorder(self, seq, all_nodes):
--        seen = set()
--        for e in seq:
--            for l, r, n in self.tree.edges:
--                if l == e:
--                    self.failUnless(r in seen)
--            seen.add(e)
--        self.assertEqual(seen, all_nodes)
--
--    def test_visit_postorder_tree(self):
--        "postorder visit of a tree satisfies invariant"
--        self.assert_postorder(self.tree.visit_postorder(), self.tree.nodes)
--
--    def test_visit_postorder_diamonds(self):
--        "postorder visit of a graph full of diamonds satisfies invariant"
--        self.assert_postorder(self.diamonds.visit_postorder(), self.diamonds.nodes)
--
--    def test_visit_postorder_multi_edges(self):
--        "postorder visit of a graph with duplicate edges satisfies invariant"
--        self.assert_postorder(self.multi_edges.visit_postorder(), self.multi_edges.nodes)
--
--    def test_visit_postorder_disjoint(self):
--        "postorder visit of a disjoint graph satisfies invariant"
--        self.assert_postorder(self.disjoint.visit_postorder(), self.disjoint.nodes)
--
--    def test_links_dict(self):
--        "link dict for a graph with multiple edges is correct"
--        self.assertEqual(self.multi_edges.links_dict(), {
--            '2': set(['1']),
--            '3': set(['1', '2']),
--            '4': set(['3']),
--        })
--
--    def test_named_links_dict(self):
--        "named link dict for a graph with multiple edges is correct"
--        self.assertEqual(self.multi_edges.named_links_dict(), {
--            '2': dict(red='1', blue='1'),
--            '3': dict(red='1', blue='2', green='2'),
--            '4': dict(green='3'),
--        })
--
--    def test_reverse_links_dict(self):
--        "reverse link dict for a graph with multiple edges is correct"
--        self.assertEqual(self.multi_edges.reverse_links_dict(), {
--            '1': set(['2', '3']),
--            '2': set(['3']),
--            '3': set(['4']),
--        })
--
--
--if __name__ == '__main__':
--    main()
-diff --git a/taskcluster/taskgraph/test/test_morph.py b/taskcluster/taskgraph/test/test_morph.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/test_morph.py
-+++ /dev/null
-@@ -1,87 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import unittest
--
--from taskgraph import morph
--from taskgraph.graph import Graph
--from taskgraph.taskgraph import TaskGraph
--from taskgraph.task import Task
--
--from mozunit import main
--
--
--class TestIndexTask(unittest.TestCase):
--
--    def test_make_index_tasks(self):
--        task_def = {
--            'routes': [
--                "index.gecko.v2.mozilla-central.latest.firefox-l10n.linux64-opt.es-MX",
--                "index.gecko.v2.mozilla-central.latest.firefox-l10n.linux64-opt.fy-NL",
--                "index.gecko.v2.mozilla-central.latest.firefox-l10n.linux64-opt.sk",
--                "index.gecko.v2.mozilla-central.latest.firefox-l10n.linux64-opt.sl",
--                "index.gecko.v2.mozilla-central.latest.firefox-l10n.linux64-opt.uk",
--                "index.gecko.v2.mozilla-central.latest.firefox-l10n.linux64-opt.zh-CN",
--                "index.gecko.v2.mozilla-central.pushdate."
--                "2017.04.04.20170404100210.firefox-l10n.linux64-opt.es-MX",
--                "index.gecko.v2.mozilla-central.pushdate."
--                "2017.04.04.20170404100210.firefox-l10n.linux64-opt.fy-NL",
--                "index.gecko.v2.mozilla-central.pushdate."
--                "2017.04.04.20170404100210.firefox-l10n.linux64-opt.sk",
--                "index.gecko.v2.mozilla-central.pushdate."
--                "2017.04.04.20170404100210.firefox-l10n.linux64-opt.sl",
--                "index.gecko.v2.mozilla-central.pushdate."
--                "2017.04.04.20170404100210.firefox-l10n.linux64-opt.uk",
--                "index.gecko.v2.mozilla-central.pushdate."
--                "2017.04.04.20170404100210.firefox-l10n.linux64-opt.zh-CN",
--                "index.gecko.v2.mozilla-central.revision."
--                "b5d8b27a753725c1de41ffae2e338798f3b5cacd.firefox-l10n.linux64-opt.es-MX",
--                "index.gecko.v2.mozilla-central.revision."
--                "b5d8b27a753725c1de41ffae2e338798f3b5cacd.firefox-l10n.linux64-opt.fy-NL",
--                "index.gecko.v2.mozilla-central.revision."
--                "b5d8b27a753725c1de41ffae2e338798f3b5cacd.firefox-l10n.linux64-opt.sk",
--                "index.gecko.v2.mozilla-central.revision."
--                "b5d8b27a753725c1de41ffae2e338798f3b5cacd.firefox-l10n.linux64-opt.sl",
--                "index.gecko.v2.mozilla-central.revision."
--                "b5d8b27a753725c1de41ffae2e338798f3b5cacd.firefox-l10n.linux64-opt.uk",
--                "index.gecko.v2.mozilla-central.revision."
--                "b5d8b27a753725c1de41ffae2e338798f3b5cacd.firefox-l10n.linux64-opt.zh-CN"
--            ],
--            'deadline': 'soon',
--            'metadata': {
--                'description': 'desc',
--                'owner': 'owner@foo.com',
--                'source': 'https://source',
--            },
--        }
--        task = Task(kind='test', label='a', attributes={}, task=task_def)
--        docker_task = Task(kind='docker-image', label='build-docker-image-index-task',
--                           attributes={}, task={})
--        taskgraph, label_to_taskid = self.make_taskgraph({
--            task.label: task,
--            docker_task.label: docker_task,
--        })
--
--        index_task = morph.make_index_task(task, taskgraph, label_to_taskid)
--
--        self.assertEqual(index_task.task['payload']['command'][0], 'insert-indexes.js')
--        self.assertEqual(index_task.task['payload']['env']['TARGET_TASKID'], 'a-tid')
--
--        # check the scope summary
--        self.assertEqual(index_task.task['scopes'],
--                         ['index:insert-task:gecko.v2.mozilla-central.*'])
--
--    def make_taskgraph(self, tasks):
--        label_to_taskid = {k: k + '-tid' for k in tasks}
--        for label, task_id in label_to_taskid.iteritems():
--            tasks[label].task_id = task_id
--        graph = Graph(nodes=set(tasks), edges=set())
--        taskgraph = TaskGraph(tasks, graph)
--        return taskgraph, label_to_taskid
--
--
--if __name__ == '__main__':
--    main()
-diff --git a/taskcluster/taskgraph/test/test_optimize.py b/taskcluster/taskgraph/test/test_optimize.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/test_optimize.py
-+++ /dev/null
-@@ -1,249 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import unittest
--
--from taskgraph.optimize import optimize_task_graph, resolve_task_references, optimization
--from taskgraph.optimize import annotate_task_graph, get_subgraph
--from taskgraph.taskgraph import TaskGraph
--from taskgraph import graph
--from taskgraph.task import Task
--from mozunit import main
--
--
--class TestResolveTaskReferences(unittest.TestCase):
--
--    def do(self, input, output):
--        taskid_for_edge_name = {'edge%d' % n: 'tid%d' % n for n in range(1, 4)}
--        self.assertEqual(resolve_task_references('subject', input, taskid_for_edge_name), output)
--
--    def test_in_list(self):
--        "resolve_task_references resolves task references in a list"
--        self.do({'in-a-list': ['stuff', {'task-reference': '<edge1>'}]},
--                {'in-a-list': ['stuff', 'tid1']})
--
--    def test_in_dict(self):
--        "resolve_task_references resolves task references in a dict"
--        self.do({'in-a-dict': {'stuff': {'task-reference': '<edge2>'}}},
--                {'in-a-dict': {'stuff': 'tid2'}})
--
--    def test_multiple(self):
--        "resolve_task_references resolves multiple references in the same string"
--        self.do({'multiple': {'task-reference': 'stuff <edge1> stuff <edge2> after'}},
--                {'multiple': 'stuff tid1 stuff tid2 after'})
--
--    def test_embedded(self):
--        "resolve_task_references resolves ebmedded references"
--        self.do({'embedded': {'task-reference': 'stuff before <edge3> stuff after'}},
--                {'embedded': 'stuff before tid3 stuff after'})
--
--    def test_escaping(self):
--        "resolve_task_references resolves escapes in task references"
--        self.do({'escape': {'task-reference': '<<><edge3>>'}},
--                {'escape': '<tid3>'})
--
--    def test_invalid(self):
--        "resolve_task_references raises a KeyError on reference to an invalid task"
--        self.assertRaisesRegexp(
--            KeyError,
--            "task 'subject' has no dependency named 'no-such'",
--            lambda: resolve_task_references('subject', {'task-reference': '<no-such>'}, {})
--        )
--
--
--class TestOptimize(unittest.TestCase):
--
--    kind = None
--
--    @classmethod
--    def setUpClass(cls):
--        # set up some simple optimization functions
--        optimization('no-optimize')(lambda self, params: False)
--        optimization('optimize-away')(lambda self, params: True)
--        optimization('optimize-to-task')(lambda self, params, task: task)
--
--    def make_task(self, label, optimization=None, task_def=None, optimized=None, task_id=None):
--        task_def = task_def or {'sample': 'task-def'}
--        task = Task(kind='test', label=label, attributes={}, task=task_def)
--        task.optimized = optimized
--        if optimization:
--            task.optimizations = [optimization]
--        else:
--            task.optimizations = []
--        task.task_id = task_id
--        return task
--
--    def make_graph(self, *tasks_and_edges):
--        tasks = {t.label: t for t in tasks_and_edges if isinstance(t, Task)}
--        edges = {e for e in tasks_and_edges if not isinstance(e, Task)}
--        return TaskGraph(tasks, graph.Graph(set(tasks), edges))
--
--    def assert_annotations(self, graph, **annotations):
--        def repl(task_id):
--            return 'SLUGID' if task_id and len(task_id) == 22 else task_id
--        got_annotations = {
--            t.label: repl(t.task_id) or t.optimized for t in graph.tasks.itervalues()
--        }
--        self.assertEqual(got_annotations, annotations)
--
--    def test_annotate_task_graph_no_optimize(self):
--        "annotating marks everything as un-optimized if the kind returns that"
--        graph = self.make_graph(
--            self.make_task('task1', ['no-optimize']),
--            self.make_task('task2', ['no-optimize']),
--            self.make_task('task3', ['no-optimize']),
--            ('task2', 'task1', 'build'),
--            ('task2', 'task3', 'image'),
--        )
--        annotate_task_graph(graph, {}, set(), graph.graph.named_links_dict(), {}, None)
--        self.assert_annotations(
--            graph,
--            task1=False,
--            task2=False,
--            task3=False
--        )
--
--    def test_annotate_task_graph_optimize_away_dependency(self):
--        "raises exception if kind optimizes away a task on which another depends"
--        graph = self.make_graph(
--            self.make_task('task1', ['optimize-away']),
--            self.make_task('task2', ['no-optimize']),
--            ('task2', 'task1', 'build'),
--        )
--        self.assertRaises(
--            Exception,
--            lambda: annotate_task_graph(graph, {}, set(), graph.graph.named_links_dict(), {}, None)
--        )
--
--    def test_annotate_task_graph_do_not_optimize(self):
--        "annotating marks everything as un-optimized if in do_not_optimize"
--        graph = self.make_graph(
--            self.make_task('task1', ['optimize-away']),
--            self.make_task('task2', ['optimize-away']),
--            ('task2', 'task1', 'build'),
--        )
--        label_to_taskid = {}
--        annotate_task_graph(graph, {}, {'task1', 'task2'},
--                            graph.graph.named_links_dict(), label_to_taskid, None)
--        self.assert_annotations(
--            graph,
--            task1=False,
--            task2=False
--        )
--        self.assertEqual
--
--    def test_annotate_task_graph_nos_do_not_propagate(self):
--        "a task with a non-optimized dependency can be optimized"
--        graph = self.make_graph(
--            self.make_task('task1', ['no-optimize']),
--            self.make_task('task2', ['optimize-to-task', 'taskid']),
--            self.make_task('task3', ['optimize-to-task', 'taskid']),
--            ('task2', 'task1', 'build'),
--            ('task2', 'task3', 'image'),
--        )
--        annotate_task_graph(graph, {}, set(),
--                            graph.graph.named_links_dict(), {}, None)
--        self.assert_annotations(
--            graph,
--            task1=False,
--            task2='taskid',
--            task3='taskid'
--        )
--
--    def test_get_subgraph_single_dep(self):
--        "when a single dependency is optimized, it is omitted from the graph"
--        graph = self.make_graph(
--            self.make_task('task1', optimized=True, task_id='dep1'),
--            self.make_task('task2', optimized=False),
--            self.make_task('task3', optimized=False),
--            ('task2', 'task1', 'build'),
--            ('task2', 'task3', 'image'),
--        )
--        label_to_taskid = {'task1': 'dep1'}
--        sub = get_subgraph(graph, graph.graph.named_links_dict(), label_to_taskid)
--        task2 = label_to_taskid['task2']
--        task3 = label_to_taskid['task3']
--        self.assertEqual(sub.graph.nodes, {task2, task3})
--        self.assertEqual(sub.graph.edges, {(task2, task3, 'image')})
--        self.assertEqual(sub.tasks[task2].task_id, task2)
--        self.assertEqual(sorted(sub.tasks[task2].task['dependencies']),
--                         sorted([task3, 'dep1']))
--        self.assertEqual(sub.tasks[task3].task_id, task3)
--        self.assertEqual(sorted(sub.tasks[task3].task['dependencies']), [])
--
--    def test_get_subgraph_dep_chain(self):
--        "when a dependency chain is optimized, it is omitted from the graph"
--        graph = self.make_graph(
--            self.make_task('task1', optimized=True, task_id='dep1'),
--            self.make_task('task2', optimized=True, task_id='dep2'),
--            self.make_task('task3', optimized=False),
--            ('task2', 'task1', 'build'),
--            ('task3', 'task2', 'image'),
--        )
--        label_to_taskid = {'task1': 'dep1', 'task2': 'dep2'}
--        sub = get_subgraph(graph, graph.graph.named_links_dict(), label_to_taskid)
--        task3 = label_to_taskid['task3']
--        self.assertEqual(sub.graph.nodes, {task3})
--        self.assertEqual(sub.graph.edges, set())
--        self.assertEqual(sub.tasks[task3].task_id, task3)
--        self.assertEqual(sorted(sub.tasks[task3].task['dependencies']), ['dep2'])
--
--    def test_get_subgraph_opt_away(self):
--        "when a leaf task is optimized away, it is omitted from the graph"
--        graph = self.make_graph(
--            self.make_task('task1', optimized=False),
--            self.make_task('task2', optimized=True),
--            ('task2', 'task1', 'build'),
--        )
--        label_to_taskid = {'task2': 'dep2'}
--        sub = get_subgraph(graph, graph.graph.named_links_dict(), label_to_taskid)
--        task1 = label_to_taskid['task1']
--        self.assertEqual(sub.graph.nodes, {task1})
--        self.assertEqual(sub.graph.edges, set())
--        self.assertEqual(sub.tasks[task1].task_id, task1)
--        self.assertEqual(sorted(sub.tasks[task1].task['dependencies']), [])
--
--    def test_get_subgraph_refs_resolved(self):
--        "get_subgraph resolves task references"
--        graph = self.make_graph(
--            self.make_task('task1', optimized=True, task_id='dep1'),
--            self.make_task(
--                'task2',
--                optimized=False,
--                task_def={'payload': {'task-reference': 'http://<build>/<test>'}}
--            ),
--            ('task2', 'task1', 'build'),
--            ('task2', 'task3', 'test'),
--            self.make_task('task3', optimized=False),
--        )
--        label_to_taskid = {'task1': 'dep1'}
--        sub = get_subgraph(graph, graph.graph.named_links_dict(), label_to_taskid)
--        task2 = label_to_taskid['task2']
--        task3 = label_to_taskid['task3']
--        self.assertEqual(sub.graph.nodes, {task2, task3})
--        self.assertEqual(sub.graph.edges, {(task2, task3, 'test')})
--        self.assertEqual(sub.tasks[task2].task_id, task2)
--        self.assertEqual(sorted(sub.tasks[task2].task['dependencies']), sorted([task3, 'dep1']))
--        self.assertEqual(sub.tasks[task2].task['payload'], 'http://dep1/' + task3)
--        self.assertEqual(sub.tasks[task3].task_id, task3)
--
--    def test_optimize(self):
--        "optimize_task_graph annotates and extracts the subgraph from a simple graph"
--        input = self.make_graph(
--            self.make_task('task1', ['optimize-to-task', 'dep1']),
--            self.make_task('task2', ['no-optimize']),
--            self.make_task('task3', ['no-optimize']),
--            ('task2', 'task1', 'build'),
--            ('task2', 'task3', 'image'),
--        )
--        opt, label_to_taskid = optimize_task_graph(input, {}, set())
--        self.assertEqual(opt.graph, graph.Graph(
--            {label_to_taskid['task2'], label_to_taskid['task3']},
--            {(label_to_taskid['task2'], label_to_taskid['task3'], 'image')}))
--
--
--if __name__ == '__main__':
--    main()
-diff --git a/taskcluster/taskgraph/test/test_parameters.py b/taskcluster/taskgraph/test/test_parameters.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/test_parameters.py
-+++ /dev/null
-@@ -1,63 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import unittest
--
--from taskgraph.parameters import Parameters, load_parameters_file, PARAMETER_NAMES
--from mozunit import main, MockedOpen
--
--
--class TestParameters(unittest.TestCase):
--
--    vals = {n: n for n in PARAMETER_NAMES}
--
--    def test_Parameters_immutable(self):
--        p = Parameters(**self.vals)
--
--        def assign():
--            p['head_ref'] = 20
--        self.assertRaises(Exception, assign)
--
--    def test_Parameters_missing_KeyError(self):
--        p = Parameters(**self.vals)
--        self.assertRaises(KeyError, lambda: p['z'])
--
--    def test_Parameters_invalid_KeyError(self):
--        """even if the value is present, if it's not a valid property, raise KeyError"""
--        p = Parameters(xyz=10, **self.vals)
--        self.assertRaises(KeyError, lambda: p['xyz'])
--
--    def test_Parameters_get(self):
--        p = Parameters(head_ref=10, level=20)
--        self.assertEqual(p['head_ref'], 10)
--
--    def test_Parameters_check(self):
--        p = Parameters(**self.vals)
--        p.check()  # should not raise
--
--    def test_Parameters_check_missing(self):
--        p = Parameters()
--        self.assertRaises(Exception, lambda: p.check())
--
--    def test_Parameters_check_extra(self):
--        p = Parameters(xyz=10, **self.vals)
--        self.assertRaises(Exception, lambda: p.check())
--
--    def test_load_parameters_file_yaml(self):
--        with MockedOpen({"params.yml": "some: data\n"}):
--            self.assertEqual(
--                    load_parameters_file('params.yml'),
--                    {'some': 'data'})
--
--    def test_load_parameters_file_json(self):
--        with MockedOpen({"params.json": '{"some": "data"}'}):
--            self.assertEqual(
--                    load_parameters_file('params.json'),
--                    {'some': 'data'})
--
--
--if __name__ == '__main__':
--    main()
-diff --git a/taskcluster/taskgraph/test/test_target_tasks.py b/taskcluster/taskgraph/test/test_target_tasks.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/test_target_tasks.py
-+++ /dev/null
-@@ -1,109 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import os
--import unittest
--
--from taskgraph import target_tasks
--from taskgraph import try_option_syntax
--from taskgraph.graph import Graph
--from taskgraph.taskgraph import TaskGraph
--from taskgraph.task import Task
--from mozunit import main
--
--
--class FakeTryOptionSyntax(object):
--
--    def __init__(self, message, task_graph):
--        self.trigger_tests = 0
--        self.talos_trigger_tests = 0
--        self.notifications = None
--        self.env = []
--        self.profile = False
--        self.tag = None
--        self.no_retry = False
--
--    def task_matches(self, task):
--        return 'at-at' in task.attributes
--
--
--class TestTargetTasks(unittest.TestCase):
--
--    def default_matches(self, run_on_projects, project):
--        method = target_tasks.get_method('default')
--        graph = TaskGraph(tasks={
--            'a': Task(kind='build', label='a',
--                      attributes={'run_on_projects': run_on_projects},
--                      task={}),
--        }, graph=Graph(nodes={'a'}, edges=set()))
--        parameters = {'project': project}
--        return 'a' in method(graph, parameters)
--
--    def test_default_all(self):
--        """run_on_projects=[all] includes release, integration, and other projects"""
--        self.assertTrue(self.default_matches(['all'], 'mozilla-central'))
--        self.assertTrue(self.default_matches(['all'], 'mozilla-inbound'))
--        self.assertTrue(self.default_matches(['all'], 'baobab'))
--
--    def test_default_integration(self):
--        """run_on_projects=[integration] includes integration projects"""
--        self.assertFalse(self.default_matches(['integration'], 'mozilla-central'))
--        self.assertTrue(self.default_matches(['integration'], 'mozilla-inbound'))
--        self.assertFalse(self.default_matches(['integration'], 'baobab'))
--
--    def test_default_release(self):
--        """run_on_projects=[release] includes release projects"""
--        self.assertTrue(self.default_matches(['release'], 'mozilla-central'))
--        self.assertFalse(self.default_matches(['release'], 'mozilla-inbound'))
--        self.assertFalse(self.default_matches(['release'], 'baobab'))
--
--    def test_default_nothing(self):
--        """run_on_projects=[] includes nothing"""
--        self.assertFalse(self.default_matches([], 'mozilla-central'))
--        self.assertFalse(self.default_matches([], 'mozilla-inbound'))
--        self.assertFalse(self.default_matches([], 'baobab'))
--
--    def test_try_tasks(self):
--        tasks = {
--            'a': Task(kind=None, label='a', attributes={}, task={}),
--            'b': Task(kind=None, label='b', attributes={'at-at': 'yep'}, task={}),
--            'c': Task(kind=None, label='c', attributes={}, task={}),
--        }
--        graph = Graph(nodes=set('abc'), edges=set())
--        tg = TaskGraph(tasks, graph)
--
--        method = target_tasks.get_method('try_tasks')
--        config = os.path.join(os.getcwd(), 'try_task_config.json')
--
--        orig_TryOptionSyntax = try_option_syntax.TryOptionSyntax
--        try:
--            try_option_syntax.TryOptionSyntax = FakeTryOptionSyntax
--
--            # no try specifier
--            self.assertEqual(method(tg, {'message': ''}), ['b'])
--
--            # try syntax only
--            self.assertEqual(method(tg, {'message': 'try: me'}), ['b'])
--
--            # try task config only
--            with open(config, 'w') as fh:
--                fh.write('["c"]')
--            self.assertEqual(method(tg, {'message': ''}), ['c'])
--
--            with open(config, 'w') as fh:
--                fh.write('{"c": {}}')
--            self.assertEqual(method(tg, {'message': ''}), ['c'])
--
--            # both syntax and config
--            self.assertEqual(set(method(tg, {'message': 'try: me'})), set(['b', 'c']))
--        finally:
--            try_option_syntax.TryOptionSyntax = orig_TryOptionSyntax
--            if os.path.isfile(config):
--                os.remove(config)
--
--
--if __name__ == '__main__':
--    main()
-diff --git a/taskcluster/taskgraph/test/test_taskgraph.py b/taskcluster/taskgraph/test/test_taskgraph.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/test_taskgraph.py
-+++ /dev/null
-@@ -1,79 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import unittest
--
--from taskgraph.graph import Graph
--from taskgraph.task import Task
--from taskgraph.taskgraph import TaskGraph
--from mozunit import main
--
--
--class TestTaskGraph(unittest.TestCase):
--
--    maxDiff = None
--
--    def test_taskgraph_to_json(self):
--        tasks = {
--            'a': Task(kind='test', label='a',
--                      attributes={'attr': 'a-task'},
--                      task={'taskdef': True}),
--            'b': Task(kind='test', label='b',
--                      attributes={},
--                      task={'task': 'def'},
--                      optimizations=[['seta']],
--                      # note that this dep is ignored, superseded by that
--                      # from the taskgraph's edges
--                      dependencies={'first': 'a'}),
--        }
--        graph = Graph(nodes=set('ab'), edges={('a', 'b', 'edgelabel')})
--        taskgraph = TaskGraph(tasks, graph)
--
--        res = taskgraph.to_json()
--
--        self.assertEqual(res, {
--            'a': {
--                'kind': 'test',
--                'label': 'a',
--                'attributes': {'attr': 'a-task', 'kind': 'test'},
--                'task': {'taskdef': True},
--                'dependencies': {'edgelabel': 'b'},
--                'optimizations': [],
--            },
--            'b': {
--                'kind': 'test',
--                'label': 'b',
--                'attributes': {'kind': 'test'},
--                'task': {'task': 'def'},
--                'dependencies': {},
--                'optimizations': [['seta']],
--            }
--        })
--
--    def test_round_trip(self):
--        graph = TaskGraph(tasks={
--            'a': Task(
--                kind='fancy',
--                label='a',
--                attributes={},
--                dependencies={'prereq': 'b'},  # must match edges, below
--                optimizations=[['seta']],
--                task={'task': 'def'}),
--            'b': Task(
--                kind='pre',
--                label='b',
--                attributes={},
--                dependencies={},
--                optimizations=[['seta']],
--                task={'task': 'def2'}),
--        }, graph=Graph(nodes={'a', 'b'}, edges={('a', 'b', 'prereq')}))
--
--        tasks, new_graph = TaskGraph.from_json(graph.to_json())
--        self.assertEqual(graph, new_graph)
--
--
--if __name__ == '__main__':
--    main()
-diff --git a/taskcluster/taskgraph/test/test_transforms_base.py b/taskcluster/taskgraph/test/test_transforms_base.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/test_transforms_base.py
-+++ /dev/null
-@@ -1,42 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import unittest
--from mozunit import main
--from taskgraph.transforms.base import (
--    TransformSequence
--)
--
--transforms = TransformSequence()
--
--
--@transforms.add
--def trans1(config, tests):
--    for test in tests:
--        test['one'] = 1
--        yield test
--
--
--@transforms.add
--def trans2(config, tests):
--    for test in tests:
--        test['two'] = 2
--        yield test
--
--
--class TestTransformSequence(unittest.TestCase):
--
--    def test_sequence(self):
--        tests = [{}, {'two': 1, 'second': True}]
--        res = list(transforms({}, tests))
--        self.assertEqual(res, [
--            {u'two': 2, u'one': 1},
--            {u'second': True, u'two': 2, u'one': 1},
--        ])
--
--
--if __name__ == '__main__':
--    main()
-diff --git a/taskcluster/taskgraph/test/test_try_option_syntax.py b/taskcluster/taskgraph/test/test_try_option_syntax.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/test_try_option_syntax.py
-+++ /dev/null
-@@ -1,329 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import unittest
--
--from taskgraph.try_option_syntax import TryOptionSyntax
--from taskgraph.try_option_syntax import RIDEALONG_BUILDS
--from taskgraph.graph import Graph
--from taskgraph.taskgraph import TaskGraph
--from taskgraph.task import Task
--from mozunit import main
--
--
--def unittest_task(n, tp, bt='opt'):
--    return (n, Task('test', n, {
--        'unittest_try_name': n,
--        'test_platform': tp.split('/')[0],
--        'build_type': bt,
--    }, {}))
--
--
--def talos_task(n, tp, bt='opt'):
--    return (n, Task('test', n, {
--        'talos_try_name': n,
--        'test_platform': tp.split('/')[0],
--        'build_type': bt,
--    }, {}))
--
--
--tasks = {k: v for k, v in [
--    unittest_task('mochitest-browser-chrome', 'linux/opt'),
--    unittest_task('mochitest-browser-chrome-e10s', 'linux64/opt'),
--    unittest_task('mochitest-chrome', 'linux/debug', 'debug'),
--    unittest_task('mochitest-webgl', 'linux/debug', 'debug'),
--    unittest_task('extra1', 'linux', 'debug/opt'),
--    unittest_task('extra2', 'win32/opt'),
--    unittest_task('crashtest-e10s', 'linux/other'),
--    unittest_task('gtest', 'linux64/asan'),
--    talos_task('dromaeojs', 'linux64/psan'),
--    unittest_task('extra3', 'linux/opt'),
--    unittest_task('extra4', 'linux64/debug', 'debug'),
--    unittest_task('extra5', 'linux/this'),
--    unittest_task('extra6', 'linux/that'),
--    unittest_task('extra7', 'linux/other'),
--    unittest_task('extra8', 'linux64/asan'),
--    talos_task('extra9', 'linux64/psan'),
--]}
--
--for r in RIDEALONG_BUILDS.values():
--    tasks.update({k: v for k, v in [
--        unittest_task(n + '-test', n) for n in r
--    ]})
--
--unittest_tasks = {k: v for k, v in tasks.iteritems()
--                  if 'unittest_try_name' in v.attributes}
--talos_tasks = {k: v for k, v in tasks.iteritems()
--               if 'talos_try_name' in v.attributes}
--graph_with_jobs = TaskGraph(tasks, Graph(set(tasks), set()))
--
--
--class TestTryOptionSyntax(unittest.TestCase):
--
--    def test_empty_message(self):
--        "Given an empty message, it should return an empty value"
--        tos = TryOptionSyntax('', graph_with_jobs)
--        self.assertEqual(tos.build_types, [])
--        self.assertEqual(tos.jobs, [])
--        self.assertEqual(tos.unittests, [])
--        self.assertEqual(tos.talos, [])
--        self.assertEqual(tos.platforms, [])
--        self.assertEqual(tos.trigger_tests, 0)
--        self.assertEqual(tos.talos_trigger_tests, 0)
--        self.assertEqual(tos.env, [])
--        self.assertFalse(tos.profile)
--        self.assertIsNone(tos.tag)
--        self.assertFalse(tos.no_retry)
--
--    def test_message_without_try(self):
--        "Given a non-try message, it should return an empty value"
--        tos = TryOptionSyntax('Bug 1234: frobnicate the foo', graph_with_jobs)
--        self.assertEqual(tos.build_types, [])
--        self.assertEqual(tos.jobs, [])
--        self.assertEqual(tos.unittests, [])
--        self.assertEqual(tos.talos, [])
--        self.assertEqual(tos.platforms, [])
--        self.assertEqual(tos.trigger_tests, 0)
--        self.assertEqual(tos.talos_trigger_tests, 0)
--        self.assertEqual(tos.env, [])
--        self.assertFalse(tos.profile)
--        self.assertIsNone(tos.tag)
--        self.assertFalse(tos.no_retry)
--
--    def test_unknown_args(self):
--        "unknown arguments are ignored"
--        tos = TryOptionSyntax('try: --doubledash -z extra', graph_with_jobs)
--        # equivalent to "try:".
--        self.assertEqual(tos.build_types, [])
--        self.assertEqual(tos.jobs, None)
--
--    def test_apostrophe_in_message(self):
--        "apostrophe does not break parsing"
--        tos = TryOptionSyntax('Increase spammy log\'s log level. try: -b do', graph_with_jobs)
--        self.assertEqual(sorted(tos.build_types), ['debug', 'opt'])
--
--    def test_b_do(self):
--        "-b do should produce both build_types"
--        tos = TryOptionSyntax('try: -b do', graph_with_jobs)
--        self.assertEqual(sorted(tos.build_types), ['debug', 'opt'])
--
--    def test_b_d(self):
--        "-b d should produce build_types=['debug']"
--        tos = TryOptionSyntax('try: -b d', graph_with_jobs)
--        self.assertEqual(sorted(tos.build_types), ['debug'])
--
--    def test_b_o(self):
--        "-b o should produce build_types=['opt']"
--        tos = TryOptionSyntax('try: -b o', graph_with_jobs)
--        self.assertEqual(sorted(tos.build_types), ['opt'])
--
--    def test_build_o(self):
--        "--build o should produce build_types=['opt']"
--        tos = TryOptionSyntax('try: --build o', graph_with_jobs)
--        self.assertEqual(sorted(tos.build_types), ['opt'])
--
--    def test_b_dx(self):
--        "-b dx should produce build_types=['debug'], silently ignoring the x"
--        tos = TryOptionSyntax('try: -b dx', graph_with_jobs)
--        self.assertEqual(sorted(tos.build_types), ['debug'])
--
--    def test_j_job(self):
--        "-j somejob sets jobs=['somejob']"
--        tos = TryOptionSyntax('try: -j somejob', graph_with_jobs)
--        self.assertEqual(sorted(tos.jobs), ['somejob'])
--
--    def test_j_jobs(self):
--        "-j job1,job2 sets jobs=['job1', 'job2']"
--        tos = TryOptionSyntax('try: -j job1,job2', graph_with_jobs)
--        self.assertEqual(sorted(tos.jobs), ['job1', 'job2'])
--
--    def test_j_all(self):
--        "-j all sets jobs=None"
--        tos = TryOptionSyntax('try: -j all', graph_with_jobs)
--        self.assertEqual(tos.jobs, None)
--
--    def test_j_twice(self):
--        "-j job1 -j job2 sets jobs=job1, job2"
--        tos = TryOptionSyntax('try: -j job1 -j job2', graph_with_jobs)
--        self.assertEqual(sorted(tos.jobs), sorted(['job1', 'job2']))
--
--    def test_p_all(self):
--        "-p all sets platforms=None"
--        tos = TryOptionSyntax('try: -p all', graph_with_jobs)
--        self.assertEqual(tos.platforms, None)
--
--    def test_p_linux(self):
--        "-p linux sets platforms=['linux', 'linux-l10n']"
--        tos = TryOptionSyntax('try: -p linux', graph_with_jobs)
--        self.assertEqual(tos.platforms, ['linux', 'linux-l10n'])
--
--    def test_p_linux_win32(self):
--        "-p linux,win32 sets platforms=['linux', 'linux-l10n', 'win32']"
--        tos = TryOptionSyntax('try: -p linux,win32', graph_with_jobs)
--        self.assertEqual(sorted(tos.platforms), ['linux', 'linux-l10n', 'win32'])
--
--    def test_p_expands_ridealongs(self):
--        "-p linux,linux64 includes the RIDEALONG_BUILDS"
--        tos = TryOptionSyntax('try: -p linux,linux64', graph_with_jobs)
--        platforms = set(['linux'] + RIDEALONG_BUILDS['linux'])
--        platforms |= set(['linux64'] + RIDEALONG_BUILDS['linux64'])
--        self.assertEqual(sorted(tos.platforms), sorted(platforms))
--
--    def test_u_none(self):
--        "-u none sets unittests=[]"
--        tos = TryOptionSyntax('try: -u none', graph_with_jobs)
--        self.assertEqual(sorted(tos.unittests), [])
--
--    def test_u_all(self):
--        "-u all sets unittests=[..whole list..]"
--        tos = TryOptionSyntax('try: -u all', graph_with_jobs)
--        self.assertEqual(sorted(tos.unittests), sorted([{'test': t} for t in unittest_tasks]))
--
--    def test_u_single(self):
--        "-u mochitest-webgl sets unittests=[mochitest-webgl]"
--        tos = TryOptionSyntax('try: -u mochitest-webgl', graph_with_jobs)
--        self.assertEqual(sorted(tos.unittests), sorted([{'test': 'mochitest-webgl'}]))
--
--    def test_u_alias(self):
--        "-u mochitest-gl sets unittests=[mochitest-webgl]"
--        tos = TryOptionSyntax('try: -u mochitest-gl', graph_with_jobs)
--        self.assertEqual(sorted(tos.unittests), sorted([{'test': 'mochitest-webgl'}]))
--
--    def test_u_multi_alias(self):
--        "-u e10s sets unittests=[all e10s unittests]"
--        tos = TryOptionSyntax('try: -u e10s', graph_with_jobs)
--        self.assertEqual(sorted(tos.unittests), sorted([
--            {'test': t} for t in unittest_tasks if 'e10s' in t
--        ]))
--
--    def test_u_commas(self):
--        "-u mochitest-webgl,gtest sets unittests=both"
--        tos = TryOptionSyntax('try: -u mochitest-webgl,gtest', graph_with_jobs)
--        self.assertEqual(sorted(tos.unittests), sorted([
--            {'test': 'mochitest-webgl'},
--            {'test': 'gtest'},
--        ]))
--
--    def test_u_chunks(self):
--        "-u gtest-3,gtest-4 selects the third and fourth chunk of gtest"
--        tos = TryOptionSyntax('try: -u gtest-3,gtest-4', graph_with_jobs)
--        self.assertEqual(sorted(tos.unittests), sorted([
--            {'test': 'gtest', 'only_chunks': set('34')},
--        ]))
--
--    def test_u_platform(self):
--        "-u gtest[linux] selects the linux platform for gtest"
--        tos = TryOptionSyntax('try: -u gtest[linux]', graph_with_jobs)
--        self.assertEqual(sorted(tos.unittests), sorted([
--            {'test': 'gtest', 'platforms': ['linux']},
--        ]))
--
--    def test_u_platforms(self):
--        "-u gtest[linux,win32] selects the linux and win32 platforms for gtest"
--        tos = TryOptionSyntax('try: -u gtest[linux,win32]', graph_with_jobs)
--        self.assertEqual(sorted(tos.unittests), sorted([
--            {'test': 'gtest', 'platforms': ['linux', 'win32']},
--        ]))
--
--    def test_u_platforms_pretty(self):
--        "-u gtest[Ubuntu] selects the linux, linux64 and linux64-asan platforms for gtest"
--        tos = TryOptionSyntax('try: -u gtest[Ubuntu]', graph_with_jobs)
--        self.assertEqual(sorted(tos.unittests), sorted([
--            {'test': 'gtest', 'platforms': ['linux32', 'linux64', 'linux64-asan']},
--        ]))
--
--    def test_u_platforms_negated(self):
--        "-u gtest[-linux] selects all platforms but linux for gtest"
--        tos = TryOptionSyntax('try: -u gtest[-linux]', graph_with_jobs)
--        all_platforms = set([x.attributes['test_platform'] for x in unittest_tasks.values()])
--        self.assertEqual(sorted(tos.unittests[0]['platforms']), sorted(
--            [x for x in all_platforms if x != 'linux']
--        ))
--
--    def test_u_platforms_negated_pretty(self):
--        "-u gtest[Ubuntu,-x64] selects just linux for gtest"
--        tos = TryOptionSyntax('try: -u gtest[Ubuntu,-x64]', graph_with_jobs)
--        self.assertEqual(sorted(tos.unittests), sorted([
--            {'test': 'gtest', 'platforms': ['linux32']},
--        ]))
--
--    def test_u_chunks_platforms(self):
--        "-u gtest-1[linux,win32] selects the linux and win32 platforms for chunk 1 of gtest"
--        tos = TryOptionSyntax('try: -u gtest-1[linux,win32]', graph_with_jobs)
--        self.assertEqual(sorted(tos.unittests), sorted([
--            {'test': 'gtest', 'platforms': ['linux', 'win32'], 'only_chunks': set('1')},
--        ]))
--
--    def test_t_none(self):
--        "-t none sets talos=[]"
--        tos = TryOptionSyntax('try: -t none', graph_with_jobs)
--        self.assertEqual(sorted(tos.talos), [])
--
--    def test_t_all(self):
--        "-t all sets talos=[..whole list..]"
--        tos = TryOptionSyntax('try: -t all', graph_with_jobs)
--        self.assertEqual(sorted(tos.talos), sorted([{'test': t} for t in talos_tasks]))
--
--    def test_t_single(self):
--        "-t mochitest-webgl sets talos=[mochitest-webgl]"
--        tos = TryOptionSyntax('try: -t mochitest-webgl', graph_with_jobs)
--        self.assertEqual(sorted(tos.talos), sorted([{'test': 'mochitest-webgl'}]))
--
--    # -t shares an implementation with -u, so it's not tested heavily
--
--    def test_trigger_tests(self):
--        "--rebuild 10 sets trigger_tests"
--        tos = TryOptionSyntax('try: --rebuild 10', graph_with_jobs)
--        self.assertEqual(tos.trigger_tests, 10)
--
--    def test_talos_trigger_tests(self):
--        "--rebuild-talos 10 sets talos_trigger_tests"
--        tos = TryOptionSyntax('try: --rebuild-talos 10', graph_with_jobs)
--        self.assertEqual(tos.talos_trigger_tests, 10)
--
--    def test_interactive(self):
--        "--interactive sets interactive"
--        tos = TryOptionSyntax('try: --interactive', graph_with_jobs)
--        self.assertEqual(tos.interactive, True)
--
--    def test_all_email(self):
--        "--all-emails sets notifications"
--        tos = TryOptionSyntax('try: --all-emails', graph_with_jobs)
--        self.assertEqual(tos.notifications, 'all')
--
--    def test_fail_email(self):
--        "--failure-emails sets notifications"
--        tos = TryOptionSyntax('try: --failure-emails', graph_with_jobs)
--        self.assertEqual(tos.notifications, 'failure')
--
--    def test_no_email(self):
--        "no email settings don't set notifications"
--        tos = TryOptionSyntax('try:', graph_with_jobs)
--        self.assertEqual(tos.notifications, None)
--
--    def test_setenv(self):
--        "--setenv VAR=value adds a environment variables setting to env"
--        tos = TryOptionSyntax('try: --setenv VAR1=value1 --setenv VAR2=value2', graph_with_jobs)
--        self.assertEqual(tos.env, ['VAR1=value1', 'VAR2=value2'])
--
--    def test_profile(self):
--        "--geckoProfile sets profile to true"
--        tos = TryOptionSyntax('try: --geckoProfile', graph_with_jobs)
--        self.assertTrue(tos.profile)
--
--    def test_tag(self):
--        "--tag TAG sets tag to TAG value"
--        tos = TryOptionSyntax('try: --tag tagName', graph_with_jobs)
--        self.assertEqual(tos.tag, 'tagName')
--
--    def test_no_retry(self):
--        "--no-retry sets no_retry to true"
--        tos = TryOptionSyntax('try: --no-retry', graph_with_jobs)
--        self.assertTrue(tos.no_retry)
--
--
--if __name__ == '__main__':
--    main()
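
For reference, the parsing contract the deleted tests above pin down: a test spec is a test name with an optional numeric chunk suffix and an optional bracketed platform list, e.g. "gtest-1[linux,win32]". Below is a minimal sketch of that rule; parse_test_spec and SPEC are hypothetical names for illustration, not the real TryOptionSyntax code.

import re

SPEC = re.compile(r'^(?P<name>[a-z0-9-]+?)'
                  r'(?:-(?P<chunk>\d+))?'
                  r'(?:\[(?P<platforms>[^\]]+)\])?$')

def parse_test_spec(spec):
    """Parse e.g. 'gtest-1[linux,win32]' into the dict shape asserted above."""
    m = SPEC.match(spec)
    if not m:
        raise ValueError('bad test spec: %s' % spec)
    result = {'test': m.group('name')}
    if m.group('chunk'):
        result['only_chunks'] = set(m.group('chunk'))
    if m.group('platforms'):
        result['platforms'] = m.group('platforms').split(',')
    return result

assert parse_test_spec('gtest-1[linux,win32]') == \
    {'test': 'gtest', 'platforms': ['linux', 'win32'], 'only_chunks': set('1')}
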
-diff --git a/taskcluster/taskgraph/test/test_util_attributes.py b/taskcluster/taskgraph/test/test_util_attributes.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/test_util_attributes.py
-+++ /dev/null
-@@ -1,96 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--import unittest
--from taskgraph.util.attributes import (
--    attrmatch,
--    match_run_on_projects,
--)
--from mozunit import main
--
--
--class Attrmatch(unittest.TestCase):
--
--    def test_trivial_match(self):
--        """Given no conditions, anything matches"""
--        self.assertTrue(attrmatch({}))
--
--    def test_missing_attribute(self):
--        """If a filtering attribute is not present, no match"""
--        self.assertFalse(attrmatch({}, someattr=10))
--
--    def test_literal_attribute(self):
--        """Literal attributes must match exactly"""
--        self.assertTrue(attrmatch({'att': 10}, att=10))
--        self.assertFalse(attrmatch({'att': 10}, att=20))
--
--    def test_set_attribute(self):
--        """Set attributes require set membership"""
--        self.assertTrue(attrmatch({'att': 10}, att=set([9, 10])))
--        self.assertFalse(attrmatch({'att': 10}, att=set([19, 20])))
--
--    def test_callable_attribute(self):
--        """Callable attributes are called and any False causes the match to fail"""
--        self.assertTrue(attrmatch({'att': 10}, att=lambda val: True))
--        self.assertFalse(attrmatch({'att': 10}, att=lambda val: False))
--
--        def even(val):
--            return val % 2 == 0
--        self.assertTrue(attrmatch({'att': 10}, att=even))
--        self.assertFalse(attrmatch({'att': 11}, att=even))
--
--    def test_all_matches_required(self):
--        """If only one attribute does not match, the result is False"""
--        self.assertFalse(attrmatch({'a': 1}, a=1, b=2, c=3))
--        self.assertFalse(attrmatch({'a': 1, 'b': 2}, a=1, b=2, c=3))
--        self.assertTrue(attrmatch({'a': 1, 'b': 2, 'c': 3}, a=1, b=2, c=3))
--
--
--class MatchRunOnProjects(unittest.TestCase):
--
--    def test_empty(self):
--        self.assertFalse(match_run_on_projects('try', []))
--
--    def test_all(self):
--        self.assertTrue(match_run_on_projects('try', ['all']))
--        self.assertTrue(match_run_on_projects('larch', ['all']))
--        self.assertTrue(match_run_on_projects('autoland', ['all']))
--        self.assertTrue(match_run_on_projects('mozilla-inbound', ['all']))
--        self.assertTrue(match_run_on_projects('mozilla-central', ['all']))
--        self.assertTrue(match_run_on_projects('mozilla-beta', ['all']))
--        self.assertTrue(match_run_on_projects('mozilla-release', ['all']))
--
--    def test_release(self):
--        self.assertFalse(match_run_on_projects('try', ['release']))
--        self.assertFalse(match_run_on_projects('larch', ['release']))
--        self.assertFalse(match_run_on_projects('autoland', ['release']))
--        self.assertFalse(match_run_on_projects('mozilla-inbound', ['release']))
--        self.assertTrue(match_run_on_projects('mozilla-central', ['release']))
--        self.assertTrue(match_run_on_projects('mozilla-beta', ['release']))
--        self.assertTrue(match_run_on_projects('mozilla-release', ['release']))
--
--    def test_integration(self):
--        self.assertFalse(match_run_on_projects('try', ['integration']))
--        self.assertFalse(match_run_on_projects('larch', ['integration']))
--        self.assertTrue(match_run_on_projects('autoland', ['integration']))
--        self.assertTrue(match_run_on_projects('mozilla-inbound', ['integration']))
--        self.assertFalse(match_run_on_projects('mozilla-central', ['integration']))
--        self.assertFalse(match_run_on_projects('mozilla-beta', ['integration']))
--        self.assertFalse(match_run_on_projects('mozilla-release', ['integration']))
--
--    def test_combo(self):
--        self.assertTrue(match_run_on_projects('try', ['release', 'try', 'date']))
--        self.assertFalse(match_run_on_projects('larch', ['release', 'try', 'date']))
--        self.assertTrue(match_run_on_projects('date', ['release', 'try', 'date']))
--        self.assertFalse(match_run_on_projects('autoland', ['release', 'try', 'date']))
--        self.assertFalse(match_run_on_projects('mozilla-inbound', ['release', 'try', 'date']))
--        self.assertTrue(match_run_on_projects('mozilla-central', ['release', 'try', 'date']))
--        self.assertTrue(match_run_on_projects('mozilla-beta', ['release', 'try', 'date']))
--        self.assertTrue(match_run_on_projects('mozilla-release', ['release', 'try', 'date']))
--
--
--if __name__ == '__main__':
--    main()
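
The matching rules the deleted Attrmatch tests describe (literal equality, set membership, callables, with every condition required) can be sketched in a few lines. This is an illustration of that contract, not the taskgraph.util.attributes source:

def attrmatch(attributes, **conditions):
    """Return True if every condition matches the corresponding attribute."""
    for key, condition in conditions.items():
        if key not in attributes:
            return False
        value = attributes[key]
        if isinstance(condition, (set, frozenset)):
            if value not in condition:
                return False
        elif callable(condition):
            if not condition(value):
                return False
        elif condition != value:
            return False
    return True

assert attrmatch({'att': 10}, att=set([9, 10]))
assert not attrmatch({}, someattr=10)
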
-diff --git a/taskcluster/taskgraph/test/test_util_docker.py b/taskcluster/taskgraph/test/test_util_docker.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/test_util_docker.py
-+++ /dev/null
-@@ -1,216 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import os
--import shutil
--import stat
--import tarfile
--import tempfile
--import unittest
--
--from taskgraph.util import docker
--from mozunit import main, MockedOpen
--
--
--MODE_STANDARD = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH
--
--
--class TestDocker(unittest.TestCase):
--
--    def test_generate_context_hash(self):
--        tmpdir = tempfile.mkdtemp()
--        old_GECKO = docker.GECKO
--        docker.GECKO = tmpdir
--        try:
--            os.makedirs(os.path.join(tmpdir, 'docker', 'my-image'))
--            p = os.path.join(tmpdir, 'docker', 'my-image', 'Dockerfile')
--            with open(p, 'w') as f:
--                f.write("FROM node\nADD a-file\n")
--            os.chmod(p, MODE_STANDARD)
--            p = os.path.join(tmpdir, 'docker', 'my-image', 'a-file')
--            with open(p, 'w') as f:
--                f.write("data\n")
--            os.chmod(p, MODE_STANDARD)
--            self.assertEqual(
--                docker.generate_context_hash(docker.GECKO,
--                                             os.path.join(docker.GECKO, 'docker/my-image'),
--                                             'my-image'),
--                'e61e675ce05e8c11424437db3f1004079374c1a5fe6ad6800346cebe137b0797'
--            )
--        finally:
--            docker.GECKO = old_GECKO
--            shutil.rmtree(tmpdir)
--
--    def test_docker_image_explicit_registry(self):
--        files = {}
--        files["{}/myimage/REGISTRY".format(docker.IMAGE_DIR)] = "cool-images"
--        files["{}/myimage/VERSION".format(docker.IMAGE_DIR)] = "1.2.3"
--        files["{}/myimage/HASH".format(docker.IMAGE_DIR)] = "sha256:434..."
--        with MockedOpen(files):
--            self.assertEqual(docker.docker_image('myimage'), "cool-images/myimage@sha256:434...")
--
--    def test_docker_image_explicit_registry_by_tag(self):
--        files = {}
--        files["{}/myimage/REGISTRY".format(docker.IMAGE_DIR)] = "myreg"
--        files["{}/myimage/VERSION".format(docker.IMAGE_DIR)] = "1.2.3"
--        files["{}/myimage/HASH".format(docker.IMAGE_DIR)] = "sha256:434..."
--        with MockedOpen(files):
--            self.assertEqual(docker.docker_image('myimage', by_tag=True), "myreg/myimage:1.2.3")
--
--    def test_docker_image_default_registry(self):
--        files = {}
--        files["{}/REGISTRY".format(docker.IMAGE_DIR)] = "mozilla"
--        files["{}/myimage/VERSION".format(docker.IMAGE_DIR)] = "1.2.3"
--        files["{}/myimage/HASH".format(docker.IMAGE_DIR)] = "sha256:434..."
--        with MockedOpen(files):
--            self.assertEqual(docker.docker_image('myimage'), "mozilla/myimage@sha256:434...")
--
--    def test_docker_image_default_registry_by_tag(self):
--        files = {}
--        files["{}/REGISTRY".format(docker.IMAGE_DIR)] = "mozilla"
--        files["{}/myimage/VERSION".format(docker.IMAGE_DIR)] = "1.2.3"
--        files["{}/myimage/HASH".format(docker.IMAGE_DIR)] = "sha256:434..."
--        with MockedOpen(files):
--            self.assertEqual(docker.docker_image('myimage', by_tag=True), "mozilla/myimage:1.2.3")
--
--    def test_create_context_tar_basic(self):
--        tmp = tempfile.mkdtemp()
--        try:
--            d = os.path.join(tmp, 'test_image')
--            os.mkdir(d)
--            with open(os.path.join(d, 'Dockerfile'), 'a'):
--                pass
--            os.chmod(os.path.join(d, 'Dockerfile'), MODE_STANDARD)
--
--            with open(os.path.join(d, 'extra'), 'a'):
--                pass
--            os.chmod(os.path.join(d, 'extra'), MODE_STANDARD)
--
--            tp = os.path.join(tmp, 'tar')
--            h = docker.create_context_tar(tmp, d, tp, 'my_image')
--            self.assertEqual(h, '2a6d7f1627eba60daf85402418e041d728827d309143c6bc1c6bb3035bde6717')
--
--            # File prefix should be "my_image"
--            with tarfile.open(tp, 'r:gz') as tf:
--                self.assertEqual(tf.getnames(), [
--                    'my_image/Dockerfile',
--                    'my_image/extra',
--                ])
--        finally:
--            shutil.rmtree(tmp)
--
--    def test_create_context_topsrcdir_files(self):
--        tmp = tempfile.mkdtemp()
--        try:
--            d = os.path.join(tmp, 'test-image')
--            os.mkdir(d)
--            with open(os.path.join(d, 'Dockerfile'), 'wb') as fh:
--                fh.write(b'# %include extra/file0\n')
--            os.chmod(os.path.join(d, 'Dockerfile'), MODE_STANDARD)
--
--            extra = os.path.join(tmp, 'extra')
--            os.mkdir(extra)
--            with open(os.path.join(extra, 'file0'), 'a'):
--                pass
--            os.chmod(os.path.join(extra, 'file0'), MODE_STANDARD)
--
--            tp = os.path.join(tmp, 'tar')
--            h = docker.create_context_tar(tmp, d, tp, 'test_image')
--            self.assertEqual(h, '20faeb7c134f21187b142b5fadba94ae58865dc929c6c293d8cbc0a087269338')
--
--            with tarfile.open(tp, 'r:gz') as tf:
--                self.assertEqual(tf.getnames(), [
--                    'test_image/Dockerfile',
--                    'test_image/topsrcdir/extra/file0',
--                ])
--        finally:
--            shutil.rmtree(tmp)
--
--    def test_create_context_absolute_path(self):
--        tmp = tempfile.mkdtemp()
--        try:
--            d = os.path.join(tmp, 'test-image')
--            os.mkdir(d)
--
--            # Absolute paths in %include syntax are not allowed.
--            with open(os.path.join(d, 'Dockerfile'), 'wb') as fh:
--                fh.write(b'# %include /etc/shadow\n')
--
--            with self.assertRaisesRegexp(Exception, 'cannot be absolute'):
--                docker.create_context_tar(tmp, d, os.path.join(tmp, 'tar'), 'test')
--        finally:
--            shutil.rmtree(tmp)
--
--    def test_create_context_outside_topsrcdir(self):
--        tmp = tempfile.mkdtemp()
--        try:
--            d = os.path.join(tmp, 'test-image')
--            os.mkdir(d)
--
--            with open(os.path.join(d, 'Dockerfile'), 'wb') as fh:
--                fh.write(b'# %include foo/../../../etc/shadow\n')
--
--            with self.assertRaisesRegexp(Exception, 'path outside topsrcdir'):
--                docker.create_context_tar(tmp, d, os.path.join(tmp, 'tar'), 'test')
--        finally:
--            shutil.rmtree(tmp)
--
--    def test_create_context_missing_extra(self):
--        tmp = tempfile.mkdtemp()
--        try:
--            d = os.path.join(tmp, 'test-image')
--            os.mkdir(d)
--
--            with open(os.path.join(d, 'Dockerfile'), 'wb') as fh:
--                fh.write(b'# %include does/not/exist\n')
--
--            with self.assertRaisesRegexp(Exception, 'path does not exist'):
--                docker.create_context_tar(tmp, d, os.path.join(tmp, 'tar'), 'test')
--        finally:
--            shutil.rmtree(tmp)
--
--    def test_create_context_extra_directory(self):
--        tmp = tempfile.mkdtemp()
--        try:
--            d = os.path.join(tmp, 'test-image')
--            os.mkdir(d)
--
--            with open(os.path.join(d, 'Dockerfile'), 'wb') as fh:
--                fh.write(b'# %include extra\n')
--                fh.write(b'# %include file0\n')
--            os.chmod(os.path.join(d, 'Dockerfile'), MODE_STANDARD)
--
--            extra = os.path.join(tmp, 'extra')
--            os.mkdir(extra)
--            for i in range(3):
--                p = os.path.join(extra, 'file%d' % i)
--                with open(p, 'wb') as fh:
--                    fh.write(b'file%d' % i)
--                os.chmod(p, MODE_STANDARD)
--
--            with open(os.path.join(tmp, 'file0'), 'a'):
--                pass
--            os.chmod(os.path.join(tmp, 'file0'), MODE_STANDARD)
--
--            tp = os.path.join(tmp, 'tar')
--            h = docker.create_context_tar(tmp, d, tp, 'my_image')
--
--            self.assertEqual(h, 'e5440513ab46ae4c1d056269e1c6715d5da7d4bd673719d360411e35e5b87205')
--
--            with tarfile.open(tp, 'r:gz') as tf:
--                self.assertEqual(tf.getnames(), [
--                    'my_image/Dockerfile',
--                    'my_image/topsrcdir/extra/file0',
--                    'my_image/topsrcdir/extra/file1',
--                    'my_image/topsrcdir/extra/file2',
--                    'my_image/topsrcdir/file0',
--                ])
--        finally:
--            shutil.rmtree(tmp)
--
--
--if __name__ == '__main__':
--    main()
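
The expected digests in the deleted docker tests come from hashing the generated image context (file names, modes and contents). As an illustrative sketch only, a deterministic digest over a directory tree can be computed as below; directory_hash is a hypothetical helper and will not reproduce the exact digests above, which depend on the real tar-based context format:

import hashlib
import os

def directory_hash(topdir):
    """Hash file paths, modes and contents in a stable (sorted) order."""
    h = hashlib.sha256()
    for root, dirs, files in os.walk(topdir):
        dirs.sort()                      # walk subdirectories in stable order
        for name in sorted(files):
            path = os.path.join(root, name)
            h.update(os.path.relpath(path, topdir).encode('utf-8'))
            h.update(('%o' % os.stat(path).st_mode).encode('utf-8'))
            with open(path, 'rb') as f:
                h.update(f.read())
    return h.hexdigest()
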
-diff --git a/taskcluster/taskgraph/test/test_util_parameterization.py b/taskcluster/taskgraph/test/test_util_parameterization.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/test_util_parameterization.py
-+++ /dev/null
-@@ -1,70 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import unittest
--import datetime
--
--from mozunit import main
--from taskgraph.util.parameterization import (
--    resolve_timestamps,
--    resolve_task_references,
--)
--
--
--class TestTimestamps(unittest.TestCase):
--
--    def test_no_change(self):
--        now = datetime.datetime(2018, 1, 1)
--        input = {
--            "key": "value",
--            "numeric": 10,
--            "list": ["a", True, False, None],
--        }
--        self.assertEqual(resolve_timestamps(now, input), input)
--
--    def test_buried_replacement(self):
--        now = datetime.datetime(2018, 1, 1)
--        input = {"key": [{"key2": [{'relative-datestamp': '1 day'}]}]}
--        self.assertEqual(resolve_timestamps(now, input),
--                         {"key": [{"key2": ['2018-01-02T00:00:00Z']}]})
--
--    def test_appears_with_other_keys(self):
--        now = datetime.datetime(2018, 1, 1)
--        input = [{'relative-datestamp': '1 day', 'another-key': True}]
--        self.assertEqual(resolve_timestamps(now, input),
--                         [{'relative-datestamp': '1 day', 'another-key': True}])
--
--
--class TestTaskRefs(unittest.TestCase):
--
--    def test_no_change(self):
--        input = {"key": "value", "numeric": 10, "list": ["a", True, False, None]}
--        self.assertEqual(resolve_task_references('label', input, {}), input)
--
--    def test_buried_replacement(self):
--        input = {"key": [{"key2": [{'task-reference': 'taskid=<toolchain>'}]}]}
--        self.assertEqual(resolve_task_references('label', input, {'toolchain': 'abcd'}),
--                         {u'key': [{u'key2': [u'taskid=abcd']}]})
--
--    def test_appears_with_other_keys(self):
--        input = [{'task-reference': '<toolchain>', 'another-key': True}]
--        self.assertEqual(resolve_task_references('label', input, {'toolchain': 'abcd'}),
--                         [{'task-reference': '<toolchain>', 'another-key': True}])
--
--    def test_multiple_subs(self):
--        input = [{'task-reference': 'toolchain=<toolchain>, build=<build>'}]
--        self.assertEqual(
--            resolve_task_references('label', input, {'toolchain': 'abcd', 'build': 'def'}),
--            ['toolchain=abcd, build=def'])
--
--    def test_escaped(self):
--        input = [{'task-reference': '<<><toolchain>>'}]
--        self.assertEqual(resolve_task_references('label', input, {'toolchain': 'abcd'}),
--                         ['<abcd>'])
--
--
--if __name__ == '__main__':
--    main()
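
The substitution contract pinned down above: inside a {'task-reference': ...} value, <name> is replaced from a dependency map, and <<> escapes a literal '<'. A sketch over plain strings follows; resolve_refs is a hypothetical name, and the real helper also walks nested lists and dicts:

import re

def resolve_refs(text, dependencies):
    def repl(match):
        key = match.group(1)
        if key == '<':            # '<<>' escapes a literal '<'
            return '<'
        return dependencies[key]  # '<toolchain>' -> taskid of 'toolchain'
    return re.sub(r'<([^>]+)>', repl, text)

assert resolve_refs('toolchain=<toolchain>, build=<build>',
                    {'toolchain': 'abcd', 'build': 'def'}) == 'toolchain=abcd, build=def'
assert resolve_refs('<<><toolchain>>', {'toolchain': 'abcd'}) == '<abcd>'
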
-diff --git a/taskcluster/taskgraph/test/test_util_python_path.py b/taskcluster/taskgraph/test/test_util_python_path.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/test_util_python_path.py
-+++ /dev/null
-@@ -1,37 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import unittest
--from taskgraph.util import python_path
--from mozunit import main
--
--
--class TestObject(object):
--
--    testClassProperty = object()
--
--
--class TestPythonPath(unittest.TestCase):
--
--    def test_find_object_no_such_module(self):
--        """find_object raises ImportError for a nonexistent module"""
--        self.assertRaises(ImportError, python_path.find_object, "no_such_module:someobj")
--
--    def test_find_object_no_such_object(self):
--        """find_object raises AttributeError for a nonexistent object"""
--        self.assertRaises(AttributeError, python_path.find_object,
--                          "taskgraph.test.test_util_python_path:NoSuchObject")
--
--    def test_find_object_exists(self):
--        """find_object finds an existing object"""
--        from taskgraph.test.test_util_python_path import TestObject
--        obj = python_path.find_object(
--            "taskgraph.test.test_util_python_path:TestObject.testClassProperty")
--        self.assertIs(obj, TestObject.testClassProperty)
--
--
--if __name__ == '__main__':
--    main()
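
The 'module:object.attr' convention exercised above is small enough to sketch directly. This is an illustration, not the real taskgraph.util.python_path module:

import importlib
import os.path

def find_object(path):
    """Resolve 'package.module:obj.attr' to the named object."""
    modulepath, objectpath = path.split(':')
    obj = importlib.import_module(modulepath)  # ImportError if module missing
    for name in objectpath.split('.'):
        obj = getattr(obj, name)               # AttributeError if attr missing
    return obj

assert find_object('os.path:join') is os.path.join
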
-diff --git a/taskcluster/taskgraph/test/test_util_schema.py b/taskcluster/taskgraph/test/test_util_schema.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/test_util_schema.py
-+++ /dev/null
-@@ -1,140 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import unittest
--from mozunit import main
--from taskgraph.util.schema import (
--    validate_schema,
--    resolve_keyed_by,
--)
--from voluptuous import Schema
--
--schema = Schema({
--    'x': int,
--    'y': basestring,
--})
--
--
--class TestValidateSchema(unittest.TestCase):
--
--    def test_valid(self):
--        validate_schema(schema, {'x': 10, 'y': 'foo'}, "pfx")
--
--    def test_invalid(self):
--        try:
--            validate_schema(schema, {'x': 'not-int'}, "pfx")
--            self.fail("no exception raised")
--        except Exception, e:
--            self.failUnless(str(e).startswith("pfx\n"))
--
--
--class TestResolveKeyedBy(unittest.TestCase):
--
--    def test_no_by(self):
--        self.assertEqual(
--            resolve_keyed_by({'x': 10}, 'z', 'n'),
--            {'x': 10})
--
--    def test_no_by_dotted(self):
--        self.assertEqual(
--            resolve_keyed_by({'x': {'y': 10}}, 'x.z', 'n'),
--            {'x': {'y': 10}})
--
--    def test_no_by_not_dict(self):
--        self.assertEqual(
--            resolve_keyed_by({'x': 10}, 'x.y', 'n'),
--            {'x': 10})
--
--    def test_no_by_not_by(self):
--        self.assertEqual(
--            resolve_keyed_by({'x': {'a': 10}}, 'x', 'n'),
--            {'x': {'a': 10}})
--
--    def test_nested(self):
--        x = {
--            'by-foo': {
--                'F1': {
--                    'by-bar': {
--                        'B1': 11,
--                        'B2': 12,
--                    },
--                },
--                'F2': 20,
--                'default': 0,
--            },
--        }
--        self.assertEqual(
--            resolve_keyed_by({'x': x}, 'x', 'x', foo='F1', bar='B1'),
--            {'x': 11})
--        self.assertEqual(
--            resolve_keyed_by({'x': x}, 'x', 'x', foo='F1', bar='B2'),
--            {'x': 12})
--        self.assertEqual(
--            resolve_keyed_by({'x': x}, 'x', 'x', foo='F2'),
--            {'x': 20})
--        self.assertEqual(
--            resolve_keyed_by({'x': x}, 'x', 'x', foo='F99', bar='B1'),
--            {'x': 0})
--
--    def test_no_by_empty_dict(self):
--        self.assertEqual(
--            resolve_keyed_by({'x': {}}, 'x', 'n'),
--            {'x': {}})
--
--    def test_no_by_not_only_by(self):
--        self.assertEqual(
--            resolve_keyed_by({'x': {'by-y': True, 'a': 10}}, 'x', 'n'),
--            {'x': {'by-y': True, 'a': 10}})
--
--    def test_match_nested_exact(self):
--        self.assertEqual(
--            resolve_keyed_by(
--                {'f': 'shoes', 'x': {'y': {'by-f': {'shoes': 'feet', 'gloves': 'hands'}}}},
--                'x.y', 'n'),
--            {'f': 'shoes', 'x': {'y': 'feet'}})
--
--    def test_match_regexp(self):
--        self.assertEqual(
--            resolve_keyed_by(
--                {'f': 'shoes', 'x': {'by-f': {'s?[hH]oes?': 'feet', 'gloves': 'hands'}}},
--                'x', 'n'),
--            {'f': 'shoes', 'x': 'feet'})
--
--    def test_match_partial_regexp(self):
--        self.assertEqual(
--            resolve_keyed_by(
--                {'f': 'shoes', 'x': {'by-f': {'sh': 'feet', 'default': 'hands'}}},
--                'x', 'n'),
--            {'f': 'shoes', 'x': 'hands'})
--
--    def test_match_default(self):
--        self.assertEqual(
--            resolve_keyed_by(
--                {'f': 'shoes', 'x': {'by-f': {'hat': 'head', 'default': 'anywhere'}}},
--                'x', 'n'),
--            {'f': 'shoes', 'x': 'anywhere'})
--
--    def test_match_extra_value(self):
--        self.assertEqual(
--            resolve_keyed_by(
--                {'f': {'by-foo': {'x': 10, 'y': 20}}},
--                'f', 'n',
--                foo='y'),
--            {'f': 20})
--
--    def test_no_match(self):
--        self.assertRaises(
--            Exception, resolve_keyed_by,
--            {'f': 'shoes', 'x': {'by-f': {'hat': 'head'}}}, 'x', 'n')
--
--    def test_multiple_matches(self):
--        self.assertRaises(
--            Exception, resolve_keyed_by,
--            {'f': 'hats', 'x': {'by-f': {'hat.*': 'head', 'ha.*': 'hair'}}}, 'x', 'n')
--
--
--if __name__ == '__main__':
--    main()
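
A simplified sketch of the by-* resolution the deleted schema tests describe: an exact key wins first, then full-match regular expressions, then 'default'. resolve_keyed_by_value is a hypothetical name, and the real helper also handles dotted field paths and richer error reporting:

import re

def resolve_keyed_by_value(value, **context):
    while isinstance(value, dict) and len(value) == 1 and \
            list(value)[0].startswith('by-'):
        field = list(value)[0][len('by-'):]
        alternatives = value[list(value)[0]]
        key = context[field]
        if key in alternatives:
            value = alternatives[key]          # exact match wins
            continue
        matches = [v for k, v in alternatives.items()
                   if k != 'default' and re.match(k + '$', key)]
        if len(matches) > 1:
            raise Exception('multiple matches for %r' % key)
        if matches:
            value = matches[0]
        elif 'default' in alternatives:
            value = alternatives['default']
        else:
            raise Exception('no match for %r' % key)
    return value

assert resolve_keyed_by_value(
    {'by-foo': {'F1': {'by-bar': {'B1': 11, 'B2': 12}}, 'default': 0}},
    foo='F1', bar='B2') == 12
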
-diff --git a/taskcluster/taskgraph/test/test_util_templates.py b/taskcluster/taskgraph/test/test_util_templates.py
-deleted file mode 100755
---- a/taskcluster/taskgraph/test/test_util_templates.py
-+++ /dev/null
-@@ -1,233 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import unittest
--import mozunit
--import textwrap
--from taskgraph.util.templates import (
--    merge_to,
--    merge,
--    Templates,
--    TemplatesException
--)
--
--files = {}
--files['/fixtures/circular.yml'] = textwrap.dedent("""\
--    $inherits:
--      from: 'circular_ref.yml'
--      variables:
--        woot: 'inherit'
--    """)
--
--files['/fixtures/inherit.yml'] = textwrap.dedent("""\
--    $inherits:
--      from: 'templates.yml'
--      variables:
--        woot: 'inherit'
--    """)
--
--files['/fixtures/extend_child.yml'] = textwrap.dedent("""\
--    list: ['1', '2', '3']
--    was_list: ['1']
--    obj:
--      level: 1
--      deeper:
--        woot: 'bar'
--        list: ['baz']
--    """)
--
--files['/fixtures/circular_ref.yml'] = textwrap.dedent("""\
--    $inherits:
--      from: 'circular.yml'
--    """)
--
--files['/fixtures/child_pass.yml'] = textwrap.dedent("""\
--    values:
--      - {{a}}
--      - {{b}}
--      - {{c}}
--    """)
--
--files['/fixtures/inherit_pass.yml'] = textwrap.dedent("""\
--    $inherits:
--      from: 'child_pass.yml'
--      variables:
--        a: 'a'
--        b: 'b'
--        c: 'c'
--    """)
--
--files['/fixtures/deep/2.yml'] = textwrap.dedent("""\
--    $inherits:
--      from: deep/1.yml
--
--    """)
--
--files['/fixtures/deep/3.yml'] = textwrap.dedent("""\
--    $inherits:
--      from: deep/2.yml
--
--    """)
--
--files['/fixtures/deep/4.yml'] = textwrap.dedent("""\
--    $inherits:
--      from: deep/3.yml
--    """)
--
--files['/fixtures/deep/1.yml'] = textwrap.dedent("""\
--    variable: {{value}}
--    """)
--
--files['/fixtures/simple.yml'] = textwrap.dedent("""\
--    is_simple: true
--    """)
--
--files['/fixtures/templates.yml'] = textwrap.dedent("""\
--    content: 'content'
--    variable: '{{woot}}'
--    """)
--
--files['/fixtures/extend_parent.yml'] = textwrap.dedent("""\
--    $inherits:
--      from: 'extend_child.yml'
--
--    list: ['4']
--    was_list:
--      replaced: true
--    obj:
--      level: 2
--      from_parent: true
--      deeper:
--        list: ['bar']
--    """)
--
--
--class TemplatesTest(unittest.TestCase):
--
--    def setUp(self):
--        self.mocked_open = mozunit.MockedOpen(files)
--        self.mocked_open.__enter__()
--        self.subject = Templates('/fixtures')
--
--    def tearDown(self):
--        self.mocked_open.__exit__(None, None, None)
--
--    def test_invalid_path(self):
--        with self.assertRaisesRegexp(TemplatesException, 'must be a directory'):
--            Templates('/zomg/not/a/dir')
--
--    def test_no_templates(self):
--        content = self.subject.load('simple.yml', {})
--        self.assertEquals(content, {
--            'is_simple': True
--        })
--
--    def test_with_templates(self):
--        content = self.subject.load('templates.yml', {
--            'woot': 'bar'
--        })
--
--        self.assertEquals(content, {
--            'content': 'content',
--            'variable': 'bar'
--        })
--
--    def test_inheritance(self):
--        '''
--        The simple single pass inheritance case.
--        '''
--        content = self.subject.load('inherit.yml', {})
--        self.assertEqual(content, {
--            'content': 'content',
--            'variable': 'inherit'
--        })
--
--    def test_inheritance_implicit_pass(self):
--        '''
--        Implicitly pass parameters from the child to the ancestor.
--        '''
--        content = self.subject.load('inherit_pass.yml', {
--            'a': 'overridden'
--        })
--
--        self.assertEqual(content, {'values': ['overridden', 'b', 'c']})
--
--    def test_inheritance_circular(self):
--        '''
--        Circular reference handling.
--        '''
--        with self.assertRaisesRegexp(TemplatesException, 'circular'):
--            self.subject.load('circular.yml', {})
--
--    def test_deep_inheritance(self):
--        content = self.subject.load('deep/4.yml', {
--            'value': 'myvalue'
--        })
--        self.assertEqual(content, {'variable': 'myvalue'})
--
--    def test_inheritance_with_simple_extensions(self):
--        content = self.subject.load('extend_parent.yml', {})
--        self.assertEquals(content, {
--            'list': ['1', '2', '3', '4'],
--            'obj': {
--                'from_parent': True,
--                'deeper': {
--                    'woot': 'bar',
--                    'list': ['baz', 'bar']
--                },
--                'level': 2,
--            },
--            'was_list': {'replaced': True}
--        })
--
--
--class MergeTest(unittest.TestCase):
--
--    def test_merge_to_dicts(self):
--        source = {'a': 1, 'b': 2}
--        dest = {'b': '20', 'c': 30}
--        expected = {
--            'a': 1,   # source only
--            'b': 2,   # source overrides dest
--            'c': 30,  # dest only
--        }
--        self.assertEqual(merge_to(source, dest), expected)
--        self.assertEqual(dest, expected)
--
--    def test_merge_to_lists(self):
--        source = {'x': [3, 4]}
--        dest = {'x': [1, 2]}
--        expected = {'x': [1, 2, 3, 4]}  # dest first
--        self.assertEqual(merge_to(source, dest), expected)
--        self.assertEqual(dest, expected)
--
--    def test_merge_diff_types(self):
--        source = {'x': [1, 2]}
--        dest = {'x': 'abc'}
--        expected = {'x': [1, 2]}  # source wins
--        self.assertEqual(merge_to(source, dest), expected)
--        self.assertEqual(dest, expected)
--
--    def test_merge(self):
--        first = {'a': 1, 'b': 2, 'd': 11}
--        second = {'b': 20, 'c': 30}
--        third = {'c': 300, 'd': 400}
--        expected = {
--            'a': 1,
--            'b': 20,
--            'c': 300,
--            'd': 400,
--        }
--        self.assertEqual(merge(first, second, third), expected)
--
--        # inputs haven't changed..
--        self.assertEqual(first, {'a': 1, 'b': 2, 'd': 11})
--        self.assertEqual(second, {'b': 20, 'c': 30})
--        self.assertEqual(third, {'c': 300, 'd': 400})
--
--
--if __name__ == '__main__':
--    mozunit.main()
-diff --git a/taskcluster/taskgraph/test/test_util_time.py b/taskcluster/taskgraph/test/test_util_time.py
-deleted file mode 100755
---- a/taskcluster/taskgraph/test/test_util_time.py
-+++ /dev/null
-@@ -1,58 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--import unittest
--import mozunit
--from datetime import datetime
--from taskgraph.util.time import (
--    InvalidString,
--    UnknownTimeMeasurement,
--    value_of,
--    json_time_from_now
--)
--
--
--class FromNowTest(unittest.TestCase):
--
--    def test_invalid_str(self):
--        with self.assertRaises(InvalidString):
--            value_of('wtfs')
--
--    def test_missing_unit(self):
--        with self.assertRaises(InvalidString):
--            value_of('1')
--
--    def test_missing_unknown_unit(self):
--        with self.assertRaises(UnknownTimeMeasurement):
--            value_of('1z')
--
--    def test_value_of(self):
--        self.assertEqual(value_of('1s').total_seconds(), 1)
--        self.assertEqual(value_of('1 second').total_seconds(), 1)
--        self.assertEqual(value_of('1min').total_seconds(), 60)
--        self.assertEqual(value_of('1h').total_seconds(), 3600)
--        self.assertEqual(value_of('1d').total_seconds(), 86400)
--        self.assertEqual(value_of('1mo').total_seconds(), 2592000)
--        self.assertEqual(value_of('1 month').total_seconds(), 2592000)
--        self.assertEqual(value_of('1y').total_seconds(), 31536000)
--
--        with self.assertRaises(UnknownTimeMeasurement):
--            value_of('1m').total_seconds()  # ambiguous between minute and month
--
--    def test_json_from_now_utc_now(self):
--        # Just here to ensure we don't raise.
--        json_time_from_now('1 years')
--
--    def test_json_from_now(self):
--        now = datetime(2014, 1, 1)
--        self.assertEqual(json_time_from_now('1 years', now),
--                         '2015-01-01T00:00:00Z')
--        self.assertEqual(json_time_from_now('6 days', now),
--                         '2014-01-07T00:00:00Z')
--
--
--if __name__ == '__main__':
--    mozunit.main()
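
The time-string grammar the FromNowTest cases describe, sketched with ValueError standing in for the module's own InvalidString and UnknownTimeMeasurement exceptions; note that a bare 'm' is rejected as ambiguous between minutes and months:

import re
from datetime import timedelta

UNIT_SECONDS = {
    's': 1, 'second': 1, 'seconds': 1,
    'min': 60, 'minute': 60, 'minutes': 60,
    'h': 3600, 'hour': 3600, 'hours': 3600,
    'd': 86400, 'day': 86400, 'days': 86400,
    'mo': 2592000, 'month': 2592000, 'months': 2592000,
    'y': 31536000, 'year': 31536000, 'years': 31536000,
}

def value_of(input_str):
    m = re.match(r'^(\d+)\s*([A-Za-z]+)$', input_str.strip())
    if not m:
        raise ValueError('invalid time string: %r' % input_str)
    number, unit = int(m.group(1)), m.group(2).lower()
    if unit == 'm':
        raise ValueError("'m' is ambiguous between minutes and months")
    if unit not in UNIT_SECONDS:
        raise ValueError('unknown unit: %r' % unit)
    return timedelta(seconds=number * UNIT_SECONDS[unit])

assert value_of('1 month').total_seconds() == 2592000
assert value_of('6 days').total_seconds() == 518400
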
-diff --git a/taskcluster/taskgraph/test/test_util_treeherder.py b/taskcluster/taskgraph/test/test_util_treeherder.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/test_util_treeherder.py
-+++ /dev/null
-@@ -1,28 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import unittest
--from taskgraph.util.treeherder import split_symbol, join_symbol
--from mozunit import main
--
--
--class TestSymbols(unittest.TestCase):
--
--    def test_split_no_group(self):
--        self.assertEqual(split_symbol('xy'), ('?', 'xy'))
--
--    def test_split_with_group(self):
--        self.assertEqual(split_symbol('ab(xy)'), ('ab', 'xy'))
--
--    def test_join_no_group(self):
--        self.assertEqual(join_symbol('?', 'xy'), 'xy')
--
--    def test_join_with_group(self):
--        self.assertEqual(join_symbol('ab', 'xy'), 'ab(xy)')
--
--
--if __name__ == '__main__':
--    main()
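
The symbol helpers these tests cover split 'group(symbol)' notation, with '?' as the placeholder for an absent group. A minimal sketch of that round trip:

def split_symbol(treeherder_symbol):
    """'ab(xy)' -> ('ab', 'xy'); a bare symbol gets the '?' group."""
    group, symbol = '?', treeherder_symbol
    if '(' in symbol:
        group, symbol = symbol.split('(')
        symbol = symbol.rstrip(')')
    return group, symbol

def join_symbol(group, symbol):
    """The inverse: drop the group when it is the '?' placeholder."""
    if group == '?':
        return symbol
    return '{}({})'.format(group, symbol)

assert split_symbol('ab(xy)') == ('ab', 'xy')
assert join_symbol('?', 'xy') == 'xy'
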
-diff --git a/taskcluster/taskgraph/test/test_util_yaml.py b/taskcluster/taskgraph/test/test_util_yaml.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/test/test_util_yaml.py
-+++ /dev/null
-@@ -1,27 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import unittest
--
--from taskgraph.util import yaml
--from mozunit import main, MockedOpen
--
--FOO_YML = """\
--prop:
--    - val1
--"""
--
--
--class TestYaml(unittest.TestCase):
--
--    def test_load(self):
--        with MockedOpen({'/dir1/dir2/foo.yml': FOO_YML}):
--            self.assertEqual(yaml.load_yaml("/dir1/dir2", "foo.yml"),
--                             {'prop': ['val1']})
--
--
--if __name__ == '__main__':
--    main()
-diff --git a/taskcluster/taskgraph/transforms/__init__.py b/taskcluster/taskgraph/transforms/__init__.py
-deleted file mode 100644
-diff --git a/taskcluster/taskgraph/transforms/android_stuff.py b/taskcluster/taskgraph/transforms/android_stuff.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/android_stuff.py
-+++ /dev/null
-@@ -1,52 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Set dynamic task description properties of the android stuff.  Temporary!
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.transforms.job.common import SECRET_SCOPE
--
--transforms = TransformSequence()
--
--
--@transforms.add
--def setup_task(config, tasks):
--    for task in tasks:
--        task['label'] = task['name']
--        env = task['worker'].setdefault('env', {})
--        env.update({
--            'GECKO_BASE_REPOSITORY': config.params['base_repository'],
--            'GECKO_HEAD_REF': config.params['head_rev'],
--            'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
--            'GECKO_HEAD_REV': config.params['head_rev'],
--            'MOZ_BUILD_DATE': config.params['moz_build_date'],
--            'MOZ_SCM_LEVEL': config.params['level'],
--            'MH_BRANCH': config.params['project'],
--        })
--
--        task['worker'].setdefault('caches', []).append({
--            'type': 'persistent',
--            'name': 'level-{}-{}-tc-vcs'.format(
--                config.params['level'], config.params['project']),
--            'mount-point': "/home/worker/.tc-vcs",
--        })
--
--        if int(config.params['level']) > 1:
--            task['worker'].setdefault('caches', []).append({
--                'type': 'persistent',
--                'name': 'level-{}-{}-build-{}-workspace'.format(
--                    config.params['level'], config.params['project'], task['name']),
--                'mount-point': "/home/worker/workspace",
--            })
--
--        # Need appropriate scopes for secrets, from the 'build' section
--        task['worker']['taskcluster-proxy'] = True
--        task['scopes'].append(SECRET_SCOPE.format(
--            'build', config.params['level'], '*'))
--
--        del task['name']
--        yield task
-diff --git a/taskcluster/taskgraph/transforms/balrog.py b/taskcluster/taskgraph/transforms/balrog.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/balrog.py
-+++ /dev/null
-@@ -1,103 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Transform the balrog task into an actual task description.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.util.attributes import copy_attributes_from_dependent_job
--from taskgraph.util.schema import validate_schema, Schema
--from taskgraph.util.scriptworker import (get_balrog_server_scope,
--                                         get_balrog_channel_scopes)
--from taskgraph.transforms.task import task_description_schema
--from voluptuous import Any, Required, Optional
--
--
--# Voluptuous uses marker objects as dictionary *keys*, but they are not
--# comparable, so we cast all of the keys back to regular strings
--task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
--
--transforms = TransformSequence()
--
--# shortcut for a string where task references are allowed
--taskref_or_string = Any(
--    basestring,
--    {Required('task-reference'): basestring})
--
--balrog_description_schema = Schema({
--    # the dependent task (object) for this balrog job, used to inform balrogworker.
--    Required('dependent-task'): object,
--
--    # unique label to describe this balrog task, defaults to balrog-{dep.label}
--    Optional('label'): basestring,
--
--    # treeherder is allowed here to override any defaults we use for balrog.  See
--    # taskcluster/taskgraph/transforms/task.py for the schema details, and the
--    # below transforms for defaults of various values.
--    Optional('treeherder'): task_description_schema['treeherder'],
--})
--
--
--@transforms.add
--def validate(config, jobs):
--    for job in jobs:
--        label = job.get('dependent-task', object).__dict__.get('label', '?no-label?')
--        yield validate_schema(
--            balrog_description_schema, job,
--            "In balrog ({!r} kind) task for {!r}:".format(config.kind, label))
--
--
--@transforms.add
--def make_task_description(config, jobs):
--    for job in jobs:
--        dep_job = job['dependent-task']
--
--        treeherder = job.get('treeherder', {})
--        treeherder.setdefault('symbol', 'tc-Up(N)')
--        dep_th_platform = dep_job.task.get('extra', {}).get(
--            'treeherder', {}).get('machine', {}).get('platform', '')
--        treeherder.setdefault('platform',
--                              "{}/opt".format(dep_th_platform))
--        treeherder.setdefault('tier', 1)
--        treeherder.setdefault('kind', 'build')
--
--        attributes = copy_attributes_from_dependent_job(dep_job)
--
--        if dep_job.attributes.get('locale'):
--            treeherder['symbol'] = 'tc-Up({})'.format(dep_job.attributes.get('locale'))
--            attributes['locale'] = dep_job.attributes.get('locale')
--
--        label = job.get('label', "balrog-{}".format(dep_job.label))
--
--        upstream_artifacts = [{
--            "taskId": {"task-reference": "<beetmover>"},
--            "taskType": "beetmover",
--            "paths": [
--                "public/manifest.json"
--            ],
--        }]
--
--        server_scope = get_balrog_server_scope(config)
--        channel_scopes = get_balrog_channel_scopes(config)
--
--        task = {
--            'label': label,
--            'description': "{} Balrog".format(
--                dep_job.task["metadata"]["description"]),
--            # do we have to define worker type somewhere?
--            'worker-type': 'scriptworker-prov-v1/balrogworker-v1',
--            'worker': {
--                'implementation': 'balrog',
--                'upstream-artifacts': upstream_artifacts,
--            },
--            'scopes': [server_scope] + channel_scopes,
--            'dependencies': {'beetmover': dep_job.label},
--            'attributes': attributes,
--            'run-on-projects': dep_job.attributes.get('run_on_projects'),
--            'treeherder': treeherder,
--        }
--
--        yield task
-diff --git a/taskcluster/taskgraph/transforms/base.py b/taskcluster/taskgraph/transforms/base.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/base.py
-+++ /dev/null
-@@ -1,60 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--
--class TransformConfig(object):
--    """A container for configuration affecting transforms.  The `config`
--    argument to transforms is an instance of this class, possibly with
--    additional kind-specific attributes beyond those set here."""
--    def __init__(self, kind, path, config, params,
--                 kind_dependencies_tasks=None):
--        # the name of the current kind
--        self.kind = kind
--
--        # the path to the kind configuration directory
--        self.path = path
--
--        # the parsed contents of kind.yml
--        self.config = config
--
--        # the parameters for this task-graph generation run
--        self.params = params
--
--        # a list of all the tasks associated with the kind dependencies of the
--        # current kind
--        self.kind_dependencies_tasks = kind_dependencies_tasks
--
--
--class TransformSequence(object):
--    """
--    Container for a sequence of transforms.  Each transform is represented as a
--    callable taking (config, items) and returning a generator which will yield
--    transformed items.  The resulting sequence has the same interface.
--
--    This is convenient to use in a file full of transforms, as it provides a
--    decorator, @transforms.add, that will add the decorated function to the
--    sequence.
--    """
--
--    def __init__(self, transforms=None):
--        self.transforms = transforms or []
--
--    def __call__(self, config, items):
--        for xform in self.transforms:
--            items = xform(config, items)
--            if items is None:
--                raise Exception("Transform {} is not a generator".format(xform))
--        return items
--
--    def __repr__(self):
--        return '\n'.join(
--            ['TransformSequence(['] +
--            [repr(x) for x in self.transforms] +
--            ['])'])
--
--    def add(self, func):
--        self.transforms.append(func)
--        return func
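
The docstring above describes the decorator pattern; a minimal, hypothetical transform file built on it would read as follows (add_label and drop_name are illustrative names, not transforms from the tree):

# A minimal, hypothetical transform file built on the TransformSequence
# defined above; each step receives (config, items) and yields items.
from __future__ import absolute_import, print_function, unicode_literals

from taskgraph.transforms.base import TransformSequence

transforms = TransformSequence()


@transforms.add
def add_label(config, jobs):
    for job in jobs:
        job.setdefault('label', '{}-{}'.format(config.kind, job['name']))
        yield job


@transforms.add
def drop_name(config, jobs):
    for job in jobs:
        del job['name']
        yield job

# Calling transforms(config, jobs) chains the generators in the order the
# functions were added: add_label first, then drop_name.
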
-diff --git a/taskcluster/taskgraph/transforms/beetmover.py b/taskcluster/taskgraph/transforms/beetmover.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/beetmover.py
-+++ /dev/null
-@@ -1,425 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Transform the beetmover task into an actual task description.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.util.attributes import copy_attributes_from_dependent_job
--from taskgraph.util.schema import validate_schema, Schema
--from taskgraph.util.scriptworker import (get_beetmover_bucket_scope,
--                                         get_beetmover_action_scope)
--from taskgraph.transforms.task import task_description_schema
--from voluptuous import Any, Required, Optional
--
--
--# Until bug 1331141 is fixed, if you are adding any new artifacts here that
--# need to be transferred to S3, please be aware you also need to follow up
--# with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
--# See example in bug 1348286
--_DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_EN_US = [
--    "balrog_props.json",
--    "target.common.tests.zip",
--    "target.cppunittest.tests.zip",
--    "target.crashreporter-symbols.zip",
--    "target.json",
--    "target.mochitest.tests.zip",
--    "target.mozinfo.json",
--    "target.reftest.tests.zip",
--    "target.talos.tests.zip",
--    "target.awsy.tests.zip",
--    "target.test_packages.json",
--    "target.txt",
--    "target.web-platform.tests.tar.gz",
--    "target.xpcshell.tests.zip",
--    "target_info.txt",
--    "target.jsshell.zip",
--    "mozharness.zip",
--    "target.langpack.xpi",
--]
--
--# Until bug 1331141 is fixed, if you are adding any new artifacts here that
--# need to be transferred to S3, please be aware you also need to follow up
--# with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
--# See example in bug 1348286
--_DESKTOP_UPSTREAM_ARTIFACTS_SIGNED_EN_US = [
--    "update/target.complete.mar",
--]
--# Until bug 1331141 is fixed, if you are adding any new artifacts here that
--# need to be transferred to S3, please be aware you also need to follow up
--# with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
--# See example in bug 1348286
--_DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_L10N = [
--    "target.langpack.xpi",
--    "balrog_props.json",
--]
--# Until bug 1331141 is fixed, if you are adding any new artifacts here that
--# need to be transferred to S3, please be aware you also need to follow up
--# with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
--# See example in bug 1348286
--_DESKTOP_UPSTREAM_ARTIFACTS_SIGNED_L10N = [
--    "target.complete.mar",
--]
--# Until bug 1331141 is fixed, if you are adding any new artifacts here that
--# need to be transferred to S3, please be aware you also need to follow up
--# with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
--# See example in bug 1348286
--_MOBILE_UPSTREAM_ARTIFACTS_UNSIGNED_EN_US = [
--    "en-US/target.common.tests.zip",
--    "en-US/target.cppunittest.tests.zip",
--    "en-US/target.crashreporter-symbols.zip",
--    "en-US/target.json",
--    "en-US/target.mochitest.tests.zip",
--    "en-US/target.mozinfo.json",
--    "en-US/target.reftest.tests.zip",
--    "en-US/target.talos.tests.zip",
--    "en-US/target.awsy.tests.zip",
--    "en-US/target.test_packages.json",
--    "en-US/target.txt",
--    "en-US/target.web-platform.tests.tar.gz",
--    "en-US/target.xpcshell.tests.zip",
--    "en-US/target_info.txt",
--    "en-US/bouncer.apk",
--    "en-US/mozharness.zip",
--    "en-US/robocop.apk",
--    "en-US/target.jsshell.zip",
--]
--# Until bug 1331141 is fixed, if you are adding any new artifacts here that
--# need to be transferred to S3, please be aware you also need to follow up
--# with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
--# See example in bug 1348286
--_MOBILE_UPSTREAM_ARTIFACTS_UNSIGNED_MULTI = [
--    "balrog_props.json",
--    "target.common.tests.zip",
--    "target.cppunittest.tests.zip",
--    "target.json",
--    "target.mochitest.tests.zip",
--    "target.mozinfo.json",
--    "target.reftest.tests.zip",
--    "target.talos.tests.zip",
--    "target.awsy.tests.zip",
--    "target.test_packages.json",
--    "target.txt",
--    "target.web-platform.tests.tar.gz",
--    "target.xpcshell.tests.zip",
--    "target_info.txt",
--    "bouncer.apk",
--    "mozharness.zip",
--    "robocop.apk",
--    "target.jsshell.zip",
--]
--# Until bug 1331141 is fixed, if you are adding any new artifacts here that
--# need to be transferred to S3, please be aware you also need to follow up
--# with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
--# See example in bug 1348286
--_MOBILE_UPSTREAM_ARTIFACTS_SIGNED_EN_US = [
--    "en-US/target.apk",
--]
--# Until bug 1331141 is fixed, if you are adding any new artifacts here that
--# need to be transferred to S3, please be aware you also need to follow up
--# with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
--# See example in bug 1348286
--_MOBILE_UPSTREAM_ARTIFACTS_SIGNED_MULTI = [
--    "target.apk",
--]
--
--
--# Until bug 1331141 is fixed, if you are adding any new artifacts here that
--# need to be transferred to S3, please be aware you also need to follow up
--# with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
--# See example in bug 1348286
--UPSTREAM_ARTIFACT_UNSIGNED_PATHS = {
--    'linux64-nightly': _DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_EN_US + [
--        "host/bin/mar",
--        "host/bin/mbsdiff",
--    ],
--    'linux-nightly': _DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_EN_US + [
--        "host/bin/mar",
--        "host/bin/mbsdiff",
--    ],
--    'linux64-devedition-nightly': _DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_EN_US + [
--        "host/bin/mar",
--        "host/bin/mbsdiff",
--    ],
--    'linux-devedition-nightly': _DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_EN_US + [
--        "host/bin/mar",
--        "host/bin/mbsdiff",
--    ],
--    'android-x86-nightly': _MOBILE_UPSTREAM_ARTIFACTS_UNSIGNED_EN_US,
--    'android-aarch64-nightly': _MOBILE_UPSTREAM_ARTIFACTS_UNSIGNED_EN_US,
--    'android-api-16-nightly': _MOBILE_UPSTREAM_ARTIFACTS_UNSIGNED_EN_US,
--    'android-x86-old-id-nightly': _MOBILE_UPSTREAM_ARTIFACTS_UNSIGNED_EN_US,
--    'android-api-16-old-id-nightly': _MOBILE_UPSTREAM_ARTIFACTS_UNSIGNED_EN_US,
--    'macosx64-nightly': _DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_EN_US + [
--        "host/bin/mar",
--        "host/bin/mbsdiff",
--    ],
--    'macosx64-devedition-nightly': _DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_EN_US + [
--        "host/bin/mar",
--        "host/bin/mbsdiff",
--    ],
--    'win32-nightly': _DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_EN_US + [
--        "host/bin/mar.exe",
--        "host/bin/mbsdiff.exe",
--    ],
--    'win32-devedition-nightly': _DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_EN_US + [
--        "host/bin/mar.exe",
--        "host/bin/mbsdiff.exe",
--    ],
--    'win64-nightly': _DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_EN_US + [
--        "host/bin/mar.exe",
--        "host/bin/mbsdiff.exe",
--    ],
--    'win64-devedition-nightly': _DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_EN_US + [
--        "host/bin/mar.exe",
--        "host/bin/mbsdiff.exe",
--    ],
--    'linux64-nightly-l10n': _DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_L10N,
--    'linux-nightly-l10n': _DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_L10N,
--    'android-x86-nightly-multi': _MOBILE_UPSTREAM_ARTIFACTS_UNSIGNED_MULTI,
--    'android-x86-old-id-nightly-multi': _MOBILE_UPSTREAM_ARTIFACTS_UNSIGNED_MULTI,
--    'android-aarch64-nightly-multi': _MOBILE_UPSTREAM_ARTIFACTS_UNSIGNED_MULTI,
--    'android-api-16-nightly-l10n': ["balrog_props.json"],
--    'android-api-16-nightly-multi': _MOBILE_UPSTREAM_ARTIFACTS_UNSIGNED_MULTI,
--    'android-api-16-old-id-nightly-multi': _MOBILE_UPSTREAM_ARTIFACTS_UNSIGNED_MULTI,
--    'macosx64-nightly-l10n': _DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_L10N,
--    'win32-nightly-l10n': _DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_L10N,
--    'win64-nightly-l10n': _DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_L10N,
--}
--# Until bug 1331141 is fixed, if you are adding any new artifacts here that
--# need to be transferred to S3, please be aware you also need to follow up
--# with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
--# See example in bug 1348286
--UPSTREAM_ARTIFACT_SIGNED_PATHS = {
--    'linux64-nightly': _DESKTOP_UPSTREAM_ARTIFACTS_SIGNED_EN_US + [
--        "target.tar.bz2",
--        "target.tar.bz2.asc",
--    ],
--    'linux-nightly': _DESKTOP_UPSTREAM_ARTIFACTS_SIGNED_EN_US + [
--        "target.tar.bz2",
--        "target.tar.bz2.asc",
--    ],
--    'linux64-devedition-nightly': _DESKTOP_UPSTREAM_ARTIFACTS_SIGNED_EN_US + [
--        "target.tar.bz2",
--        "target.tar.bz2.asc",
--    ],
--    'linux-devedition-nightly': _DESKTOP_UPSTREAM_ARTIFACTS_SIGNED_EN_US + [
--        "target.tar.bz2",
--        "target.tar.bz2.asc",
--    ],
--    'android-x86-nightly': ["en-US/target.apk"],
--    'android-aarch64-nightly': ["en-US/target.apk"],
--    'android-api-16-nightly': ["en-US/target.apk"],
--    'android-x86-old-id-nightly': ["en-US/target.apk"],
--    'android-api-16-old-id-nightly': ["en-US/target.apk"],
--    'macosx64-nightly': _DESKTOP_UPSTREAM_ARTIFACTS_SIGNED_EN_US + [
--        "target.dmg",
--        "target.dmg.asc",
--    ],
--    'macosx64-devedition-nightly': _DESKTOP_UPSTREAM_ARTIFACTS_SIGNED_EN_US + [
--        "target.dmg",
--        "target.dmg.asc",
--    ],
--    'win32-nightly': _DESKTOP_UPSTREAM_ARTIFACTS_SIGNED_EN_US + [
--        "target.zip",
--    ],
--    'win32-devedition-nightly': _DESKTOP_UPSTREAM_ARTIFACTS_SIGNED_EN_US + [
--        "target.zip",
--    ],
--    'win64-nightly': _DESKTOP_UPSTREAM_ARTIFACTS_SIGNED_EN_US + [
--        "target.zip",
--    ],
--    'win64-devedition-nightly': _DESKTOP_UPSTREAM_ARTIFACTS_SIGNED_EN_US + [
--        "target.zip",
--    ],
--    'linux64-nightly-l10n': _DESKTOP_UPSTREAM_ARTIFACTS_SIGNED_L10N + [
--        "target.tar.bz2",
--        "target.tar.bz2.asc",
--    ],
--    'linux-nightly-l10n': _DESKTOP_UPSTREAM_ARTIFACTS_SIGNED_L10N + [
--        "target.tar.bz2",
--        "target.tar.bz2.asc",
--    ],
--    'android-x86-nightly-multi': ["target.apk"],
--    'android-x86-old-id-nightly-multi': ["target.apk"],
--    'android-aarch64-nightly-multi': ["target.apk"],
--    'android-api-16-nightly-l10n': ["target.apk"],
--    'android-api-16-nightly-multi': ["target.apk"],
--    'android-api-16-old-id-nightly-multi': ["target.apk"],
--    'macosx64-nightly-l10n': _DESKTOP_UPSTREAM_ARTIFACTS_SIGNED_L10N + [
--        "target.dmg",
--        "target.dmg.asc",
--    ],
--    'win32-nightly-l10n': _DESKTOP_UPSTREAM_ARTIFACTS_SIGNED_L10N + [
--        "target.zip",
--    ],
--    'win64-nightly-l10n': _DESKTOP_UPSTREAM_ARTIFACTS_SIGNED_L10N + [
--        "target.zip",
--    ],
--
--}
--
--# Voluptuous uses marker objects as dictionary *keys*, but they are not
--# comparable, so we cast all of the keys back to regular strings
--task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
--
--transforms = TransformSequence()
--
--# shortcut for a string where task references are allowed
--taskref_or_string = Any(
--    basestring,
--    {Required('task-reference'): basestring})
--
--beetmover_description_schema = Schema({
--    # the dependent task (object) for this beetmover job, used to inform beetmover.
--    Required('dependent-task'): object,
--
--    # depname is used in task references to identify the taskID of the unsigned things
--    Required('depname', default='build'): basestring,
--
--    # unique label to describe this beetmover task, defaults to beetmover-{dep.label}
--    Optional('label'): basestring,
--
--    # treeherder is allowed here to override any defaults we use for beetmover.  See
--    # taskcluster/taskgraph/transforms/task.py for the schema details, and the
--    # below transforms for defaults of various values.
--    Optional('treeherder'): task_description_schema['treeherder'],
--
--    # locale is passed only for l10n beetmoving
--    Optional('locale'): basestring,
--})
--
--
--@transforms.add
--def validate(config, jobs):
--    for job in jobs:
--        label = job.get('dependent-task', object).__dict__.get('label', '?no-label?')
--        yield validate_schema(
--            beetmover_description_schema, job,
--            "In beetmover ({!r} kind) task for {!r}:".format(config.kind, label))
--
--
--@transforms.add
--def make_task_description(config, jobs):
--    for job in jobs:
--        dep_job = job['dependent-task']
--
--        treeherder = job.get('treeherder', {})
--        treeherder.setdefault('symbol', 'tc(BM-S)')
--        dep_th_platform = dep_job.task.get('extra', {}).get(
--            'treeherder', {}).get('machine', {}).get('platform', '')
--        treeherder.setdefault('platform',
--                              "{}/opt".format(dep_th_platform))
--        treeherder.setdefault('tier', 1)
--        treeherder.setdefault('kind', 'build')
--        label = job.get('label', "beetmover-{}".format(dep_job.label))
--        dependent_kind = str(dep_job.kind)
--        dependencies = {dependent_kind: dep_job.label}
--
--        if len(dep_job.dependencies) > 1:
--            raise NotImplementedError(
--                "Can't beetmove a signing task with multiple dependencies")
--        signing_dependencies = dep_job.dependencies
--        dependencies.update(signing_dependencies)
--
--        attributes = copy_attributes_from_dependent_job(dep_job)
--
--        if job.get('locale'):
--            attributes['locale'] = job['locale']
--
--        bucket_scope = get_beetmover_bucket_scope(config)
--        action_scope = get_beetmover_action_scope(config)
--
--        task = {
--            'label': label,
--            'description': "{} Beetmover".format(
--                dep_job.task["metadata"]["description"]),
--            'worker-type': 'scriptworker-prov-v1/beetmoverworker-v1',
--            'scopes': [bucket_scope, action_scope],
--            'dependencies': dependencies,
--            'attributes': attributes,
--            'run-on-projects': dep_job.attributes.get('run_on_projects'),
--            'treeherder': treeherder,
--        }
--
--        yield task
--
--
--def generate_upstream_artifacts(signing_task_ref, build_task_ref, platform,
--                                locale=None):
--    build_mapping = UPSTREAM_ARTIFACT_UNSIGNED_PATHS
--    signing_mapping = UPSTREAM_ARTIFACT_SIGNED_PATHS
--
--    artifact_prefix = 'public/build'
--    if locale:
--        artifact_prefix = 'public/build/{}'.format(locale)
--        platform = "{}-l10n".format(platform)
--
--    upstream_artifacts = [{
--        "taskId": {"task-reference": build_task_ref},
--        "taskType": "build",
--        "paths": ["{}/{}".format(artifact_prefix, p)
--                  for p in build_mapping[platform]],
--        "locale": locale or "en-US",
--        }, {
--        "taskId": {"task-reference": signing_task_ref},
--        "taskType": "signing",
--        "paths": ["{}/{}".format(artifact_prefix, p)
--                  for p in signing_mapping[platform]],
--        "locale": locale or "en-US",
--    }]
--
--    if not locale and "android" in platform:
--        # edge case to support 'multi' locale paths
--        multi_platform = "{}-multi".format(platform)
--        upstream_artifacts.extend([{
--            "taskId": {"task-reference": build_task_ref},
--            "taskType": "build",
--            "paths": ["{}/{}".format(artifact_prefix, p)
--                      for p in build_mapping[multi_platform]],
--            "locale": "multi",
--            }, {
--            "taskId": {"task-reference": signing_task_ref},
--            "taskType": "signing",
--            "paths": ["{}/{}".format(artifact_prefix, p)
--                      for p in signing_mapping[multi_platform]],
--            "locale": "multi",
--        }])
--
--    return upstream_artifacts
--
--
--@transforms.add
--def make_task_worker(config, jobs):
--    for job in jobs:
--        valid_beetmover_job = (len(job["dependencies"]) == 2 and
--                               any(['signing' in j for j in job['dependencies']]))
--        if not valid_beetmover_job:
--            raise NotImplementedError("Beetmover must have two dependencies.")
--
--        locale = job["attributes"].get("locale")
--        platform = job["attributes"]["build_platform"]
--        build_task = None
--        signing_task = None
--        for dependency in job["dependencies"].keys():
--            if 'signing' in dependency:
--                signing_task = dependency
--            else:
--                build_task = dependency
--
--        signing_task_ref = "<" + str(signing_task) + ">"
--        build_task_ref = "<" + str(build_task) + ">"
--        upstream_artifacts = generate_upstream_artifacts(
--            signing_task_ref, build_task_ref, platform, locale
--        )
--
--        worker = {'implementation': 'beetmover',
--                  'upstream-artifacts': upstream_artifacts}
--        if locale:
--            worker["locale"] = locale
--        job["worker"] = worker
--
--        yield job
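
For orientation, a sketch of the payload shape make_task_worker assembles,
with hypothetical dependency labels; the "<label>" strings are task
references that task creation later replaces with concrete taskIds:

    signing_task_ref = "<build-signing>"  # hypothetical dependency labels
    build_task_ref = "<build>"

    upstream_artifacts = [{
        "taskId": {"task-reference": build_task_ref},
        "taskType": "build",
        "paths": ["public/build/target.txt"],
        "locale": "en-US",
    }, {
        "taskId": {"task-reference": signing_task_ref},
        "taskType": "signing",
        "paths": ["public/build/update/target.complete.mar"],
        "locale": "en-US",
    }]
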
-diff --git a/taskcluster/taskgraph/transforms/beetmover_checksums.py b/taskcluster/taskgraph/transforms/beetmover_checksums.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/beetmover_checksums.py
-+++ /dev/null
-@@ -1,157 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Transform the checksums signing task into an actual task description.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.util.attributes import copy_attributes_from_dependent_job
--from taskgraph.util.schema import validate_schema, Schema
--from taskgraph.util.scriptworker import (get_beetmover_bucket_scope,
--                                         get_beetmover_action_scope)
--from taskgraph.transforms.task import task_description_schema
--from voluptuous import Any, Required, Optional
--
--# Voluptuous uses marker objects as dictionary *keys*, but they are not
--# comparable, so we cast all of the keys back to regular strings
--task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
--
--transforms = TransformSequence()
--
--taskref_or_string = Any(
--    basestring,
--    {Required('task-reference'): basestring})
--
--beetmover_checksums_description_schema = Schema({
--    Required('dependent-task'): object,
--    Required('depname', default='build'): basestring,
--    Optional('label'): basestring,
--    Optional('treeherder'): task_description_schema['treeherder'],
--    Optional('locale'): basestring,
--})
--
--
--@transforms.add
--def validate(config, jobs):
--    for job in jobs:
--        label = job.get('dependent-task', object).__dict__.get('label', '?no-label?')
--        yield validate_schema(
--            beetmover_checksums_description_schema, job,
--            "In checksums-signing ({!r} kind) task for {!r}:".format(config.kind, label))
--
--
--@transforms.add
--def make_beetmover_checksums_description(config, jobs):
--    for job in jobs:
--        dep_job = job['dependent-task']
--
--        treeherder = job.get('treeherder', {})
--        treeherder.setdefault('symbol', 'tc-BMcs(N)')
--        dep_th_platform = dep_job.task.get('extra', {}).get(
--            'treeherder', {}).get('machine', {}).get('platform', '')
--        treeherder.setdefault('platform',
--                              "{}/opt".format(dep_th_platform))
--        treeherder.setdefault('tier', 1)
--        treeherder.setdefault('kind', 'build')
--
--        label = job.get('label', "beetmover-{}".format(dep_job.label))
--        dependent_kind = str(dep_job.kind)
--        dependencies = {dependent_kind: dep_job.label}
--        for k, v in dep_job.dependencies.items():
--            if k.startswith('beetmover'):
--                dependencies[k] = v
--
--        attributes = copy_attributes_from_dependent_job(dep_job)
--
--        if dep_job.attributes.get('locale'):
--            treeherder['symbol'] = 'tc-BMcs({})'.format(dep_job.attributes.get('locale'))
--            attributes['locale'] = dep_job.attributes.get('locale')
--
--        bucket_scope = get_beetmover_bucket_scope(config)
--        action_scope = get_beetmover_action_scope(config)
--
--        task = {
--            'label': label,
--            'description': "Beetmover {} ".format(
--                dep_job.task["metadata"]["description"]),
--            'worker-type': 'scriptworker-prov-v1/beetmoverworker-v1',
--            'scopes': [bucket_scope, action_scope],
--            'dependencies': dependencies,
--            'attributes': attributes,
--            'run-on-projects': dep_job.attributes.get('run_on_projects'),
--            'treeherder': treeherder,
--        }
--
--        yield task
--
--
--def generate_upstream_artifacts(refs, platform, locale=None):
--    # Until bug 1331141 is fixed, if you are adding any new artifacts here that
--    # need to be transferred to S3, please be aware you also need to follow up
--    # with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
--    # See example in bug 1348286
--    common_paths = [
--        "public/target.checksums",
--        "public/target.checksums.asc",
--    ]
--
--    upstream_artifacts = [{
--        "taskId": {"task-reference": refs["signing"]},
--        "taskType": "signing",
--        "paths": common_paths,
--        "locale": locale or "en-US",
--    }, {
--        "taskId": {"task-reference": refs["beetmover"]},
--        "taskType": "beetmover",
--        "paths": ["public/balrog_props.json"],
--        "locale": locale or "en-US",
--    }]
--
--    if not locale and "android" in platform:
--        # edge case to support 'multi' locale paths
--        upstream_artifacts.extend([{
--            "taskId": {"task-reference": refs["signing"]},
--            "taskType": "signing",
--            "paths": common_paths,
--            "locale": "multi"
--        }])
--
--    return upstream_artifacts
--
--
--@transforms.add
--def make_beetmover_checksums_worker(config, jobs):
--    for job in jobs:
--        valid_beetmover_job = (len(job["dependencies"]) == 2)
--        if not valid_beetmover_job:
--            raise NotImplementedError("Beetmover checksums must have two dependencies.")
--
--        locale = job["attributes"].get("locale")
--        platform = job["attributes"]["build_platform"]
--
--        refs = {
--            "beetmover": None,
--            "signing": None,
--        }
--        for dependency in job["dependencies"].keys():
--            if dependency.startswith("beetmover"):
--                refs['beetmover'] = "<{}>".format(dependency)
--            else:
--                refs['signing'] = "<{}>".format(dependency)
--        if None in refs.values():
--            raise NotImplementedError(
--                "Beetmover checksums must have a beetmover and signing dependency!")
--
--        upstream_artifacts = generate_upstream_artifacts(refs,
--                                                         platform, locale)
--
--        worker = {'implementation': 'beetmover',
--                  'upstream-artifacts': upstream_artifacts}
--        if locale:
--            worker["locale"] = locale
--        job["worker"] = worker
--
--        yield job
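
The description schemas in these transforms are voluptuous Schema objects; a
standalone sketch of the validation flow with a made-up job dict (str stands
in for basestring outside Python 2):

    from voluptuous import Optional, Required, Schema

    schema = Schema({
        Required('depname', default='build'): str,
        Optional('label'): str,
    })

    job = schema({'label': 'beetmover-checksums-linux64-nightly'})
    # validation fills in defaults for missing Required keys:
    # {'label': 'beetmover-checksums-linux64-nightly', 'depname': 'build'}
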
-diff --git a/taskcluster/taskgraph/transforms/beetmover_l10n.py b/taskcluster/taskgraph/transforms/beetmover_l10n.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/beetmover_l10n.py
-+++ /dev/null
-@@ -1,40 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Transform the signing task into an actual task description.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.util.treeherder import join_symbol
--
--transforms = TransformSequence()
--
--
--@transforms.add
--def make_beetmover_description(config, jobs):
--    for job in jobs:
--        dep_job = job['dependent-task']
--        for locale in dep_job.attributes.get('chunk_locales', []):
--
--            label = job.get('label',
--                            "beetmover-{}-{}".format(locale, dep_job.label))
--
--            group = 'tc-BM-L10n'
--
--            # add the locale code
--            symbol = locale
--
--            treeherder = {
--                'symbol': join_symbol(group, symbol),
--            }
--
--            beet_description = {
--                'dependent-task': dep_job,
--                'treeherder': treeherder,
--                'label': label,
--                'locale': locale,
--            }
--            yield beet_description
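
A sketch of the symbol formatting join_symbol is used for here; the real
helper lives in taskgraph.util.treeherder, and the grouped rendering below is
an assumption based on how Treeherder displays group(symbol):

    def join_symbol(group, symbol):
        # grouped Treeherder symbols render as group(symbol)
        return '{}({})'.format(group, symbol)

    join_symbol('tc-BM-L10n', 'de')  # -> 'tc-BM-L10n(de)'
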
-diff --git a/taskcluster/taskgraph/transforms/beetmover_repackage.py b/taskcluster/taskgraph/transforms/beetmover_repackage.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/beetmover_repackage.py
-+++ /dev/null
-@@ -1,342 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Transform the beetmover task into an actual task description.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.util.attributes import copy_attributes_from_dependent_job
--from taskgraph.util.schema import validate_schema, Schema
--from taskgraph.util.scriptworker import (get_beetmover_bucket_scope,
--                                         get_beetmover_action_scope)
--from taskgraph.transforms.task import task_description_schema
--from voluptuous import Any, Required, Optional
--
--import logging
--import re
--
--logger = logging.getLogger(__name__)
--
--
--_WINDOWS_BUILD_PLATFORMS = [
--    'win64-nightly',
--    'win32-nightly'
--]
--
--# Until bug 1331141 is fixed, if you are adding any new artifacts here that
--# need to be transferred to S3, please be aware you also need to follow up
--# with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
--# See example in bug 1348286
--_DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_EN_US = [
--    "balrog_props.json",
--    "target.common.tests.zip",
--    "target.cppunittest.tests.zip",
--    "target.crashreporter-symbols.zip",
--    "target.json",
--    "target.mochitest.tests.zip",
--    "target.mozinfo.json",
--    "target.reftest.tests.zip",
--    "target.talos.tests.zip",
--    "target.awsy.tests.zip",
--    "target.test_packages.json",
--    "target.txt",
--    "target.web-platform.tests.tar.gz",
--    "target.xpcshell.tests.zip",
--    "target_info.txt",
--    "target.jsshell.zip",
--    "mozharness.zip",
--    "target.langpack.xpi",
--]
--
--# Until bug 1331141 is fixed, if you are adding any new artifacts here that
--# need to be transferred to S3, please be aware you also need to follow up
--# with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
--# See example in bug 1348286
--_DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_L10N = [
--    "target.langpack.xpi",
--    "balrog_props.json",
--]
--
--# Until bug 1331141 is fixed, if you are adding any new artifacts here that
--# need to be transferred to S3, please be aware you also need to follow up
--# with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
--# See example in bug 1348286
--UPSTREAM_ARTIFACT_UNSIGNED_PATHS = {
--    r'^(linux(|64)|macosx64)-nightly$': _DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_EN_US + [
--        'host/bin/mar',
--        'host/bin/mbsdiff',
--    ],
--    r'^win(32|64)-nightly$': _DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_EN_US + [
--        "host/bin/mar.exe",
--        "host/bin/mbsdiff.exe",
--    ],
--    r'^(linux(|64)|macosx64|win(32|64))-nightly-l10n$': _DESKTOP_UPSTREAM_ARTIFACTS_UNSIGNED_L10N,
--}
--
--# Until bug 1331141 is fixed, if you are adding any new artifacts here that
--# need to be transferred to S3, please be aware you also need to follow up
--# with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
--# See example in bug 1348286
--UPSTREAM_ARTIFACT_SIGNED_PATHS = {
--    r'^linux(|64)-nightly(|-l10n)$': ['target.tar.bz2', 'target.tar.bz2.asc'],
--    r'^win(32|64)-nightly(|-l10n)$': ['target.zip'],
--}
--
--# Until bug 1331141 is fixed, if you are adding any new artifacts here that
--# need to be transferred to S3, please be aware you also need to follow up
--# with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
--# See example in bug 1348286
--UPSTREAM_ARTIFACT_REPACKAGE_PATHS = {
--    r'^macosx64-nightly(|-l10n)$': ['target.dmg'],
--}
--# Until bug 1331141 is fixed, if you are adding any new artifacts here that
--# need to be transferred to S3, please be aware you also need to follow up
--# with a beetmover patch in https://github.com/mozilla-releng/beetmoverscript/.
--# See example in bug 1348286
--UPSTREAM_ARTIFACT_SIGNED_REPACKAGE_PATHS = {
--    r'^(linux(|64)|macosx64)-nightly(|-l10n)$': ['target.complete.mar'],
--    r'^win64-nightly(|-l10n)$': ['target.complete.mar', 'target.installer.exe'],
--    r'^win32-nightly(|-l10n)$': [
--        'target.complete.mar',
--        'target.installer.exe',
--        'target.stub-installer.exe'
--    ],
--}
--
--# Compile every regex once at import time
--for dict_ in (
--    UPSTREAM_ARTIFACT_UNSIGNED_PATHS, UPSTREAM_ARTIFACT_SIGNED_PATHS,
--    UPSTREAM_ARTIFACT_REPACKAGE_PATHS, UPSTREAM_ARTIFACT_SIGNED_REPACKAGE_PATHS,
--):
--    # items() materializes a list on Python 2, so the dict can safely be
--    # mutated inside the loop (iterating with iteritems() could not)
--    for uncompiled_regex, value in dict_.items():
--        compiled_regex = re.compile(uncompiled_regex)
--        del dict_[uncompiled_regex]
--        dict_[compiled_regex] = value
--
--# Voluptuous uses marker objects as dictionary *keys*, but they are not
--# comparable, so we cast all of the keys back to regular strings
--task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
--
--transforms = TransformSequence()
--
--# shortcut for a string where task references are allowed
--taskref_or_string = Any(
--    basestring,
--    {Required('task-reference'): basestring})
--
--beetmover_description_schema = Schema({
--    # the dependent task (object) for this beetmover job, used to inform beetmover.
--    Required('dependent-task'): object,
--
--    # depname is used in task references to identify the taskID of the unsigned things
--    Required('depname', default='build'): basestring,
--
--    # unique label to describe this beetmover task, defaults to beetmover-{dep.label}
--    Optional('label'): basestring,
--
--    # treeherder is allowed here to override any defaults we use for beetmover.  See
--    # taskcluster/taskgraph/transforms/task.py for the schema details, and the
--    # below transforms for defaults of various values.
--    Optional('treeherder'): task_description_schema['treeherder'],
--
--    # locale is passed only for l10n beetmoving
--    Optional('locale'): basestring,
--})
--
--
--@transforms.add
--def validate(config, jobs):
--    for job in jobs:
--        label = job.get('dependent-task', object).__dict__.get('label', '?no-label?')
--        yield validate_schema(
--            beetmover_description_schema, job,
--            "In beetmover ({!r} kind) task for {!r}:".format(config.kind, label))
--
--
--@transforms.add
--def make_task_description(config, jobs):
--    for job in jobs:
--        dep_job = job['dependent-task']
--
--        treeherder = job.get('treeherder', {})
--        treeherder.setdefault('symbol', 'tc(BM-R)')
--        dep_th_platform = dep_job.task.get('extra', {}).get(
--            'treeherder', {}).get('machine', {}).get('platform', '')
--        treeherder.setdefault('platform',
--                              "{}/opt".format(dep_th_platform))
--        treeherder.setdefault('tier', 1)
--        treeherder.setdefault('kind', 'build')
--        label = job.get('label', "beetmover-{}".format(dep_job.label))
--
--        dependent_kind = str(dep_job.kind)
--        dependencies = {dependent_kind: dep_job.label}
--
--        if 'docker-image' in dep_job.dependencies:
--            # macosx nightly builds depend on repackage, which uses in-tree
--            # docker images and thus has two dependencies; carry the
--            # docker-image dependency over alongside the others added below
--            docker_dependencies = {"docker-image":
--                                   dep_job.dependencies['docker-image']
--                                   }
--            dependencies.update(docker_dependencies)
--
--        signing_name = "build-signing"
--        if job.get('locale'):
--            signing_name = "nightly-l10n-signing"
--        signing_dependencies = {signing_name:
--                                dep_job.dependencies[signing_name]
--                                }
--        dependencies.update(signing_dependencies)
--
--        build_name = "build"
--        if job.get('locale'):
--            build_name = "unsigned-repack"
--        build_dependencies = {"build":
--                              dep_job.dependencies[build_name]
--                              }
--        dependencies.update(build_dependencies)
--
--        repackage_name = "repackage"
--        # repackage-l10n actually uses the repackage depname here
--        repackage_dependencies = {"repackage":
--                                  dep_job.dependencies[repackage_name]
--                                  }
--        dependencies.update(repackage_dependencies)
--
--        attributes = copy_attributes_from_dependent_job(dep_job)
--        if job.get('locale'):
--            attributes['locale'] = job['locale']
--
--        bucket_scope = get_beetmover_bucket_scope(config)
--        action_scope = get_beetmover_action_scope(config)
--
--        task = {
--            'label': label,
--            'description': "{} Beetmover".format(
--                dep_job.task["metadata"]["description"]),
--            'worker-type': 'scriptworker-prov-v1/beetmoverworker-v1',
--            'scopes': [bucket_scope, action_scope],
--            'dependencies': dependencies,
--            'attributes': attributes,
--            'run-on-projects': dep_job.attributes.get('run_on_projects'),
--            'treeherder': treeherder,
--        }
--
--        yield task
--
--
--def generate_upstream_artifacts(build_task_ref, build_signing_task_ref,
--                                repackage_task_ref, repackage_signing_task_ref,
--                                platform, locale=None):
--
--    build_mapping = UPSTREAM_ARTIFACT_UNSIGNED_PATHS
--    build_signing_mapping = UPSTREAM_ARTIFACT_SIGNED_PATHS
--    repackage_mapping = UPSTREAM_ARTIFACT_REPACKAGE_PATHS
--    repackage_signing_mapping = UPSTREAM_ARTIFACT_SIGNED_REPACKAGE_PATHS
--
--    artifact_prefix = 'public/build'
--    if locale:
--        artifact_prefix = 'public/build/{}'.format(locale)
--        platform = "{}-l10n".format(platform)
--
--    upstream_artifacts = []
--
--    task_refs = [
--        build_task_ref,
--        build_signing_task_ref,
--        repackage_task_ref,
--        repackage_signing_task_ref
--    ]
--    tasktypes = ['build', 'signing', 'repackage', 'repackage']
--    mappings = [
--        build_mapping,
--        build_signing_mapping,
--        repackage_mapping,
--        repackage_signing_mapping
--    ]
--
--    for ref, tasktype, mapping in zip(task_refs, tasktypes, mappings):
--        platform_was_previously_matched_by_regex = None
--        for platform_regex, paths in mapping.iteritems():
--            if platform_regex.match(platform) is not None:
--                _check_platform_matched_only_one_regex(
--                    tasktype, platform, platform_was_previously_matched_by_regex, platform_regex
--                )
--
--                upstream_artifacts.append({
--                    "taskId": {"task-reference": ref},
--                    "taskType": tasktype,
--                    "paths": ["{}/{}".format(artifact_prefix, path) for path in paths],
--                    "locale": locale or "en-US",
--                })
--                platform_was_previously_matched_by_regex = platform_regex
--
--    return upstream_artifacts
--
--
--def _check_platform_matched_only_one_regex(
--    task_type, platform, platform_was_previously_matched_by_regex, platform_regex
--):
--    if platform_was_previously_matched_by_regex is not None:
--        raise Exception('In task type "{task_type}", platform "{platform}" matches at \
--least 2 regular expressions. First matched: "{first_matched}". Second matched: \
--"{second_matched}"'.format(
--            task_type=task_type, platform=platform,
--            first_matched=platform_was_previously_matched_by_regex.pattern,
--            second_matched=platform_regex.pattern
--        ))
--
--
--def is_valid_beetmover_job(job):
--    # windows builds don't have docker-image, so fewer dependencies
--    if any(b in job['attributes']['build_platform'] for b in _WINDOWS_BUILD_PLATFORMS):
--        expected_dep_count = 4
--    else:
--        expected_dep_count = 5
--
--    return (len(job["dependencies"]) == expected_dep_count and
--            any(['repackage' in j for j in job['dependencies']]))
--
--
--@transforms.add
--def make_task_worker(config, jobs):
--    for job in jobs:
--        if not is_valid_beetmover_job(job):
--            raise NotImplementedError("Beetmover_repackage must have five dependencies.")
--
--        locale = job["attributes"].get("locale")
--        platform = job["attributes"]["build_platform"]
--        build_task = None
--        build_signing_task = None
--        repackage_task = None
--        repackage_signing_task = None
--        for dependency in job["dependencies"].keys():
--            if 'repackage-signing' in dependency:
--                repackage_signing_task = dependency
--            elif 'repackage' in dependency:
--                repackage_task = dependency
--            elif 'signing' in dependency:
--                # catches build-signing and nightly-l10n-signing
--                build_signing_task = dependency
--            else:
--                build_task = "build"
--
--        build_task_ref = "<" + str(build_task) + ">"
--        build_signing_task_ref = "<" + str(build_signing_task) + ">"
--        repackage_task_ref = "<" + str(repackage_task) + ">"
--        repackage_signing_task_ref = "<" + str(repackage_signing_task) + ">"
--        upstream_artifacts = generate_upstream_artifacts(
--            build_task_ref, build_signing_task_ref, repackage_task_ref,
--            repackage_signing_task_ref, platform, locale
--        )
--
--        worker = {'implementation': 'beetmover',
--                  'upstream-artifacts': upstream_artifacts}
--        if locale:
--            worker["locale"] = locale
--        job["worker"] = worker
--
--        yield job
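
A standalone sketch of the regex-keyed lookup generate_upstream_artifacts
performs, using a trimmed copy of the mappings above; each platform must
match exactly one pattern or _check_platform_matched_only_one_regex raises:

    import re

    SIGNED_PATHS = {
        re.compile(r'^linux(|64)-nightly(|-l10n)$'): ['target.tar.bz2'],
        re.compile(r'^win(32|64)-nightly(|-l10n)$'): ['target.zip'],
    }

    platform = 'linux64-nightly-l10n'
    matches = [paths for regex, paths in SIGNED_PATHS.items()
               if regex.match(platform)]
    assert len(matches) == 1
    # matches[0] == ['target.tar.bz2']
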
-diff --git a/taskcluster/taskgraph/transforms/beetmover_repackage_l10n.py b/taskcluster/taskgraph/transforms/beetmover_repackage_l10n.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/beetmover_repackage_l10n.py
-+++ /dev/null
-@@ -1,44 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Transform the signing task into an actual task description.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.util.treeherder import join_symbol
--
--transforms = TransformSequence()
--
--
--@transforms.add
--def make_beetmover_description(config, jobs):
--    for job in jobs:
--        dep_job = job['dependent-task']
--
--        locale = dep_job.attributes.get('locale')
--        if not locale:
--            yield job
--            continue
--
--        label = job.get('label',
--                        "beetmover-{}-{}".format(locale, dep_job.label))
--
--        group = 'tc-BMR-L10n'
--
--        # add the locale code
--        symbol = locale
--
--        treeherder = {
--            'symbol': join_symbol(group, symbol),
--        }
--
--        beet_description = {
--            'dependent-task': dep_job,
--            'treeherder': treeherder,
--            'label': label,
--            'locale': locale,
--        }
--        yield beet_description
-diff --git a/taskcluster/taskgraph/transforms/build.py b/taskcluster/taskgraph/transforms/build.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/build.py
-+++ /dev/null
-@@ -1,54 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Apply some defaults and minor modifications to the jobs defined in the build
--kind.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.util.workertypes import worker_type_implementation
--
--transforms = TransformSequence()
--
--
--@transforms.add
--def set_defaults(config, jobs):
--    """Set defaults, including those that differ per worker implementation"""
--    for job in jobs:
--        job['treeherder'].setdefault('kind', 'build')
--        job['treeherder'].setdefault('tier', 1)
--        job.setdefault('needs-sccache', True)
--        _, worker_os = worker_type_implementation(job['worker-type'])
--        worker = job.setdefault('worker', {})
--        if worker_os == "linux":
--            worker.setdefault('docker-image', {'in-tree': 'desktop-build'})
--            worker['chain-of-trust'] = True
--            extra = job.setdefault('extra', {})
--            extra.setdefault('chainOfTrust', {})
--            extra['chainOfTrust'].setdefault('inputs', {})
--            extra['chainOfTrust']['inputs']['docker-image'] = {
--                "task-reference": "<docker-image>"
--            }
--        elif worker_os == "windows":
--            worker.setdefault('env', {})
--            worker['chain-of-trust'] = True
--        elif worker_os == "macosx":
--            worker.setdefault('env', {})
--
--        yield job
--
--
--@transforms.add
--def set_env(config, jobs):
--    """Set extra environment variables from try command line."""
--    for job in jobs:
--        env = config.config['args'].env
--        if env:
--            job_env = {}
--            if 'worker' in job:
--                job_env = job['worker']['env']
--            job_env.update(dict(x.split('=') for x in env))
--        yield job
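
set_env splits each extra try-syntax environment argument on '='; a sketch
with hypothetical values (a value containing '=' would need split('=', 1)
instead):

    env_args = ['MOZ_LOG=example:5', 'DISPLAY=:2']  # hypothetical --env values
    job_env = dict(x.split('=') for x in env_args)
    # {'MOZ_LOG': 'example:5', 'DISPLAY': ':2'}
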
-diff --git a/taskcluster/taskgraph/transforms/build_attrs.py b/taskcluster/taskgraph/transforms/build_attrs.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/build_attrs.py
-+++ /dev/null
-@@ -1,33 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--
--transforms = TransformSequence()
--
--
--@transforms.add
--def set_build_attributes(config, jobs):
--    """
--    Set the build_platform and build_type attributes based on the job name.
--    Although not all jobs using this transform are actual "builds", the try
--    option syntax treats them as such, and this arranges the attributes
--    appropriately for that purpose.
--    """
--    for job in jobs:
--        build_platform, build_type = job['name'].split('/')
--
--        # pgo builds are represented as a different platform, type opt
--        if build_type == 'pgo':
--            build_platform = build_platform + '-pgo'
--            build_type = 'opt'
--
--        attributes = job.setdefault('attributes', {})
--        attributes.update({
--            'build_platform': build_platform,
--            'build_type': build_type,
--        })
--
--        yield job
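
A sketch of the attribute derivation for a hypothetical pgo job name:

    name = 'linux64/pgo'
    build_platform, build_type = name.split('/')
    if build_type == 'pgo':
        # pgo builds become a separate platform with type opt
        build_platform += '-pgo'
        build_type = 'opt'
    # build_platform == 'linux64-pgo', build_type == 'opt'
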
-diff --git a/taskcluster/taskgraph/transforms/build_lints.py b/taskcluster/taskgraph/transforms/build_lints.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/build_lints.py
-+++ /dev/null
-@@ -1,58 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Apply some defaults and minor modifications to the jobs defined in the build
--kind.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--
--transforms = TransformSequence()
--
--
--@transforms.add
--def check_mozharness_perfherder_options(config, jobs):
--    """Verify that multiple jobs don't use the same perfherder bucket.
--
--    Build jobs record perfherder metrics by default. Perfherder metrics go
--    to a bucket derived from the platform by default. The name can further be
--    customized by the presence of "extra options" either defined in
--    mozharness sub-configs or in an environment variable.
--
--    This linter tries to verify that no two jobs will send Perfherder metrics
--    to the same bucket by looking for jobs not defining extra options when
--    their platform or mozharness config are otherwise similar.
--    """
--    seen_configs = {}
--
--    for job in jobs:
--        if job['run']['using'] != 'mozharness':
--            yield job
--            continue
--
--        worker = job.get('worker', {})
--
--        platform = job['treeherder']['platform']
--        primary_config = job['run']['config'][0]
--        options = worker.get('env', {}).get('PERFHERDER_EXTRA_OPTIONS')
--        nightly = job.get('attributes', {}).get('nightly', False)
--
--        # This isn't strictly necessary. But the Perfherder code looking at the
--        # values we care about is only active on builds. So it doesn't make
--        # sense to run this linter elsewhere.
--        assert primary_config.startswith('builds/')
--
--        key = (platform, primary_config, nightly, options)
--
--        if key in seen_configs:
--            raise Exception('Non-unique Perfherder data collection for jobs '
--                            '%s and %s: set PERFHERDER_EXTRA_OPTIONS in worker '
--                            'environment variables or use different mozconfigs'
--                            % (job['name'], seen_configs[key]['name']))
--
--        seen_configs[key] = job
--
--        yield job
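
The uniqueness check above reduces to hashing a tuple of the fields that
determine the Perfherder bucket; a sketch with made-up job summaries:

    seen_configs = {}
    keys = [  # (platform, primary config, nightly, extra options)
        ('linux64/opt', 'builds/releng_base_linux_64_builds.py', False, None),
        ('linux64/opt', 'builds/releng_base_linux_64_builds.py', False, 'fuzzing'),
    ]
    for key in keys:
        assert key not in seen_configs, 'duplicate Perfherder bucket'
        seen_configs[key] = True  # both pass: the extra options differ
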
-diff --git a/taskcluster/taskgraph/transforms/build_signing.py b/taskcluster/taskgraph/transforms/build_signing.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/build_signing.py
-+++ /dev/null
-@@ -1,64 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Transform the signing task into an actual task description.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.util.signed_artifacts import generate_specifications_of_artifacts_to_sign
--
--
--transforms = TransformSequence()
--
--
--@transforms.add
--def add_signed_routes(config, jobs):
--    """Add routes corresponding to the routes of the build task
--       this corresponds to, with .signed inserted, for all gecko.v2 routes"""
--
--    for job in jobs:
--        dep_job = job['dependent-task']
--
--        job['routes'] = []
--        if dep_job.attributes.get('nightly'):
--            for dep_route in dep_job.task.get('routes', []):
--                if not dep_route.startswith('index.gecko.v2'):
--                    continue
--                branch = dep_route.split(".")[3]
--                rest = ".".join(dep_route.split(".")[4:])
--                job['routes'].append(
--                    'index.gecko.v2.{}.signed-nightly.{}'.format(branch, rest))
--
--        yield job
--
--
--@transforms.add
--def define_upstream_artifacts(config, jobs):
--    for job in jobs:
--        dep_job = job['dependent-task']
--        build_platform = dep_job.attributes.get('build_platform')
--
--        artifacts_specifications = generate_specifications_of_artifacts_to_sign(
--            build_platform,
--            dep_job.attributes.get('nightly'),
--            keep_locale_template=False
--        )
--
--        if 'android' in build_platform:
--            # We're in the job that creates both multilocale and en-US APKs
--            artifacts_specifications[0]['artifacts'].append('public/build/en-US/target.apk')
--
--        job['upstream-artifacts'] = [{
--            'taskId': {'task-reference': '<build>'},
--            'taskType': 'build',
--            'paths': spec['artifacts'],
--            'formats': spec['formats'],
--        } for spec in artifacts_specifications]
--
--        label = dep_job.label.replace("build-", "signing-")
--        job['label'] = label
--
--        yield job
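
A sketch of the route rewriting add_signed_routes performs, on a plausible
but made-up gecko.v2 route:

    dep_route = 'index.gecko.v2.mozilla-central.nightly.latest.firefox.linux64-opt'
    parts = dep_route.split('.')
    branch, rest = parts[3], '.'.join(parts[4:])
    signed = 'index.gecko.v2.{}.signed-nightly.{}'.format(branch, rest)
    # index.gecko.v2.mozilla-central.signed-nightly.nightly.latest.firefox.linux64-opt
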
-diff --git a/taskcluster/taskgraph/transforms/checksums_signing.py b/taskcluster/taskgraph/transforms/checksums_signing.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/checksums_signing.py
-+++ /dev/null
-@@ -1,95 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Transform the checksums signing task into an actual task description.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.util.attributes import copy_attributes_from_dependent_job
--from taskgraph.util.schema import validate_schema, Schema
--from taskgraph.util.scriptworker import get_signing_cert_scope
--from taskgraph.transforms.task import task_description_schema
--from voluptuous import Any, Required, Optional
--
--# Voluptuous uses marker objects as dictionary *keys*, but they are not
--# comparable, so we cast all of the keys back to regular strings
--task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
--
--transforms = TransformSequence()
--
--taskref_or_string = Any(
--    basestring,
--    {Required('task-reference'): basestring})
--
--checksums_signing_description_schema = Schema({
--    Required('dependent-task'): object,
--    Required('depname', default='beetmover'): basestring,
--    Optional('label'): basestring,
--    Optional('treeherder'): task_description_schema['treeherder'],
--})
--
--
--@transforms.add
--def validate(config, jobs):
--    for job in jobs:
--        label = job.get('dependent-task', object).__dict__.get('label', '?no-label?')
--        yield validate_schema(
--            checksums_signing_description_schema, job,
--            "In checksums-signing ({!r} kind) task for {!r}:".format(config.kind, label))
--
--
--@transforms.add
--def make_checksums_signing_description(config, jobs):
--    for job in jobs:
--        dep_job = job['dependent-task']
--
--        treeherder = job.get('treeherder', {})
--        treeherder.setdefault('symbol', 'tc-cs(N)')
--        dep_th_platform = dep_job.task.get('extra', {}).get(
--            'treeherder', {}).get('machine', {}).get('platform', '')
--        treeherder.setdefault('platform',
--                              "{}/opt".format(dep_th_platform))
--        treeherder.setdefault('tier', 1)
--        treeherder.setdefault('kind', 'build')
--
--        label = job.get('label', "checksumssigning-{}".format(dep_job.label))
--        dependencies = {"beetmover": dep_job.label}
--
--        attributes = copy_attributes_from_dependent_job(dep_job)
--
--        if dep_job.attributes.get('locale'):
--            treeherder['symbol'] = 'tc-cs({})'.format(dep_job.attributes.get('locale'))
--            attributes['locale'] = dep_job.attributes.get('locale')
--
--        upstream_artifacts = [{
--            "taskId": {"task-reference": "<beetmover>"},
--            "taskType": "beetmover",
--            "paths": [
--                "public/target.checksums",
--            ],
--            "formats": ["gpg"]
--        }]
--
--        signing_cert_scope = get_signing_cert_scope(config)
--        task = {
--            'label': label,
--            'description': "Checksum signing {} ".format(
--                dep_job.task["metadata"]["description"]),
--            'worker-type': "scriptworker-prov-v1/signing-linux-v1",
--            'worker': {'implementation': 'scriptworker-signing',
--                       'upstream-artifacts': upstream_artifacts,
--                       'max-run-time': 3600},
--            'scopes': [
--                signing_cert_scope,
--                "project:releng:signing:format:gpg"
--            ],
--            'dependencies': dependencies,
--            'attributes': attributes,
--            'run-on-projects': dep_job.attributes.get('run_on_projects'),
--            'treeherder': treeherder,
--        }
--
--        yield task
-diff --git a/taskcluster/taskgraph/transforms/docker_image.py b/taskcluster/taskgraph/transforms/docker_image.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/docker_image.py
-+++ /dev/null
-@@ -1,126 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Transform the docker-image kind into actual task descriptions that build
--the in-tree docker images used by dependent tasks.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import os
--
--from taskgraph.transforms.base import TransformSequence
--from .. import GECKO
--from taskgraph.util.docker import (
--    docker_image,
--    generate_context_hash,
--    INDEX_PREFIX,
--)
--
--transforms = TransformSequence()
--
--ROUTE_TEMPLATES = [
--    'index.{index_prefix}.level-{level}.{image_name}.latest',
--    'index.{index_prefix}.level-{level}.{image_name}.pushdate.{year}.{month}-{day}-{pushtime}',
--    'index.{index_prefix}.level-{level}.{image_name}.hash.{context_hash}',
--]
--
--
--@transforms.add
--def fill_template(config, tasks):
--    for task in tasks:
--        image_name = task.pop('name')
--        job_symbol = task.pop('symbol')
--
--        context_path = os.path.join('taskcluster', 'docker', image_name)
--        context_hash = generate_context_hash(GECKO, context_path, image_name)
--
--        description = 'Build the docker image {} for use by dependent tasks'.format(
--            image_name)
--
--        routes = []
--        for tpl in ROUTE_TEMPLATES:
--            routes.append(tpl.format(
--                index_prefix=INDEX_PREFIX,
--                level=config.params['level'],
--                image_name=image_name,
--                project=config.params['project'],
--                head_rev=config.params['head_rev'],
--                pushlog_id=config.params.get('pushlog_id', 0),
--                pushtime=config.params['moz_build_date'][8:],
--                year=config.params['moz_build_date'][0:4],
--                month=config.params['moz_build_date'][4:6],
--                day=config.params['moz_build_date'][6:8],
--                context_hash=context_hash,
--            ))
--
--        # As an optimization, if an image with this context hash already
--        # exists at a higher level, that image task ID is reused.  The
--        # reasoning is that eventually everything ends up on level 3, so if
--        # most tasks share a common image for a given context hash, a worker
--        # within Taskcluster does not need to keep a separate copy per branch.
--        optimizations = [['index-search', '{}.level-{}.{}.hash.{}'.format(
--            INDEX_PREFIX, level, image_name, context_hash)]
--            for level in reversed(range(int(config.params['level']), 4))]
--
--        # Adjust the zstandard compression level based on the execution level.
--        # We use faster compression for level 1 because we care more about
--        # end-to-end times. We use slower/better compression for other levels
--        # because images are read more often and it is worth the trade-off to
--        # burn more CPU once to reduce image size.
--        zstd_level = '3' if int(config.params['level']) == 1 else '10'
--
--        # include some information that is useful in reconstructing this task
--        # from JSON
--        taskdesc = {
--            'label': 'build-docker-image-' + image_name,
--            'description': description,
--            'attributes': {'image_name': image_name},
--            'expires-after': '1 year',
--            'routes': routes,
--            'optimizations': optimizations,
--            'scopes': ['secrets:get:project/taskcluster/gecko/hgfingerprint'],
--            'treeherder': {
--                'symbol': job_symbol,
--                'platform': 'taskcluster-images/opt',
--                'kind': 'other',
--                'tier': 1,
--            },
--            'run-on-projects': [],
--            'worker-type': 'aws-provisioner-v1/gecko-images',
--            # can't use {in-tree: ..} here, otherwise we might try to build
--            # this image..
--            'worker': {
--                'implementation': 'docker-worker',
--                'os': 'linux',
--                'docker-image': docker_image('image_builder'),
--                'caches': [{
--                    'type': 'persistent',
--                    'name': 'level-{}-imagebuilder-v1'.format(config.params['level']),
--                    'mount-point': '/home/worker/checkouts',
--                }],
--                'artifacts': [{
--                    'type': 'file',
--                    'path': '/home/worker/workspace/artifacts/image.tar.zst',
--                    'name': 'public/image.tar.zst',
--                }],
--                'env': {
--                    'HG_STORE_PATH': '/home/worker/checkouts/hg-store',
--                    'HASH': context_hash,
--                    'PROJECT': config.params['project'],
--                    'IMAGE_NAME': image_name,
--                    'DOCKER_IMAGE_ZSTD_LEVEL': zstd_level,
--                    'GECKO_BASE_REPOSITORY': config.params['base_repository'],
--                    'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
--                    'GECKO_HEAD_REV': config.params['head_rev'],
--                },
--                'chain-of-trust': True,
--                'docker-in-docker': True,
--                'taskcluster-proxy': True,
--                'max-run-time': 7200,
--            },
--        }
--
--        yield taskdesc
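A rough standalone sketch (Python 3) of the route construction removed above. ROUTE_TEMPLATES and INDEX_PREFIX are defined earlier in this transform and are not visible in this hunk, so the values below are made-up stand-ins:

    # Hypothetical stand-ins; the real templates live earlier in
    # taskcluster/taskgraph/transforms/docker_image.py.
    ROUTE_TEMPLATES = [
        'index.{index_prefix}.level-{level}.{image_name}.latest',
        'index.{index_prefix}.level-{level}.{image_name}.hash.{context_hash}',
        'index.{index_prefix}.level-{level}.{image_name}.pushdate.{year}.{month}-{day}-{pushtime}',
    ]
    INDEX_PREFIX = 'docker.images.v2'

    params = {
        'level': '3',
        'project': 'mozilla-central',
        'head_rev': 'abcdef123456',
        'pushlog_id': 42,
        'moz_build_date': '20170401123456',  # YYYYMMDDhhmmss
    }

    routes = [tpl.format(
        index_prefix=INDEX_PREFIX,
        level=params['level'],
        image_name='desktop-build',
        project=params['project'],
        head_rev=params['head_rev'],
        pushlog_id=params.get('pushlog_id', 0),
        pushtime=params['moz_build_date'][8:],   # hhmmss
        year=params['moz_build_date'][0:4],
        month=params['moz_build_date'][4:6],
        day=params['moz_build_date'][6:8],
        context_hash='deadbeef',
    ) for tpl in ROUTE_TEMPLATES]

    for route in routes:
        print(route)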
-diff --git a/taskcluster/taskgraph/transforms/gecko_v2_whitelist.py b/taskcluster/taskgraph/transforms/gecko_v2_whitelist.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/gecko_v2_whitelist.py
-+++ /dev/null
-@@ -1,124 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--This file contains a whitelist of gecko.v2 index route job names.  The intent
--of this whitelist is to raise an alarm when new jobs are added.  If those jobs
--already run in Buildbot, then it's important that the generated index routes
--match (and that only one of Buildbot and TaskCluster be tier-1 at any time).
--If the jobs are new and never ran in Buildbot, then their job name can be added
--here without any further fuss.
--
--Once all jobs have been ported from Buildbot, this file can be removed.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--# please keep me in lexical order
--JOB_NAME_WHITELIST = set([
--    'android-aarch64-opt',
--    'android-api-16-debug',
--    'android-api-16-gradle-dependencies-opt',
--    'android-api-16-gradle-opt',
--    'android-api-16-opt',
--    'android-api-16-old-id-opt',
--    'android-x86-opt',
--    'android-x86-old-id-opt',
--    'browser-haz-debug',
--    'linux-debug',
--    'linux-devedition',
--    'linux-devedition-nightly-repackage',
--    'linux-devedition-nightly-repackage-signing',
--    'linux-nightly-repackage',
--    'linux-nightly-repackage-signing',
--    'linux-opt',
--    'linux-pgo',
--    'linux64-add-on-devel',
--    'linux64-artifact-opt',
--    'linux64-asan-debug',
--    'linux64-asan-opt',
--    'linux64-base-toolchains-debug',
--    'linux64-base-toolchains-opt',
--    'linux64-fuzzing-asan-opt',
--    'linux64-ccov-opt',
--    'linux64-clang-tidy',
--    'linux64-debug',
--    'linux64-devedition',
--    'linux64-devedition-nightly-repackage',
--    'linux64-devedition-nightly-repackage-signing',
--    'linux64-jsdcov-opt',
--    'linux64-nightly-repackage',
--    'linux64-nightly-repackage-signing',
--    'linux64-noopt-debug',
--    'linux64-opt',
--    'linux64-pgo',
--    'linux64-st-an-debug',
--    'linux64-st-an-opt',
--    'linux64-stylo-debug',
--    'linux64-stylo-opt',
--    'linux64-valgrind-opt',
--    'linux64-dmd-opt',
--    'macosx64-add-on-devel',
--    'macosx64-clang-tidy',
--    'macosx64-debug',
--    'macosx64-nightly-repackage',
--    'macosx64-nightly-repackage-signing',
--    'macosx64-noopt-debug',
--    'macosx64-opt',
--    'macosx64-devedition-nightly-repackage',
--    'macosx64-devedition-nightly-repackage-signing',
--    'macosx64-st-an-debug',
--    'macosx64-st-an-opt',
--    'macosx64-dmd-opt',
--    'shell-haz-debug',
--    'sm-arm-sim-debug',
--    'sm-arm64-sim-debug',
--    'sm-asan-opt',
--    'sm-compacting-debug',
--    'sm-fuzzing',
--    'sm-mozjs-sys-debug',
--    'sm-msan-opt',
--    'sm-nonunified-debug',
--    'sm-package-opt',
--    'sm-plain-opt',
--    'sm-plaindebug-debug',
--    'sm-rootanalysis-debug',
--    'sm-tsan-opt',
--    'win32-add-on-devel',
--    'win32-clang-tidy',
--    'win32-debug',
--    'win32-devedition-nightly-repackage',
--    'win32-devedition-nightly-repackage-signing',
--    'win32-devedition-opt',
--    'win32-nightly-repackage',
--    'win32-nightly-repackage-signing',
--    'win32-noopt-debug',
--    'win32-opt',
--    'win32-pgo',
--    'win32-st-an-debug',
--    'win32-st-an-opt',
--    'win32-dmd-opt',
--    'win64-add-on-devel',
--    'win64-clang-tidy',
--    'win64-debug',
--    'win64-devedition-opt',
--    'win64-devedition-nightly-repackage',
--    'win64-devedition-nightly-repackage-signing',
--    'win64-nightly-repackage',
--    'win64-nightly-repackage-signing',
--    'win64-noopt-debug',
--    'win64-opt',
--    'win64-pgo',
--    'win64-st-an-debug',
--    'win64-st-an-opt',
--    'win64-asan-debug',
--    'win64-asan-opt',
--    'win64-dmd-opt',
--])
--
--JOB_NAME_WHITELIST_ERROR = """\
--The gecko-v2 job name {} is not in the whitelist in gecko_v2_whitelist.py.
--If this job runs on Buildbot, please ensure that the job names match between
--Buildbot and TaskCluster, then add the job name to the whitelist.  If this is a
--new job, there is nothing to check -- just add the job to the whitelist.
--"""
-diff --git a/taskcluster/taskgraph/transforms/job/__init__.py b/taskcluster/taskgraph/transforms/job/__init__.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/job/__init__.py
-+++ /dev/null
-@@ -1,217 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Convert a job description into a task description.
--
--Jobs descriptions are similar to task descriptions, but they specify how to run
--the job at a higher level, using a "run" field that can be interpreted by
--run-using handlers in `taskcluster/taskgraph/transforms/job`.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import copy
--import logging
--import os
--
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.util.schema import (
--    validate_schema,
--    Schema,
--)
--from taskgraph.util.workertypes import worker_type_implementation
--from taskgraph.transforms.task import task_description_schema
--from voluptuous import (
--    Any,
--    Extra,
--    Optional,
--    Required,
--)
--
--logger = logging.getLogger(__name__)
--
--# Voluptuous uses marker objects as dictionary *keys*, but they are not
--# comparable, so we cast all of the keys back to regular strings
--task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
--
--# Schema for a build description
--job_description_schema = Schema({
--    # The name of the job and the job's label.  At least one must be specified,
--    # and the label will be generated from the name if necessary, by prepending
--    # the kind.
--    Optional('name'): basestring,
--    Optional('label'): basestring,
--
--    # the following fields are passed directly through to the task description,
--    # possibly modified by the run implementation.  See
--    # taskcluster/taskgraph/transforms/task.py for the schema details.
--    Required('description'): task_description_schema['description'],
--    Optional('attributes'): task_description_schema['attributes'],
--    Optional('dependencies'): task_description_schema['dependencies'],
--    Optional('expires-after'): task_description_schema['expires-after'],
--    Optional('routes'): task_description_schema['routes'],
--    Optional('scopes'): task_description_schema['scopes'],
--    Optional('tags'): task_description_schema['tags'],
--    Optional('extra'): task_description_schema['extra'],
--    Optional('treeherder'): task_description_schema['treeherder'],
--    Optional('index'): task_description_schema['index'],
--    Optional('run-on-projects'): task_description_schema['run-on-projects'],
--    Optional('coalesce-name'): task_description_schema['coalesce-name'],
--    Optional('optimizations'): task_description_schema['optimizations'],
--    Optional('needs-sccache'): task_description_schema['needs-sccache'],
--
--    # The "when" section contains descriptions of the circumstances
--    # under which this task should be included in the task graph.  This
--    # will be converted into an element in the `optimizations` list.
--    Optional('when'): Any({
--        # This task only needs to be run if a file matching one of the given
--        # patterns has changed in the push.  The patterns use the mozpack
--        # match function (python/mozbuild/mozpack/path.py).
--        Optional('files-changed'): [basestring],
--    }),
--
--    # A description of how to run this job.
--    'run': {
--        # The key to a job implementation in a module that is a peer of this one
--        'using': basestring,
--
--        # Any remaining content is verified against that job implementation's
--        # own schema.
--        Extra: object,
--    },
--
--    Required('worker-type'): task_description_schema['worker-type'],
--
--    # This object will be passed through to the task description, with additions
--    # provided by the job's run-using function
--    Optional('worker'): dict,
--})
--
--transforms = TransformSequence()
--
--
--@transforms.add
--def validate(config, jobs):
--    for job in jobs:
--        yield validate_schema(job_description_schema, job,
--                              "In job {!r}:".format(job.get('name', job.get('label'))))
--
--
--@transforms.add
--def rewrite_when_to_optimization(config, jobs):
--    for job in jobs:
--        when = job.pop('when', {})
--        files_changed = when.get('files-changed')
--        if not files_changed:
--            yield job
--            continue
--
--        # add some common files
--        files_changed.extend([
--            '{}/**'.format(config.path),
--            'taskcluster/taskgraph/**',
--        ])
--        if 'in-tree' in job.get('worker', {}).get('docker-image', {}):
--            files_changed.append('taskcluster/docker/{}/**'.format(
--                job['worker']['docker-image']['in-tree']))
--
--        # "only when files changed" implies "skip if files have not changed"
--        job.setdefault('optimizations', []).append(['skip-unless-changed', files_changed])
--
--        assert 'when' not in job
--        yield job
--
--
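A worked Python 3 example of the rewrite_when_to_optimization step above, with a made-up job label and kind path: the 'when' block becomes a skip-unless-changed optimization with the common file patterns appended:

    job = {
        'label': 'source-test-demo',                    # hypothetical job
        'when': {'files-changed': ['python/mozbuild/**']},
    }
    config_path = 'taskcluster/ci/source-test'          # hypothetical kind path

    when = job.pop('when', {})
    files_changed = when.get('files-changed')
    files_changed.extend([
        '{}/**'.format(config_path),
        'taskcluster/taskgraph/**',
    ])
    job.setdefault('optimizations', []).append(
        ['skip-unless-changed', files_changed])
    print(job['optimizations'])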
--@transforms.add
--def make_task_description(config, jobs):
--    """Given a build description, create a task description"""
--    # import plugin modules first, before iterating over jobs
--    import_all()
--    for job in jobs:
--        if 'label' not in job:
--            if 'name' not in job:
--                raise Exception("job has neither a name nor a label")
--            job['label'] = '{}-{}'.format(config.kind, job['name'])
--        if job.get('name'):
--            del job['name']
--
--        impl, os = worker_type_implementation(job['worker-type'])
--        worker = job.setdefault('worker', {})
--        assert 'implementation' not in worker
--        worker['implementation'] = impl
--        if os:
--            worker['os'] = os
--
--        taskdesc = copy.deepcopy(job)
--
--        # fill in some empty defaults to make run implementations easier
--        taskdesc.setdefault('attributes', {})
--        taskdesc.setdefault('dependencies', {})
--        taskdesc.setdefault('routes', [])
--        taskdesc.setdefault('scopes', [])
--        taskdesc.setdefault('extra', {})
--
--        # give the function for job.run.using on this worker implementation a
--        # chance to set up the task description.
--        configure_taskdesc_for_run(config, job, taskdesc, impl)
--        del taskdesc['run']
--
--        # yield only the task description, discarding the job description
--        yield taskdesc
--
--
--# A registry of all functions decorated with run_job_using
--registry = {}
--
--
--def run_job_using(worker_implementation, run_using, schema=None):
--    """Register the decorated function as able to set up a task description for
--    jobs with the given worker implementation and `run.using` property.  If
--    `schema` is given, the job's run field will be verified to match it.
--
--    The decorated function should have the signature `using_foo(config, job,
--    taskdesc)` and should modify the task description in-place.  The skeleton of
--    the task description is already set up, but without a payload."""
--    def wrap(func):
--        for_run_using = registry.setdefault(run_using, {})
--        if worker_implementation in for_run_using:
--            raise Exception("run_job_using({!r}, {!r}) already exists: {!r}".format(
--                run_using, worker_implementation, for_run_using[run_using]))
--        for_run_using[worker_implementation] = (func, schema)
--        return func
--    return wrap
--
--
--def configure_taskdesc_for_run(config, job, taskdesc, worker_implementation):
--    """
--    Run the appropriate function for this job against the given task
--    description.
--
--    This will raise an appropriate error if no function exists, or if the job's
--    run is not valid according to the schema.
--    """
--    run_using = job['run']['using']
--    if run_using not in registry:
--        raise Exception("no functions for run.using {!r}".format(run_using))
--
--    if worker_implementation not in registry[run_using]:
--        raise Exception("no functions for run.using {!r} on {!r}".format(
--            run_using, worker_implementation))
--
--    func, schema = registry[run_using][worker_implementation]
--    if schema:
--        job['run'] = validate_schema(
--                schema, job['run'],
--                "In job.run using {!r} for job {!r}:".format(
--                    job['run']['using'], job['label']))
--
--    func(config, job, taskdesc)
--
--
--def import_all():
--    """Import all modules that are siblings of this one, triggering the decorator
--    above in the process."""
--    for f in os.listdir(os.path.dirname(__file__)):
--        if f.endswith('.py') and f not in ('common.py', '__init__.py'):
--            __import__('taskgraph.transforms.job.' + f[:-3])
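A self-contained Python 3 sketch of the registry pattern above: handlers are registered per (run.using, worker implementation) pair and looked up when a job is transformed. Schema validation is omitted and the 'demo' handler is made up:

    registry = {}

    def run_job_using(worker_implementation, run_using):
        def wrap(func):
            for_run_using = registry.setdefault(run_using, {})
            assert worker_implementation not in for_run_using
            for_run_using[worker_implementation] = func
            return func
        return wrap

    @run_job_using('docker-worker', 'demo')
    def docker_worker_demo(config, job, taskdesc):
        # fill in the worker payload from the job's run section
        taskdesc['worker'] = {'command': job['run']['command']}

    def configure_taskdesc_for_run(config, job, taskdesc, impl):
        func = registry[job['run']['using']][impl]
        func(config, job, taskdesc)

    taskdesc = {}
    configure_taskdesc_for_run(
        None, {'run': {'using': 'demo', 'command': ['true']}},
        taskdesc, 'docker-worker')
    print(taskdesc)  # {'worker': {'command': ['true']}}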
-diff --git a/taskcluster/taskgraph/transforms/job/common.py b/taskcluster/taskgraph/transforms/job/common.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/job/common.py
-+++ /dev/null
-@@ -1,131 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Common support for various job types.  These functions are all named after the
--worker implementation they operate on, and take the same three parameters, for
--consistency.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--SECRET_SCOPE = 'secrets:get:project/releng/gecko/{}/level-{}/{}'
--
--
--def docker_worker_add_workspace_cache(config, job, taskdesc, extra=None):
--    """Add the workspace cache based on the build platform/type and level,
--    except on try where workspace caches are not used.
--
--    `extra` is an optional kwarg that supports extending the cache key name
--    to avoid undesired conflicts with other caches."""
--    if config.params['project'] == 'try':
--        return
--
--    taskdesc['worker'].setdefault('caches', []).append({
--        'type': 'persistent',
--        'name': 'level-{}-{}-build-{}-{}-workspace'.format(
--            config.params['level'], config.params['project'],
--            taskdesc['attributes']['build_platform'],
--            taskdesc['attributes']['build_type'],
--        ),
--        'mount-point': "/home/worker/workspace",
--    })
--    if extra:
--        taskdesc['worker']['caches'][-1]['name'] += '-{}'.format(
--            extra
--        )
--
--
--def docker_worker_add_tc_vcs_cache(config, job, taskdesc):
--    taskdesc['worker'].setdefault('caches', []).append({
--        'type': 'persistent',
--        'name': 'level-{}-{}-tc-vcs'.format(
--            config.params['level'], config.params['project']),
--        'mount-point': "/home/worker/.tc-vcs",
--    })
--
--
--def add_public_artifacts(config, job, taskdesc, path):
--    taskdesc['worker'].setdefault('artifacts', []).append({
--        'name': 'public/build',
--        'path': path,
--        'type': 'directory',
--    })
--
--
--def docker_worker_add_public_artifacts(config, job, taskdesc):
--    """ Adds a public artifact directory to the task """
--    add_public_artifacts(config, job, taskdesc, path='/home/worker/artifacts/')
--
--
--def generic_worker_add_public_artifacts(config, job, taskdesc):
--    """ Adds a public artifact directory to the task """
--    add_public_artifacts(config, job, taskdesc, path=r'public/build')
--
--
--def docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc):
--    """Add the GECKO_BASE_* and GECKO_HEAD_* env vars to the worker."""
--    env = taskdesc['worker'].setdefault('env', {})
--    env.update({
--        'GECKO_BASE_REPOSITORY': config.params['base_repository'],
--        'GECKO_HEAD_REF': config.params['head_rev'],
--        'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
--        'GECKO_HEAD_REV': config.params['head_rev'],
--    })
--
--
--def support_vcs_checkout(config, job, taskdesc):
--    """Update a job/task with parameters to enable a VCS checkout.
--
--    The configuration is intended for tasks using "run-task" and its
--    VCS checkout behavior.
--    """
--    level = config.params['level']
--
--    # native-engine does not support caches (yet), so we just do a full clone
--    # every time :(
--    if job['worker']['implementation'] in ('docker-worker', 'docker-engine'):
--        taskdesc['worker'].setdefault('caches', []).append({
--            'type': 'persistent',
--            # History of versions:
--            #
--            # ``level-%s-checkouts`` was initially used and contained a number
--            # of backwards incompatible changes, such as moving HG_STORE_PATH
--            # from a separate cache to this cache.
--            #
--            # ``v1`` was introduced to provide a clean break from the unversioned
--            # cache.
--            'name': 'level-%s-checkouts-v1' % level,
--            'mount-point': '/home/worker/checkouts',
--        })
--
--    taskdesc['worker'].setdefault('env', {}).update({
--        'GECKO_BASE_REPOSITORY': config.params['base_repository'],
--        'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
--        'GECKO_HEAD_REV': config.params['head_rev'],
--        'HG_STORE_PATH': '~/checkouts/hg-store',
--    })
--
--    # Give task access to hgfingerprint secret so it can pin the certificate
--    # for hg.mozilla.org.
--    taskdesc['scopes'].append('secrets:get:project/taskcluster/gecko/hgfingerprint')
--
--    # only some worker platforms have taskcluster-proxy enabled
--    if job['worker']['implementation'] in ('docker-worker', 'docker-engine'):
--        taskdesc['worker']['taskcluster-proxy'] = True
--
--
--def docker_worker_setup_secrets(config, job, taskdesc):
--    """Set up access to secrets via taskcluster-proxy.  The value of
--    run['secrets'] should be a boolean or a list of secret names that
--    can be accessed."""
--    if not job['run'].get('secrets'):
--        return
--
--    taskdesc['worker']['taskcluster-proxy'] = True
--    secrets = job['run']['secrets']
--    if secrets is True:
--        secrets = ['*']
--    for sec in secrets:
--        taskdesc['scopes'].append(SECRET_SCOPE.format(
--            job['treeherder']['kind'], config.params['level'], sec))
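A worked Python 3 example of the scope expansion in docker_worker_setup_secrets above, with made-up kind and level values:

    SECRET_SCOPE = 'secrets:get:project/releng/gecko/{}/level-{}/{}'

    secrets = True                  # run['secrets'] may be True or a list
    if secrets is True:
        secrets = ['*']
    scopes = [SECRET_SCOPE.format('build', '3', sec) for sec in secrets]
    print(scopes)
    # ['secrets:get:project/releng/gecko/build/level-3/*']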
-diff --git a/taskcluster/taskgraph/transforms/job/hazard.py b/taskcluster/taskgraph/transforms/job/hazard.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/job/hazard.py
-+++ /dev/null
-@@ -1,85 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Support for running hazard jobs via dedicated scripts
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.util.schema import Schema
--from voluptuous import Required, Optional, Any
--
--from taskgraph.transforms.job import run_job_using
--from taskgraph.transforms.job.common import (
--    docker_worker_add_workspace_cache,
--    docker_worker_setup_secrets,
--    docker_worker_add_public_artifacts,
--    support_vcs_checkout,
--)
--
--haz_run_schema = Schema({
--    Required('using'): 'hazard',
--
--    # The command to run within the task image (passed through to the worker)
--    Required('command'): basestring,
--
--    # The mozconfig to use; default in the script is used if omitted
--    Optional('mozconfig'): basestring,
--
--    # The set of secret names to which the task has access; these are prefixed
--    # with `project/releng/gecko/{treeherder.kind}/level-{level}/`.   Setting
--    # this will enable any worker features required and set the task's scopes
--    # appropriately.  `true` here means ['*'], all secrets.  Not supported on
--    # Windows
--    Required('secrets', default=False): Any(bool, [basestring]),
--})
--
--
--@run_job_using("docker-worker", "hazard", schema=haz_run_schema)
--def docker_worker_hazard(config, job, taskdesc):
--    run = job['run']
--
--    worker = taskdesc['worker']
--    worker['artifacts'] = []
--    worker['caches'] = []
--
--    docker_worker_add_public_artifacts(config, job, taskdesc)
--    docker_worker_add_workspace_cache(config, job, taskdesc)
--    docker_worker_setup_secrets(config, job, taskdesc)
--    support_vcs_checkout(config, job, taskdesc)
--
--    env = worker['env']
--    env.update({
--        'MOZ_BUILD_DATE': config.params['moz_build_date'],
--        'MOZ_SCM_LEVEL': config.params['level'],
--    })
--
--    # script parameters
--    if run.get('mozconfig'):
--        env['MOZCONFIG'] = run['mozconfig']
--
--    # tooltool downloads
--    worker['caches'].append({
--        'type': 'persistent',
--        'name': 'tooltool-cache',
--        'mount-point': '/home/worker/tooltool-cache',
--    })
--    worker['relengapi-proxy'] = True
--    taskdesc['scopes'].extend([
--        'docker-worker:relengapi-proxy:tooltool.download.public',
--    ])
--    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
--
--    # build-haz-linux.sh needs this otherwise it assumes the checkout is in
--    # the workspace.
--    env['GECKO_DIR'] = '/home/worker/checkouts/gecko'
--
--    worker['command'] = [
--        '/home/worker/bin/run-task',
--        '--chown-recursive', '/home/worker/tooltool-cache',
--        '--chown-recursive', '/home/worker/workspace',
--        '--vcs-checkout', '/home/worker/checkouts/gecko',
--        '--',
--        '/bin/bash', '-c', run['command']
--    ]
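For illustration, a hypothetical 'run' section that would validate against haz_run_schema above, written as a Python dict (in-tree these come from a kind.yml); the command and mozconfig path are made up:

    hazard_run = {
        'using': 'hazard',
        'command': ('cd /home/worker/checkouts/gecko && '
                    './taskcluster/scripts/builder/build-haz-linux.sh'),
        'mozconfig': 'browser/config/mozconfigs/linux64/hazards',  # optional
        'secrets': False,
    }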
-diff --git a/taskcluster/taskgraph/transforms/job/mach.py b/taskcluster/taskgraph/transforms/job/mach.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/job/mach.py
-+++ /dev/null
-@@ -1,38 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Support for running mach tasks (via run-task)
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.job import run_job_using
--from taskgraph.transforms.job.run_task import (
--    docker_worker_run_task,
--    native_engine_run_task,
--)
--from taskgraph.util.schema import Schema
--from voluptuous import Required
--
--mach_schema = Schema({
--    Required('using'): 'mach',
--
--    # The mach command (omitting `./mach`) to run
--    Required('mach'): basestring,
--})
--
--
--@run_job_using("docker-worker", "mach", schema=mach_schema)
--@run_job_using("native-engine", "mach", schema=mach_schema)
--def docker_worker_mach(config, job, taskdesc):
--    run = job['run']
--
--    # defer to the run_task implementation
--    run['command'] = 'cd ~/checkouts/gecko && ./mach ' + run['mach']
--    run['checkout'] = True
--    del run['mach']
--    if job['worker']['implementation'] == 'docker-worker':
--        docker_worker_run_task(config, job, taskdesc)
--    else:
--        native_engine_run_task(config, job, taskdesc)
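A worked Python 3 example of the rewrite performed by docker_worker_mach above (the mach subcommand is made up): a 'mach' run section is turned into a run-task style shell command before deferring to the run_task implementation:

    run = {'using': 'mach', 'mach': 'python-tests --python 2'}

    run['command'] = 'cd ~/checkouts/gecko && ./mach ' + run['mach']
    run['checkout'] = True
    del run['mach']
    print(run['command'])
    # cd ~/checkouts/gecko && ./mach python-tests --python 2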
-diff --git a/taskcluster/taskgraph/transforms/job/mozharness.py b/taskcluster/taskgraph/transforms/job/mozharness.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/job/mozharness.py
-+++ /dev/null
-@@ -1,310 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--
--Support for running jobs via mozharness.  Ideally, most stuff gets run this
--way, and certainly anything using mozharness should use this approach.
--
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--import slugid
--
--from textwrap import dedent
--
--from taskgraph.util.schema import Schema
--from voluptuous import Required, Optional, Any
--
--from taskgraph.transforms.job import run_job_using
--from taskgraph.transforms.job.common import (
--    docker_worker_add_workspace_cache,
--    docker_worker_add_gecko_vcs_env_vars,
--    docker_worker_setup_secrets,
--    docker_worker_add_public_artifacts,
--    generic_worker_add_public_artifacts,
--    support_vcs_checkout,
--)
--
--COALESCE_KEY = 'builds.{project}.{name}'
--
--mozharness_run_schema = Schema({
--    Required('using'): 'mozharness',
--
--    # the mozharness script used to run this task, relative to the testing/
--    # directory and using forward slashes even on Windows
--    Required('script'): basestring,
--
--    # the config files required for the task, relative to
--    # testing/mozharness/configs and using forward slashes even on Windows
--    Required('config'): [basestring],
--
--    # any additional actions to pass to the mozharness command; not supported
--    # on Windows
--    Optional('actions'): [basestring],
--
--    # any additional options (without leading --) to be passed to mozharness;
--    # not supported on Windows
--    Optional('options'): [basestring],
--
--    # --custom-build-variant-cfg value (not supported on Windows)
--    Optional('custom-build-variant-cfg'): basestring,
--
--    # Extra metadata to use toward the workspace caching.
--    # Only supported on docker-worker
--    Optional('extra-workspace-cache-key'): basestring,
--
--    # If not false, tooltool downloads will be enabled via relengAPIProxy
--    # for either just public files, or all files.  Not supported on Windows
--    Required('tooltool-downloads', default=False): Any(
--        False,
--        'public',
--        'internal',
--    ),
--
--    # The set of secret names to which the task has access; these are prefixed
--    # with `project/releng/gecko/{treeherder.kind}/level-{level}/`.  Setting
--    # this will enable any worker features required and set the task's scopes
--    # appropriately.  `true` here means ['*'], all secrets.  Not supported on
--    # Windows
--    Required('secrets', default=False): Any(bool, [basestring]),
--
--    # If true, taskcluster proxy will be enabled; note that it may also be enabled
--    # automatically e.g., for secrets support.  Not supported on Windows.
--    Required('taskcluster-proxy', default=False): bool,
--
--    # If true, the build scripts will start Xvfb.  Not supported on Windows.
--    Required('need-xvfb', default=False): bool,
--
--    # If false, indicate that builds should skip producing artifacts.  Not
--    # supported on Windows.
--    Required('keep-artifacts', default=True): bool,
--
--    # If specified, use the in-tree job script specified.
--    Optional('job-script'): basestring,
--
--    Required('requires-signed-builds', default=False): bool,
--
--    # If false, don't set MOZ_SIMPLE_PACKAGE_NAME
--    # Can only be disabled on Windows
--    Required('use-simple-package', default=True): bool,
--
--    # If false, don't pass --branch or --skip-buildbot-actions to the mozharness script
--    # Can only be disabled on Windows
--    Required('use-magic-mh-args', default=True): bool,
--})
--
--
--@run_job_using("docker-worker", "mozharness", schema=mozharness_run_schema)
--def mozharness_on_docker_worker_setup(config, job, taskdesc):
--    run = job['run']
--
--    worker = taskdesc['worker']
--    worker['implementation'] = job['worker']['implementation']
--
--    if not run['use-simple-package']:
--        raise NotImplementedError("Simple packaging cannot be disabled via"
--                                  "'use-simple-package' on docker-workers")
--    if not run['use-magic-mh-args']:
--        raise NotImplementedError("Cannot disabled mh magic arg passing via"
--                                  "'use-magic-mh-args' on docker-workers")
--
--    # running via mozharness assumes desktop-build (which contains build.sh)
--    taskdesc['worker']['docker-image'] = {"in-tree": "desktop-build"}
--
--    worker['relengapi-proxy'] = False  # but maybe enabled for tooltool below
--    worker['taskcluster-proxy'] = run.get('taskcluster-proxy')
--
--    docker_worker_add_public_artifacts(config, job, taskdesc)
--    docker_worker_add_workspace_cache(config, job, taskdesc,
--                                      extra=run.get('extra-workspace-cache-key'))
--    support_vcs_checkout(config, job, taskdesc)
--
--    env = worker.setdefault('env', {})
--    env.update({
--        'MOZHARNESS_CONFIG': ' '.join(run['config']),
--        'MOZHARNESS_SCRIPT': run['script'],
--        'MH_BRANCH': config.params['project'],
--        'MH_BUILD_POOL': 'taskcluster',
--        'MOZ_BUILD_DATE': config.params['moz_build_date'],
--        'MOZ_SCM_LEVEL': config.params['level'],
--        'MOZ_AUTOMATION': '1',
--    })
--
--    if 'actions' in run:
--        env['MOZHARNESS_ACTIONS'] = ' '.join(run['actions'])
--
--    if 'options' in run:
--        env['MOZHARNESS_OPTIONS'] = ' '.join(run['options'])
--
--    if 'custom-build-variant-cfg' in run:
--        env['MH_CUSTOM_BUILD_VARIANT_CFG'] = run['custom-build-variant-cfg']
--
--    if 'job-script' in run:
--        env['JOB_SCRIPT'] = run['job-script']
--
--    # if we're not keeping artifacts, set some env variables to empty values
--    # that will cause the build process to skip copying the results to the
--    # artifacts directory.  This will have no effect for operations that are
--    # not builds.
--    if not run['keep-artifacts']:
--        env['DIST_TARGET_UPLOADS'] = ''
--        env['DIST_UPLOADS'] = ''
--
--    # Xvfb
--    if run['need-xvfb']:
--        env['NEED_XVFB'] = 'true'
--
--    # tooltool downloads
--    if run['tooltool-downloads']:
--        worker['relengapi-proxy'] = True
--        worker['caches'].append({
--            'type': 'persistent',
--            'name': 'tooltool-cache',
--            'mount-point': '/home/worker/tooltool-cache',
--        })
--        taskdesc['scopes'].extend([
--            'docker-worker:relengapi-proxy:tooltool.download.public',
--        ])
--        if run['tooltool-downloads'] == 'internal':
--            taskdesc['scopes'].append(
--                'docker-worker:relengapi-proxy:tooltool.download.internal')
--        env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
--
--    # Retry if mozharness returns TBPL_RETRY
--    worker['retry-exit-status'] = 4
--
--    docker_worker_setup_secrets(config, job, taskdesc)
--
--    command = [
--        '/home/worker/bin/run-task',
--        # Various caches/volumes are default owned by root:root.
--        '--chown-recursive', '/home/worker/workspace',
--        '--chown-recursive', '/home/worker/tooltool-cache',
--        '--vcs-checkout', '/home/worker/workspace/build/src',
--        '--tools-checkout', '/home/worker/workspace/build/tools',
--        '--',
--    ]
--    command.append("/home/worker/workspace/build/src/{}".format(
--        run.get('job-script',
--                "taskcluster/scripts/builder/build-linux.sh"
--                )))
--
--    worker['command'] = command
--
--
--@run_job_using("generic-worker", "mozharness", schema=mozharness_run_schema)
--def mozharness_on_generic_worker(config, job, taskdesc):
--    assert job['worker']['os'] == 'windows', 'only supports windows right now'
--
--    run = job['run']
--
--    # fail if invalid run options are included
--    invalid = []
--    for prop in ['tooltool-downloads',
--                 'secrets', 'taskcluster-proxy', 'need-xvfb']:
--        if prop in run and run[prop]:
--            invalid.append(prop)
--    if not run.get('keep-artifacts', True):
--        invalid.append('keep-artifacts')
--    if invalid:
--        raise Exception("Jobs run using mozharness on Windows do not support properties " +
--                        ', '.join(invalid))
--
--    worker = taskdesc['worker']
--
--    generic_worker_add_public_artifacts(config, job, taskdesc)
--
--    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
--
--    env = worker['env']
--    env.update({
--        'MOZ_BUILD_DATE': config.params['moz_build_date'],
--        'MOZ_SCM_LEVEL': config.params['level'],
--        'MOZ_AUTOMATION': '1',
--    })
--    if run['use-simple-package']:
--        env.update({'MOZ_SIMPLE_PACKAGE_NAME': 'target'})
--
--    if not job['attributes']['build_platform'].startswith('win'):
--        raise Exception(
--            "Task generation for mozharness build jobs currently only supported on Windows"
--        )
--
--    mh_command = [r'c:\mozilla-build\python\python.exe']
--    mh_command.append('\\'.join([r'.\build\src\testing', run['script'].replace('/', '\\')]))
--    for cfg in run['config']:
--        mh_command.append('--config ' + cfg.replace('/', '\\'))
--    if run['use-magic-mh-args']:
--        mh_command.append('--branch ' + config.params['project'])
--        mh_command.append(r'--skip-buildbot-actions')
--    mh_command.append(r'--work-dir %cd:Z:=z:%\build')
--    for action in run.get('actions', []):
--        assert ' ' not in action
--        mh_command.append('--' + action)
--
--    for option in run.get('options', []):
--        assert ' ' not in option
--        mh_command.append('--' + option)
--    if run.get('custom-build-variant-cfg'):
--        mh_command.append('--custom-build-variant')
--        mh_command.append(run['custom-build-variant-cfg'])
--
--    hg_command = ['"c:\\Program Files\\Mercurial\\hg.exe"']
--    hg_command.append('robustcheckout')
--    hg_command.extend(['--sharebase', 'y:\\hg-shared'])
--    hg_command.append('--purge')
--    hg_command.extend(['--upstream', 'https://hg.mozilla.org/mozilla-unified'])
--    hg_command.extend(['--revision', env['GECKO_HEAD_REV']])
--    hg_command.append(env['GECKO_HEAD_REPOSITORY'])
--    hg_command.append('.\\build\\src')
--
--    worker['command'] = []
--    if taskdesc.get('needs-sccache'):
--        worker['command'].extend([
--            # Make the comment part of the first command, as it will help users to
--            # understand what is going on, and why these steps are implemented.
--            dedent('''\
--            :: sccache currently uses the full compiler commandline as input to the
--            :: cache hash key, so create a symlink to the task dir and build from
--            :: the symlink dir to get consistent paths.
--            if exist z:\\build rmdir z:\\build'''),
--            r'mklink /d z:\build %cd%',
--            # Grant delete permission on the link to everyone.
--            r'icacls z:\build /grant *S-1-1-0:D /L',
--            r'cd /d z:\build',
--        ])
--
--    worker['command'].extend([
--        ' '.join(hg_command),
--        ' '.join(mh_command)
--    ])
--
--
--@run_job_using('buildbot-bridge', 'mozharness', schema=mozharness_run_schema)
--def mozharness_on_buildbot_bridge(config, job, taskdesc):
--    run = job['run']
--    worker = taskdesc['worker']
--    branch = config.params['project']
--    product = run.get('index', {}).get('product', 'firefox')
--
--    worker.pop('env', None)
--
--    if 'devedition' in job['attributes']['build_platform']:
--        buildername = 'OS X 10.7 {} devedition build'.format(branch)
--    else:
--        buildername = 'OS X 10.7 {} build'.format(branch)
--
--    worker.update({
--        'buildername': buildername,
--        'sourcestamp': {
--            'branch': branch,
--            'repository': config.params['head_repository'],
--            'revision': config.params['head_rev'],
--        },
--        'properties': {
--            'product': product,
--            'who': config.params['owner'],
--            'upload_to_task_id': slugid.nice(),
--        }
--    })
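A worked Python 3 example of the buildername selection in mozharness_on_buildbot_bridge above, with a made-up branch and platform:

    branch = 'mozilla-beta'
    build_platform = 'macosx64-devedition/opt'   # hypothetical

    if 'devedition' in build_platform:
        buildername = 'OS X 10.7 {} devedition build'.format(branch)
    else:
        buildername = 'OS X 10.7 {} build'.format(branch)
    print(buildername)   # OS X 10.7 mozilla-beta devedition build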
-diff --git a/taskcluster/taskgraph/transforms/job/mozharness_test.py b/taskcluster/taskgraph/transforms/job/mozharness_test.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/job/mozharness_test.py
-+++ /dev/null
-@@ -1,533 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from voluptuous import Required
--from taskgraph.util.taskcluster import get_artifact_url
--from taskgraph.transforms.job import run_job_using
--from taskgraph.util.schema import Schema
--from taskgraph.transforms.tests import (
--    test_description_schema,
--    normpath
--)
--from taskgraph.transforms.job.common import (
--    support_vcs_checkout,
--)
--import os
--
--BUILDER_NAME_PREFIX = {
--    'linux64-pgo': 'Ubuntu VM 12.04 x64',
--    'linux64': 'Ubuntu VM 12.04 x64',
--    'linux64-nightly': 'Ubuntu VM 12.04 x64',
--    'linux64-asan': 'Ubuntu ASAN VM 12.04 x64',
--    'linux64-ccov': 'Ubuntu Code Coverage VM 12.04 x64',
--    'linux64-jsdcov': 'Ubuntu Code Coverage VM 12.04 x64',
--    'linux64-stylo': 'Ubuntu VM 12.04 x64',
--    'linux64-stylo-sequential': 'Ubuntu VM 12.04 x64',
--    'linux64-devedition': 'Ubuntu VM 12.04 x64',
--    'linux64-devedition-nightly': 'Ubuntu VM 12.04 x64',
--    'macosx64': 'Rev7 MacOSX Yosemite 10.10.5',
--    'macosx64-devedition': 'Rev7 MacOSX Yosemite 10.10.5 DevEdition',
--    'android-4.3-arm7-api-16': 'Android 4.3 armv7 api-16+',
--    'android-4.2-x86': 'Android 4.2 x86 Emulator',
--    'android-4.3-arm7-api-16-gradle': 'Android 4.3 armv7 api-16+',
--    'windows10-64': 'Windows 10 64-bit',
--    'windows10-64-nightly': 'Windows 10 64-bit',
--    'windows10-64-pgo': 'Windows 10 64-bit',
--    'windows10-64-asan': 'Windows 10 64-bit',
--    'windows10-64-stylo': 'Windows 10 64-bit',
--    'windows7-32': 'Windows 7 32-bit',
--    ('windows7-32', 'virtual-with-gpu'): 'Windows 7 VM-GFX 32-bit',
--    'windows7-32-nightly': 'Windows 7 32-bit',
--    'windows7-32-devedition': 'Windows 7 32-bit DevEdition',
--    'windows7-32-pgo': 'Windows 7 32-bit',
--    'windows7-32-stylo': 'Windows 7 32-bit',
--    'windows8-64': 'Windows 8 64-bit',
--    'windows8-64-nightly': 'Windows 8 64-bit',
--    'windows8-64-devedition': 'Windows 8 64-bit DevEdition',
--    'windows8-64-pgo': 'Windows 8 64-bit',
--}
--
--VARIANTS = [
--    'nightly',
--    'devedition',
--    'pgo',
--    'asan',
--    'stylo',
--    'stylo-sequential',
--    'qr',
--    'ccov',
--    'jsdcov',
--]
--
--
--def get_variant(test_platform):
--    for v in VARIANTS:
--        if '-{}/'.format(v) in test_platform:
--            return v
--    return ''
--
--
--test_description_schema = {str(k): v for k, v in test_description_schema.schema.iteritems()}
--
--mozharness_test_run_schema = Schema({
--    Required('using'): 'mozharness-test',
--    Required('test'): test_description_schema,
--})
--
--
--def test_packages_url(taskdesc):
--    """Account for different platforms that name their test packages differently"""
--    return get_artifact_url('<build>', 'public/build/target.test_packages.json')
--
--
--@run_job_using('docker-engine', 'mozharness-test', schema=mozharness_test_run_schema)
--@run_job_using('docker-worker', 'mozharness-test', schema=mozharness_test_run_schema)
--def mozharness_test_on_docker(config, job, taskdesc):
--    test = taskdesc['run']['test']
--    mozharness = test['mozharness']
--    worker = taskdesc['worker']
--
--    # apply some defaults
--    worker['docker-image'] = test['docker-image']
--    worker['allow-ptrace'] = True  # required for all tests, for crashreporter
--    worker['loopback-video'] = test['loopback-video']
--    worker['loopback-audio'] = test['loopback-audio']
--    worker['max-run-time'] = test['max-run-time']
--    worker['retry-exit-status'] = test['retry-exit-status']
--
--    artifacts = [
--        # (artifact name prefix, in-image path)
--        ("public/logs/", "/builds/worker/workspace/build/upload/logs/"),
--        ("public/test", "/builds/worker/artifacts/"),
--        ("public/test_info/", "/builds/worker/workspace/build/blobber_upload_dir/"),
--    ]
--
--    installer_url = get_artifact_url('<build>', mozharness['build-artifact-name'])
--    mozharness_url = get_artifact_url('<build>',
--                                      'public/build/mozharness.zip')
--
--    worker['artifacts'] = [{
--        'name': prefix,
--        'path': os.path.join('/builds/worker/workspace', path),
--        'type': 'directory',
--    } for (prefix, path) in artifacts]
--
--    worker['caches'] = [{
--        'type': 'persistent',
--        'name': 'level-{}-{}-test-workspace'.format(
--            config.params['level'], config.params['project']),
--        'mount-point': "/builds/worker/workspace",
--    }]
--
--    env = worker['env'] = {
--        'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
--        'MOZHARNESS_SCRIPT': mozharness['script'],
--        'MOZILLA_BUILD_URL': {'task-reference': installer_url},
--        'NEED_PULSEAUDIO': 'true',
--        'NEED_WINDOW_MANAGER': 'true',
--        'ENABLE_E10S': str(bool(test.get('e10s'))).lower(),
--        'MOZ_AUTOMATION': '1',
--    }
--
--    if mozharness.get('mochitest-flavor'):
--        env['MOCHITEST_FLAVOR'] = mozharness['mochitest-flavor']
--
--    if mozharness['set-moz-node-path']:
--        env['MOZ_NODE_PATH'] = '/usr/local/bin/node'
--
--    if 'actions' in mozharness:
--        env['MOZHARNESS_ACTIONS'] = ' '.join(mozharness['actions'])
--
--    if config.params['project'] == 'try':
--        env['TRY_COMMIT_MSG'] = config.params['message']
--
--    # handle some of the mozharness-specific options
--
--    if mozharness['tooltool-downloads']:
--        worker['relengapi-proxy'] = True
--        worker['caches'].append({
--            'type': 'persistent',
--            'name': 'tooltool-cache',
--            'mount-point': '/home/worker/tooltool-cache',
--        })
--        taskdesc['scopes'].extend([
--            'docker-worker:relengapi-proxy:tooltool.download.internal',
--            'docker-worker:relengapi-proxy:tooltool.download.public',
--        ])
--
--    if test['reboot']:
--        raise Exception('reboot: {} not supported on docker-worker'.format(test['reboot']))
--
--    # assemble the command line
--    command = [
--        '/home/worker/bin/run-task',
--        # The workspace cache/volume is default owned by root:root.
--        '--chown', '/home/worker/workspace',
--    ]
--
--    # Support vcs checkouts regardless of whether the task runs from
--    # source or not in case it is needed on an interactive loaner.
--    support_vcs_checkout(config, job, taskdesc)
--
--    # If we have a source checkout, run mozharness from it instead of
--    # downloading a zip file with the same content.
--    if test['checkout']:
--        command.extend(['--vcs-checkout', '/home/worker/checkouts/gecko'])
--        env['MOZHARNESS_PATH'] = '/home/worker/checkouts/gecko/testing/mozharness'
--    else:
--        env['MOZHARNESS_URL'] = {'task-reference': mozharness_url}
--
--    command.extend([
--        '--',
--        '/home/worker/bin/test-linux.sh',
--    ])
--
--    if mozharness.get('no-read-buildbot-config'):
--        command.append("--no-read-buildbot-config")
--    command.extend([
--        {"task-reference": "--installer-url=" + installer_url},
--        {"task-reference": "--test-packages-url=" + test_packages_url(taskdesc)},
--    ])
--    command.extend(mozharness.get('extra-options', []))
--
--    # TODO: remove the need for run['chunked']
--    if mozharness.get('chunked') or test['chunks'] > 1:
--        # Implement mozharness['chunking-args'], modifying command in place
--        if mozharness['chunking-args'] == 'this-chunk':
--            command.append('--total-chunk={}'.format(test['chunks']))
--            command.append('--this-chunk={}'.format(test['this-chunk']))
--        elif mozharness['chunking-args'] == 'test-suite-suffix':
--            suffix = mozharness['chunk-suffix'].replace('<CHUNK>', str(test['this-chunk']))
--            for i, c in enumerate(command):
--                if isinstance(c, basestring) and c.startswith('--test-suite'):
--                    command[i] += suffix
--
--    if 'download-symbols' in mozharness:
--        download_symbols = mozharness['download-symbols']
--        download_symbols = {True: 'true', False: 'false'}.get(download_symbols, download_symbols)
--        command.append('--download-symbols=' + download_symbols)
--
--    worker['command'] = command
--
--
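A worked Python 3 example of the 'test-suite-suffix' chunking branch above (basestring becomes str outside Python 2; the suffix value is made up): the chunk number is spliced into the --test-suite option:

    chunk_suffix = '-<CHUNK>'        # hypothetical mozharness['chunk-suffix']
    this_chunk = 2
    command = ['--test-suite=mochitest-plain', '--total-chunk=4']

    suffix = chunk_suffix.replace('<CHUNK>', str(this_chunk))
    for i, c in enumerate(command):
        if isinstance(c, str) and c.startswith('--test-suite'):
            command[i] += suffix
    print(command)  # ['--test-suite=mochitest-plain-2', '--total-chunk=4']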
--@run_job_using('generic-worker', 'mozharness-test', schema=mozharness_test_run_schema)
--def mozharness_test_on_generic_worker(config, job, taskdesc):
--    test = taskdesc['run']['test']
--    mozharness = test['mozharness']
--    worker = taskdesc['worker']
--
--    is_macosx = worker['os'] == 'macosx'
--    is_windows = worker['os'] == 'windows'
--    assert is_macosx or is_windows
--
--    artifacts = [
--        {
--            'name': 'public/logs',
--            'path': 'logs',
--            'type': 'directory'
--        },
--    ]
--
--    # jittest doesn't have blob_upload_dir
--    if test['test-name'] != 'jittest':
--        artifacts.append({
--            'name': 'public/test_info',
--            'path': 'build/blobber_upload_dir',
--            'type': 'directory'
--        })
--
--    upstream_task = '<build-signing>' if mozharness['requires-signed-builds'] else '<build>'
--    installer_url = get_artifact_url(upstream_task, mozharness['build-artifact-name'])
--
--    taskdesc['scopes'].extend(
--        ['generic-worker:os-group:{}'.format(group) for group in test['os-groups']])
--
--    worker['os-groups'] = test['os-groups']
--
--    if test['reboot']:
--        raise Exception('reboot: {} not supported on generic-worker'.format(test['reboot']))
--
--    worker['max-run-time'] = test['max-run-time']
--    worker['artifacts'] = artifacts
--
--    env = worker.setdefault('env', {})
--    env['MOZ_AUTOMATION'] = '1'
--
--    # this list will get cleaned up / reduced / removed in bug 1354088
--    if is_macosx:
--        env.update({
--            'IDLEIZER_DISABLE_SHUTDOWN': 'true',
--            'LANG': 'en_US.UTF-8',
--            'LC_ALL': 'en_US.UTF-8',
--            'MOZ_HIDE_RESULTS_TABLE': '1',
--            'MOZ_NODE_PATH': '/usr/local/bin/node',
--            'MOZ_NO_REMOTE': '1',
--            'NO_EM_RESTART': '1',
--            'NO_FAIL_ON_TEST_ERRORS': '1',
--            'PATH': '/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin',
--            'SHELL': '/bin/bash',
--            'XPCOM_DEBUG_BREAK': 'warn',
--            'XPC_FLAGS': '0x0',
--            'XPC_SERVICE_NAME': '0',
--        })
--
--    if is_macosx:
--        mh_command = [
--            'python2.7',
--            '-u',
--            'mozharness/scripts/' + mozharness['script']
--        ]
--    elif is_windows:
--        mh_command = [
--            'c:\\mozilla-build\\python\\python.exe',
--            '-u',
--            'mozharness\\scripts\\' + normpath(mozharness['script'])
--        ]
--
--    for mh_config in mozharness['config']:
--        cfg_path = 'mozharness/configs/' + mh_config
--        if is_windows:
--            cfg_path = normpath(cfg_path)
--        mh_command.extend(['--cfg', cfg_path])
--    mh_command.extend(mozharness.get('extra-options', []))
--    if mozharness.get('no-read-buildbot-config'):
--        mh_command.append('--no-read-buildbot-config')
--    mh_command.extend(['--installer-url', installer_url])
--    mh_command.extend(['--test-packages-url', test_packages_url(taskdesc)])
--    if mozharness.get('download-symbols'):
--        if isinstance(mozharness['download-symbols'], basestring):
--            mh_command.extend(['--download-symbols', mozharness['download-symbols']])
--        else:
--            mh_command.extend(['--download-symbols', 'true'])
--    if mozharness.get('include-blob-upload-branch'):
--        mh_command.append('--blob-upload-branch=' + config.params['project'])
--
--    # TODO: remove the need for run['chunked']
--    if mozharness.get('chunked') or test['chunks'] > 1:
--        # Implement mozharness['chunking-args'], modifying command in place
--        if mozharness['chunking-args'] == 'this-chunk':
--            mh_command.append('--total-chunk={}'.format(test['chunks']))
--            mh_command.append('--this-chunk={}'.format(test['this-chunk']))
--        elif mozharness['chunking-args'] == 'test-suite-suffix':
--            suffix = mozharness['chunk-suffix'].replace('<CHUNK>', str(test['this-chunk']))
--            for i, c in enumerate(mh_command):
--                if isinstance(c, basestring) and c.startswith('--test-suite'):
--                    mh_command[i] += suffix
--
--    if config.params['project'] == 'try':
--        env['TRY_COMMIT_MSG'] = config.params['message']
--
--    worker['mounts'] = [{
--        'directory': '.',
--        'content': {
--            'artifact': 'public/build/mozharness.zip',
--            'task-id': {
--                'task-reference': '<build>'
--            }
--        },
--        'format': 'zip'
--    }]
--
--    if is_windows:
--        worker['command'] = [
--            {'task-reference': ' '.join(mh_command)}
--        ]
--    else:  # is_macosx
--        mh_command_task_ref = []
--        for token in mh_command:
--            mh_command_task_ref.append({'task-reference': token})
--        worker['command'] = [
--            mh_command_task_ref
--        ]
--
--
--@run_job_using('native-engine', 'mozharness-test', schema=mozharness_test_run_schema)
--def mozharness_test_on_native_engine(config, job, taskdesc):
--    test = taskdesc['run']['test']
--    mozharness = test['mozharness']
--    worker = taskdesc['worker']
--    is_talos = test['suite'] == 'talos'
--    is_macosx = worker['os'] == 'macosx'
--
--    installer_url = get_artifact_url('<build>', mozharness['build-artifact-name'])
--    mozharness_url = get_artifact_url('<build>',
--                                      'public/build/mozharness.zip')
--
--    worker['artifacts'] = [{
--        'name': prefix.rstrip('/'),
--        'path': path.rstrip('/'),
--        'type': 'directory',
--    } for (prefix, path) in [
--        # (artifact name prefix, in-image path relative to homedir)
--        ("public/logs/", "workspace/build/upload/logs/"),
--        ("public/test", "artifacts/"),
--        ("public/test_info/", "workspace/build/blobber_upload_dir/"),
--    ]]
--
--    if test['reboot']:
--        worker['reboot'] = test['reboot']
--
--    worker['env'] = env = {
--        'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
--        'GECKO_HEAD_REV': config.params['head_rev'],
--        'MOZHARNESS_CONFIG': ' '.join(mozharness['config']),
--        'MOZHARNESS_SCRIPT': mozharness['script'],
--        'MOZHARNESS_URL': {'task-reference': mozharness_url},
--        'MOZILLA_BUILD_URL': {'task-reference': installer_url},
--        "MOZ_NO_REMOTE": '1',
--        "NO_EM_RESTART": '1',
--        "XPCOM_DEBUG_BREAK": 'warn',
--        "NO_FAIL_ON_TEST_ERRORS": '1',
--        "MOZ_HIDE_RESULTS_TABLE": '1',
--        "MOZ_NODE_PATH": "/usr/local/bin/node",
--        'MOZ_AUTOMATION': '1',
--    }
--    # talos tests don't need Xvfb
--    if is_talos:
--        env['NEED_XVFB'] = 'false'
--
--    script = 'test-macosx.sh' if is_macosx else 'test-linux.sh'
--    worker['context'] = '{}/raw-file/{}/taskcluster/scripts/tester/{}'.format(
--        config.params['head_repository'], config.params['head_rev'], script
--    )
--
--    command = worker['command'] = ["./{}".format(script)]
--    if mozharness.get('no-read-buildbot-config'):
--        command.append("--no-read-buildbot-config")
--    command.extend([
--        {"task-reference": "--installer-url=" + installer_url},
--        {"task-reference": "--test-packages-url=" + test_packages_url(taskdesc)},
--    ])
--    if mozharness.get('include-blob-upload-branch'):
--        command.append('--blob-upload-branch=' + config.params['project'])
--    command.extend(mozharness.get('extra-options', []))
--
--    # TODO: remove the need for run['chunked']
--    if mozharness.get('chunked') or test['chunks'] > 1:
--        # Implement mozharness['chunking-args'], modifying command in place
--        if mozharness['chunking-args'] == 'this-chunk':
--            command.append('--total-chunk={}'.format(test['chunks']))
--            command.append('--this-chunk={}'.format(test['this-chunk']))
--        elif mozharness['chunking-args'] == 'test-suite-suffix':
--            suffix = mozharness['chunk-suffix'].replace('<CHUNK>', str(test['this-chunk']))
--            for i, c in enumerate(command):
--                if isinstance(c, basestring) and c.startswith('--test-suite'):
--                    command[i] += suffix
--
--    if 'download-symbols' in mozharness:
--        download_symbols = mozharness['download-symbols']
--        download_symbols = {True: 'true', False: 'false'}.get(download_symbols, download_symbols)
--        command.append('--download-symbols=' + download_symbols)
--
--
--@run_job_using('buildbot-bridge', 'mozharness-test', schema=mozharness_test_run_schema)
--def mozharness_test_buildbot_bridge(config, job, taskdesc):
--    test = taskdesc['run']['test']
--    mozharness = test['mozharness']
--    worker = taskdesc['worker']
--
--    branch = config.params['project']
--    build_platform, build_type = test['build-platform'].split('/')
--    test_platform = test['test-platform'].split('/')[0]
--    test_name = test.get('try-name', test['test-name'])
--
--    # mochitest e10s follows the pattern mochitest-e10s-<suffix>
--    # in buildbot, except for these special cases
--    buildbot_specials = [
--        'mochitest-webgl',
--        'mochitest-clipboard',
--        'mochitest-media',
--        'mochitest-gpu',
--        'mochitest-e10s',
--    ]
--    if test['e10s'] and 'e10s' not in test_name:
--        test_name += '-e10s'
--
--    if test_name.startswith('mochitest') \
--            and test_name.endswith('e10s') \
--            and not any(map(
--                lambda name: test_name.startswith(name),
--                buildbot_specials
--            )):
--        split_mochitest = test_name.split('-')
--        test_name = '-'.join([
--            split_mochitest[0],
--            split_mochitest[-1],
--            '-'.join(split_mochitest[1:-1])
--        ])
--
--    # in buildbot, mochitest-webgl is called mochitest-gl
--    test_name = test_name.replace('webgl', 'gl')
--
--    if mozharness.get('chunked', False):
--        this_chunk = test.get('this-chunk')
--        test_name = '{}-{}'.format(test_name, this_chunk)
--    elif test.get('this-chunk', 1) != 1:
--        raise Exception("Unexpected chunking when 'chunked' attribute is 'false'"
--                        " for {}".format(test_name))
--
--    if test.get('suite', '') == 'talos':
--        variant = get_variant(test['test-platform'])
--
--        # On beta and release, we run nightly builds on-push; the talos
--        # builders need to run against non-nightly buildernames
--        if variant == 'nightly':
--            variant = ''
--
--        # in BBB these variant names put the branch after the variant type (bug 1338871)
--        if variant in ('stylo', 'stylo-sequential', 'devedition'):
--            name = '{prefix} {variant} {branch} talos {test_name}'
--        elif variant:
--            name = '{prefix} {branch} {variant} talos {test_name}'
--        else:
--            name = '{prefix} {branch} talos {test_name}'
--
--        buildername = name.format(
--            prefix=BUILDER_NAME_PREFIX[test_platform],
--            variant=variant,
--            branch=branch,
--            test_name=test_name
--        )
--
--        if buildername.startswith('Ubuntu'):
--            buildername = buildername.replace('VM', 'HW')
--    else:
--        variant = get_variant(test['test-platform'])
--        # If we are a pgo type, munge the build_type for the
--        # Unittest builder name generation
--        if 'pgo' in variant:
--            build_type = variant
--        prefix = BUILDER_NAME_PREFIX.get(
--            (test_platform, test.get('virtualization')),
--            BUILDER_NAME_PREFIX[test_platform])
--        buildername = '{prefix} {branch} {build_type} test {test_name}'.format(
--            prefix=prefix,
--            branch=branch,
--            build_type=build_type,
--            test_name=test_name
--        )
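The three talos name templates above differ only in where the variant sits; a compact sketch of the selection logic (the prefix value is illustrative):

```python
def talos_buildername(prefix, branch, variant, test_name):
    """Pick the buildbot builder name for a talos job. A few variants put
    the branch after the variant type (see BBB bug 1338871)."""
    if variant in ('stylo', 'stylo-sequential', 'devedition'):
        name = '{prefix} {variant} {branch} talos {test_name}'
    elif variant:
        name = '{prefix} {branch} {variant} talos {test_name}'
    else:
        name = '{prefix} {branch} talos {test_name}'
    buildername = name.format(prefix=prefix, variant=variant,
                              branch=branch, test_name=test_name)
    # talos runs on hardware, not VMs
    if buildername.startswith('Ubuntu'):
        buildername = buildername.replace('VM', 'HW')
    return buildername


print(talos_buildername('Ubuntu VM 12.04 x64', 'mozilla-beta', '', 'tp5o-1'))
# Ubuntu HW 12.04 x64 mozilla-beta talos tp5o-1
```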
--
--    worker.update({
--        'buildername': buildername,
--        'sourcestamp': {
--            'branch': branch,
--            'repository': config.params['head_repository'],
--            'revision': config.params['head_rev'],
--        },
--        'properties': {
--            'product': test.get('product', 'firefox'),
--            'who': config.params['owner'],
--            'installer_path': mozharness['build-artifact-name'],
--        }
--    })
--
--    if mozharness['requires-signed-builds']:
--        upstream_task = '<build-signing>'
--        installer_url = get_artifact_url(upstream_task, mozharness['build-artifact-name'])
--        worker['properties']['signed_installer_url'] = {'task-reference': installer_url}
-diff --git a/taskcluster/taskgraph/transforms/job/run_task.py b/taskcluster/taskgraph/transforms/job/run_task.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/job/run_task.py
-+++ /dev/null
-@@ -1,84 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Support for running jobs that are invoked via the `run-task` script.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.job import run_job_using
--from taskgraph.util.schema import Schema
--from taskgraph.transforms.job.common import support_vcs_checkout
--from voluptuous import Required, Any
--
--run_task_schema = Schema({
--    Required('using'): 'run-task',
--
--    # if true, add a cache at ~worker/.cache, which is where things like pip
--    # tend to hide their caches.  This cache is never added for level-1 jobs.
--    Required('cache-dotcache', default=False): bool,
--
--    # if true (the default), perform a checkout in /home/worker/checkouts/gecko
--    Required('checkout', default=True): bool,
--
--    # The command arguments to pass to the `run-task` script, after the
--    # checkout arguments.  If a list, it will be passed directly; otherwise
--    # it will be included in a single argument to `bash -cx`.
--    Required('command'): Any([basestring], basestring),
--})
--
--
--def common_setup(config, job, taskdesc):
--    run = job['run']
--    if run['checkout']:
--        support_vcs_checkout(config, job, taskdesc)
--
--
--@run_job_using("docker-worker", "run-task", schema=run_task_schema)
--def docker_worker_run_task(config, job, taskdesc):
--    run = job['run']
--    worker = taskdesc['worker'] = job['worker']
--    common_setup(config, job, taskdesc)
--
--    if run.get('cache-dotcache') and int(config.params['level']) > 1:
--        worker['caches'].append({
--            'type': 'persistent',
--            'name': 'level-{level}-{project}-dotcache'.format(**config.params),
--            'mount-point': '/home/worker/.cache',
--        })
--
--    run_command = run['command']
--    if isinstance(run_command, basestring):
--        run_command = ['bash', '-cx', run_command]
--    command = ['/home/worker/bin/run-task']
--    if run['checkout']:
--        command.append('--vcs-checkout=~/checkouts/gecko')
--    command.append('--fetch-hgfingerprint')
--    command.append('--')
--    command.extend(run_command)
--    worker['command'] = command
--
--
--@run_job_using("native-engine", "run-task", schema=run_task_schema)
--def native_engine_run_task(config, job, taskdesc):
--    run = job['run']
--    worker = taskdesc['worker'] = job['worker']
--    common_setup(config, job, taskdesc)
--
--    worker['context'] = '{}/raw-file/{}/taskcluster/docker/recipes/run-task'.format(
--        config.params['head_repository'], config.params['head_rev']
--    )
--
--    if run.get('cache-dotcache'):
--        raise Exception("No cache support on native-worker; can't use cache-dotcache")
--
--    run_command = run['command']
--    if isinstance(run_command, basestring):
--        run_command = ['bash', '-cx', run_command]
--    command = ['./run-task']
--    if run['checkout']:
--        command.append('--vcs-checkout=~/checkouts/gecko')
--    command.append('--')
--    command.extend(run_command)
--    worker['command'] = command
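Both worker flavors above share the same wrapping pattern: a bare string becomes a `bash -cx` invocation, and the run-task arguments end with `--` so everything after it is the payload. A minimal sketch:

```python
def wrap_with_run_task(run_command, run_task='./run-task',
                       checkout_path='~/checkouts/gecko'):
    """Build the final worker command around the run-task wrapper."""
    if isinstance(run_command, str):
        run_command = ['bash', '-cx', run_command]
    command = [run_task]
    if checkout_path:
        command.append('--vcs-checkout={}'.format(checkout_path))
    command.append('--')  # everything after this is the payload
    command.extend(run_command)
    return command


print(wrap_with_run_task('./mach build'))
# ['./run-task', '--vcs-checkout=~/checkouts/gecko', '--', 'bash', '-cx', './mach build']
```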
-diff --git a/taskcluster/taskgraph/transforms/job/spidermonkey.py b/taskcluster/taskgraph/transforms/job/spidermonkey.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/job/spidermonkey.py
-+++ /dev/null
-@@ -1,82 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Support for running spidermonkey jobs via dedicated scripts
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.util.schema import Schema
--from voluptuous import Required, Any
--
--from taskgraph.transforms.job import run_job_using
--from taskgraph.transforms.job.common import (
--    docker_worker_add_public_artifacts,
--    support_vcs_checkout,
--)
--
--sm_run_schema = Schema({
--    Required('using'): Any('spidermonkey', 'spidermonkey-package', 'spidermonkey-mozjs-crate'),
--
--    # The SPIDERMONKEY_VARIANT
--    Required('spidermonkey-variant'): basestring,
--})
--
--
--@run_job_using("docker-worker", "spidermonkey", schema=sm_run_schema)
--@run_job_using("docker-worker", "spidermonkey-package", schema=sm_run_schema)
--@run_job_using("docker-worker", "spidermonkey-mozjs-crate",
--               schema=sm_run_schema)
--def docker_worker_spidermonkey(config, job, taskdesc):
--    run = job['run']
--
--    worker = taskdesc['worker']
--    worker['artifacts'] = []
--    worker['caches'] = []
--
--    if int(config.params['level']) > 1:
--        worker['caches'].append({
--            'type': 'persistent',
--            'name': 'level-{}-{}-build-spidermonkey-workspace'.format(
--                config.params['level'], config.params['project']),
--            'mount-point': "/home/worker/workspace",
--        })
--
--    docker_worker_add_public_artifacts(config, job, taskdesc)
--
--    env = worker.setdefault('env', {})
--    env.update({
--        'MOZHARNESS_DISABLE': 'true',
--        'SPIDERMONKEY_VARIANT': run['spidermonkey-variant'],
--        'MOZ_BUILD_DATE': config.params['moz_build_date'],
--        'MOZ_SCM_LEVEL': config.params['level'],
--    })
--
--    # tooltool downloads; note that this script downloads using the API
--    # endpoint directly, rather than via relengapi-proxy
--    worker['caches'].append({
--        'type': 'persistent',
--        'name': 'tooltool-cache',
--        'mount-point': '/home/worker/tooltool-cache',
--    })
--    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
--
--    support_vcs_checkout(config, job, taskdesc)
--
--    script = "build-sm.sh"
--    if run['using'] == 'spidermonkey-package':
--        script = "build-sm-package.sh"
--    elif run['using'] == 'spidermonkey-mozjs-crate':
--        script = "build-sm-mozjs-crate.sh"
--
--    worker['command'] = [
--        '/home/worker/bin/run-task',
--        '--chown-recursive', '/home/worker/workspace',
--        '--chown-recursive', '/home/worker/tooltool-cache',
--        '--vcs-checkout', '/home/worker/workspace/build/src',
--        '--',
--        '/bin/bash',
--        '-c',
--        'cd /home/worker && workspace/build/src/taskcluster/scripts/builder/%s' % script
--    ]
-diff --git a/taskcluster/taskgraph/transforms/job/toolchain.py b/taskcluster/taskgraph/transforms/job/toolchain.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/job/toolchain.py
-+++ /dev/null
-@@ -1,191 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Support for running toolchain-building jobs via dedicated scripts
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.util.schema import Schema
--from voluptuous import Optional, Required, Any
--
--from taskgraph.transforms.job import run_job_using
--from taskgraph.transforms.job.common import (
--    docker_worker_add_tc_vcs_cache,
--    docker_worker_add_gecko_vcs_env_vars,
--    docker_worker_add_public_artifacts,
--    support_vcs_checkout,
--)
--from taskgraph.util.hash import hash_paths
--from taskgraph import GECKO
--
--
--TOOLCHAIN_INDEX = 'gecko.cache.level-{level}.toolchains.v1.{name}.{digest}'
--
--toolchain_run_schema = Schema({
--    Required('using'): 'toolchain-script',
--
--    # the script (in taskcluster/scripts/misc) to run
--    Required('script'): basestring,
--
--    # If not false, tooltool downloads will be enabled via relengapi-proxy
--    # for either just public files, or all files.  Not supported on Windows.
--    Required('tooltool-downloads', default=False): Any(
--        False,
--        'public',
--        'internal',
--    ),
--
--    # Paths/patterns pointing to files that influence the outcome of a
--    # toolchain build.
--    Optional('resources'): [basestring],
--
--    # Path to the artifact produced by the toolchain job
--    Required('toolchain-artifact'): basestring,
--})
--
--
--def add_optimizations(config, run, taskdesc):
--    files = list(run.get('resources', []))
--    # This file
--    files.append('taskcluster/taskgraph/transforms/job/toolchain.py')
--    # The script
--    files.append('taskcluster/scripts/misc/{}'.format(run['script']))
--
--    label = taskdesc['label']
--    subs = {
--        'name': label.replace('%s-' % config.kind, ''),
--        'digest': hash_paths(GECKO, files),
--    }
--
--    optimizations = taskdesc.setdefault('optimizations', [])
--
--    # We'll try to find a cached version of the toolchain at levels above
--    # and including the current level, starting at the highest level.
--    for level in reversed(range(int(config.params['level']), 4)):
--        subs['level'] = level
--        optimizations.append(['index-search', TOOLCHAIN_INDEX.format(**subs)])
--
--    # ... and cache at the lowest level.
--    taskdesc.setdefault('routes', []).append(
--        'index.{}'.format(TOOLCHAIN_INDEX.format(**subs)))
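The optimization above is worth restating: the task searches the index at its own level and every level above it (highest first), and publishes its result only at its own level, so lower-trust trees can reuse higher-trust toolchains but not vice versa. A sketch with a stand-in digest function (`hash_paths` in the real code hashes file contents, not names):

```python
import hashlib

TOOLCHAIN_INDEX = 'gecko.cache.level-{level}.toolchains.v1.{name}.{digest}'


def toolchain_optimizations(level, name, files):
    """Return (index searches, cache route) for a toolchain task."""
    digest = hashlib.sha256('\n'.join(sorted(files)).encode()).hexdigest()[:12]
    searches = [
        TOOLCHAIN_INDEX.format(level=lvl, name=name, digest=digest)
        for lvl in reversed(range(level, 4))  # e.g. level 1 -> 3, 2, 1
    ]
    # cache at the lowest (current) level, the last one generated
    route = 'index.' + searches[-1]
    return searches, route


searches, route = toolchain_optimizations(1, 'linux64-clang', ['build-clang.sh'])
print(searches)
print(route)
```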
--
--
--@run_job_using("docker-worker", "toolchain-script", schema=toolchain_run_schema)
--def docker_worker_toolchain(config, job, taskdesc):
--    run = job['run']
--    taskdesc['run-on-projects'] = ['trunk', 'try']
--
--    worker = taskdesc['worker']
--    worker['artifacts'] = []
--    worker['caches'] = []
--    worker['chain-of-trust'] = True
--
--    docker_worker_add_public_artifacts(config, job, taskdesc)
--    docker_worker_add_tc_vcs_cache(config, job, taskdesc)
--    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
--    support_vcs_checkout(config, job, taskdesc)
--
--    env = worker['env']
--    env.update({
--        'MOZ_BUILD_DATE': config.params['moz_build_date'],
--        'MOZ_SCM_LEVEL': config.params['level'],
--        'TOOLS_DISABLE': 'true',
--        'MOZ_AUTOMATION': '1',
--    })
--
--    # tooltool downloads.  By default we download using the API endpoint, but
--    # the job can optionally request relengapi-proxy (for example when downloading
--    # internal tooltool resources).  So we define the tooltool cache unconditionally.
--    worker['caches'].append({
--        'type': 'persistent',
--        'name': 'tooltool-cache',
--        'mount-point': '/home/worker/tooltool-cache',
--    })
--    env['TOOLTOOL_CACHE'] = '/home/worker/tooltool-cache'
--
--    # tooltool downloads
--    worker['relengapi-proxy'] = False  # but maybe enabled for tooltool below
--    if run['tooltool-downloads']:
--        worker['relengapi-proxy'] = True
--        taskdesc['scopes'].extend([
--            'docker-worker:relengapi-proxy:tooltool.download.public',
--        ])
--        if run['tooltool-downloads'] == 'internal':
--            taskdesc['scopes'].append(
--                'docker-worker:relengapi-proxy:tooltool.download.internal')
--
--    worker['command'] = [
--        '/home/worker/bin/run-task',
--        # Various caches/volumes are owned by root:root by default.
--        '--chown-recursive', '/home/worker/workspace',
--        '--chown-recursive', '/home/worker/tooltool-cache',
--        '--vcs-checkout=/home/worker/workspace/build/src',
--        '--',
--        'bash',
--        '-c',
--        'cd /home/worker && '
--        './workspace/build/src/taskcluster/scripts/misc/{}'.format(
--            run['script'])
--    ]
--
--    attributes = taskdesc.setdefault('attributes', {})
--    attributes['toolchain-artifact'] = run['toolchain-artifact']
--
--    add_optimizations(config, run, taskdesc)
--
--
--@run_job_using("generic-worker", "toolchain-script", schema=toolchain_run_schema)
--def windows_toolchain(config, job, taskdesc):
--    run = job['run']
--    taskdesc['run-on-projects'] = ['trunk', 'try']
--
--    worker = taskdesc['worker']
--
--    worker['artifacts'] = [{
--        'path': r'public\build',
--        'type': 'directory',
--    }]
--    worker['chain-of-trust'] = True
--
--    docker_worker_add_gecko_vcs_env_vars(config, job, taskdesc)
--
--    # We fetch LLVM SVN into this.
--    svn_cache = 'level-{}-toolchain-clang-cl-build-svn'.format(config.params['level'])
--    worker['mounts'] = [{
--        'cache-name': svn_cache,
--        'directory': r'llvm-sources',
--    }]
--    taskdesc['scopes'].extend([
--        'generic-worker:cache:' + svn_cache,
--    ])
--
--    env = worker['env']
--    env.update({
--        'MOZ_BUILD_DATE': config.params['moz_build_date'],
--        'MOZ_SCM_LEVEL': config.params['level'],
--        'MOZ_AUTOMATION': '1',
--    })
--
--    hg = r'c:\Program Files\Mercurial\hg.exe'
--    hg_command = ['"{}"'.format(hg)]
--    hg_command.append('robustcheckout')
--    hg_command.extend(['--sharebase', 'y:\\hg-shared'])
--    hg_command.append('--purge')
--    hg_command.extend(['--upstream', 'https://hg.mozilla.org/mozilla-unified'])
--    hg_command.extend(['--revision', '%GECKO_HEAD_REV%'])
--    hg_command.append('%GECKO_HEAD_REPOSITORY%')
--    hg_command.append('.\\build\\src')
--
--    bash = r'c:\mozilla-build\msys\bin\bash'
--    worker['command'] = [
--        ' '.join(hg_command),
--        # run the toolchain build script via msys bash
--        r'{} -c ./build/src/taskcluster/scripts/misc/{}'.format(bash, run['script'])
--    ]
--
--    attributes = taskdesc.setdefault('attributes', {})
--    attributes['toolchain-artifact'] = run['toolchain-artifact']
--
--    add_optimizations(config, run, taskdesc)
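The hg invocation above is assembled piecewise; flattened into one helper it is easier to read. A sketch (defaults copied from the hunk; the `%VAR%` placeholders are expanded by the Windows worker, not by Python):

```python
def robustcheckout_command(hg=r'c:\Program Files\Mercurial\hg.exe',
                           sharebase=r'y:\hg-shared',
                           upstream='https://hg.mozilla.org/mozilla-unified',
                           revision='%GECKO_HEAD_REV%',
                           repository='%GECKO_HEAD_REPOSITORY%',
                           dest=r'.\build\src'):
    """Assemble the generic-worker checkout command as a single string."""
    parts = ['"{}"'.format(hg), 'robustcheckout',
             '--sharebase', sharebase,
             '--purge',
             '--upstream', upstream,
             '--revision', revision,
             repository,
             dest]
    return ' '.join(parts)


print(robustcheckout_command())
```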
-diff --git a/taskcluster/taskgraph/transforms/l10n.py b/taskcluster/taskgraph/transforms/l10n.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/l10n.py
-+++ /dev/null
-@@ -1,412 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Do transforms specific to l10n kind
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import copy
--import json
--
--from mozbuild.chunkify import chunkify
--from taskgraph.transforms.base import (
--    TransformSequence,
--)
--from taskgraph.util.schema import (
--    validate_schema,
--    optionally_keyed_by,
--    resolve_keyed_by,
--    Schema,
--)
--from taskgraph.util.treeherder import split_symbol, join_symbol
--from taskgraph.transforms.job import job_description_schema
--from voluptuous import (
--    Any,
--    Optional,
--    Required,
--)
--
--
--def _by_platform(arg):
--    return optionally_keyed_by('build-platform', arg)
--
--
--# shortcut for a string where task references are allowed
--taskref_or_string = Any(
--    basestring,
--    {Required('task-reference'): basestring})
--
--# Voluptuous uses marker objects as dictionary *keys*, but they are not
--# comparable, so we cast all of the keys back to regular strings
--job_description_schema = {str(k): v for k, v in job_description_schema.schema.iteritems()}
--
--l10n_description_schema = Schema({
--    # Name for this job, inferred from the dependent job before validation
--    Required('name'): basestring,
--
--    # build-platform, inferred from dependent job before validation
--    Required('build-platform'): basestring,
--
--    # max run time of the task
--    Required('run-time'): _by_platform(int),
--
--    # Locales not to repack for
--    Required('ignore-locales'): _by_platform([basestring]),
--
--    # All l10n jobs use mozharness
--    Required('mozharness'): {
--        # Script to invoke for mozharness
--        Required('script'): _by_platform(basestring),
--
--        # Config files passed to the mozharness script
--        Required('config'): _by_platform([basestring]),
--
--        # Options to pass to the mozharness script
--        Required('options'): _by_platform([basestring]),
--
--        # Action commands to provide to mozharness script
--        Required('actions'): _by_platform([basestring]),
--    },
--    # Items for the taskcluster index
--    Optional('index'): {
--        # Product to identify as in the taskcluster index
--        Required('product'): _by_platform(basestring),
--
--        # Job name to identify as in the taskcluster index
--        Required('job-name'): _by_platform(basestring),
--
--        # Type of index
--        Optional('type'): basestring,
--    },
--    # Description of the localized task
--    Required('description'): _by_platform(basestring),
--
--    Optional('run-on-projects'): job_description_schema['run-on-projects'],
--
--    # task object of the dependent task
--    Required('dependent-task'): object,
--
--    # worker-type to utilize
--    Required('worker-type'): _by_platform(basestring),
--
--    # File which contains the used locales
--    Required('locales-file'): _by_platform(basestring),
--
--    # Tooltool visibility required for task.
--    Required('tooltool'): _by_platform(Any('internal', 'public')),
--
--    # Information for treeherder
--    Required('treeherder'): {
--        # Platform to display the task on in treeherder
--        Required('platform'): _by_platform(basestring),
--
--        # Symbol to use
--        Required('symbol'): basestring,
--
--        # Tier this task is
--        Required('tier'): _by_platform(int),
--    },
--
--    # Extra environment values to pass to the worker
--    Optional('env'): _by_platform({basestring: taskref_or_string}),
--
--    # Number of chunks to split the locale repacks up into
--    Optional('chunks'): _by_platform(int),
--
--    # Task deps to chain this task with, added in transforms from dependent-task
--    # if this is a nightly
--    Optional('dependencies'): {basestring: basestring},
--
--    # Run the task when the listed files change (if present).
--    Optional('when'): {
--        'files-changed': [basestring]
--    },
--
--    # passed through directly to the job description
--    Optional('attributes'): job_description_schema['attributes'],
--    Optional('extra'): job_description_schema['extra'],
--})
--
--transforms = TransformSequence()
--
--
--def _parse_locales_file(locales_file, platform=None):
--    """ Parse the passed locales file for a list of locales.
--        If platform is unset matches all platforms.
--    """
--    locales = []
--
--    with open(locales_file, mode='r') as f:
--        if locales_file.endswith('json'):
--            all_locales = json.load(f)
--            # XXX Only single locales are fetched
--            locales = {
--                locale: data['revision']
--                for locale, data in all_locales.items()
--                if 'android' in data['platforms']
--            }
--        else:
--            all_locales = f.read().split()
--            # in this context, 'default' means the tip revision of the l10n hg repo
--            locales = {locale: 'default' for locale in all_locales}
--    return locales
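A self-contained version of the parser, taking text instead of a file path so it can be exercised directly (the JSON shape mirrors the hunk above; only locales listing 'android' among their platforms are kept):

```python
import json


def parse_locales(text, is_json):
    """Return {locale: revision} from a locales file's contents."""
    if is_json:
        data = json.loads(text)
        return {
            locale: info['revision']
            for locale, info in data.items()
            if 'android' in info['platforms']
        }
    # plain files are a whitespace-separated locale list pinned to the
    # 'default' (tip) revision
    return {locale: 'default' for locale in text.split()}


print(parse_locales('de fr ja-JP-mac', is_json=False))
print(parse_locales(
    '{"de": {"revision": "abc123", "platforms": ["android"]}}', is_json=True))
```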
--
--
--def _remove_locales(locales, to_remove=()):
--    # Drop the listed locales; e.g. ja-JP-mac is mac-only, and no mac builds
--    # are being repacked here.
--    return {
--        locale: revision for locale, revision in locales.items() if locale not in to_remove
--    }
--
--
--@transforms.add
--def setup_name(config, jobs):
--    for job in jobs:
--        dep = job['dependent-task']
--        if dep.attributes.get('nightly'):
--            # Set the name to the same as the dep task, without kind name.
--            # Label will get set automatically with this kind's name.
--            job['name'] = job.get('name',
--                                  dep.task['metadata']['name'][
--                                    len(dep.kind) + 1:])
--        else:
--            # Set to match legacy use at the moment (to support documented try
--            # syntax). Set the name to the same as the dep task + '-l10n', without the
--            # kind name attached, since that gets added when label is generated
--            name, jobtype = dep.task['metadata']['name'][len(dep.kind) + 1:].split('/')
--            job['name'] = "{}-l10n/{}".format(name, jobtype)
--        yield job
--
--
--@transforms.add
--def copy_in_useful_magic(config, jobs):
--    for job in jobs:
--        dep = job['dependent-task']
--        attributes = job.setdefault('attributes', {})
--        # build-platform is needed on `job` for by-build-platform
--        job['build-platform'] = dep.attributes.get("build_platform")
--        attributes['build_type'] = dep.attributes.get("build_type")
--        if dep.attributes.get("nightly"):
--            attributes['nightly'] = dep.attributes.get("nightly")
--        else:
--            # set build_platform to have l10n as well, to match older l10n setup
--            # for now
--            job['build-platform'] = "{}-l10n".format(job['build-platform'])
--
--        attributes['build_platform'] = job['build-platform']
--        yield job
--
--
--@transforms.add
--def validate_early(config, jobs):
--    for job in jobs:
--        yield validate_schema(l10n_description_schema, job,
--                              "In job {!r}:".format(job.get('name', 'unknown')))
--
--
--@transforms.add
--def setup_nightly_dependency(config, jobs):
--    """ Sets up a task dependency to the signing job this relates to """
--    for job in jobs:
--        if not job['attributes'].get('nightly'):
--            yield job
--            continue  # do not add a dep unless we're a nightly
--        job['dependencies'] = {'unsigned-build': job['dependent-task'].label}
--        if job['attributes']['build_platform'].startswith('win'):
--            # Weave these in and just assume they will be there in the resulting graph
--            job['dependencies'].update({
--                'signed-build': 'signing-{}'.format(job['name']),
--                'repackage-signed': 'repackage-signing-repackage-{}'.format(job['name'])
--            })
--        yield job
--
--
--@transforms.add
--def handle_keyed_by(config, jobs):
--    """Resolve fields that can be keyed by platform, etc."""
--    fields = [
--        "locales-file",
--        "chunks",
--        "worker-type",
--        "description",
--        "run-time",
--        "tooltool",
--        "env",
--        "ignore-locales",
--        "mozharness.config",
--        "mozharness.options",
--        "mozharness.actions",
--        "mozharness.script",
--        "treeherder.tier",
--        "treeherder.platform",
--        "index.product",
--        "index.job-name",
--        "when.files-changed",
--    ]
--    for job in jobs:
--        job = copy.deepcopy(job)  # don't overwrite dict values here
--        for field in fields:
--            resolve_keyed_by(item=job, field=field, item_name=job['name'])
--        yield job
--
--
--@transforms.add
--def all_locales_attribute(config, jobs):
--    for job in jobs:
--        locales_with_changesets = _parse_locales_file(job["locales-file"])
--        locales_with_changesets = _remove_locales(locales_with_changesets,
--                                                  to_remove=job['ignore-locales'])
--
--        locales = sorted(locales_with_changesets.keys())
--        attributes = job.setdefault('attributes', {})
--        attributes["all_locales"] = locales
--        attributes["all_locales_with_changesets"] = locales_with_changesets
--        yield job
--
--
--@transforms.add
--def chunk_locales(config, jobs):
--    """ Utilizes chunking for l10n stuff """
--    for job in jobs:
--        chunks = job.get('chunks')
--        locales_with_changesets = job['attributes']['all_locales_with_changesets']
--        if chunks:
--            if chunks > len(locales_with_changesets):
--                # Reduce chunks down to the number of locales
--                chunks = len(locales_with_changesets)
--            for this_chunk in range(1, chunks + 1):
--                chunked = copy.deepcopy(job)
--                chunked['name'] = chunked['name'].replace(
--                    '/', '-{}/'.format(this_chunk), 1
--                )
--                chunked['mozharness']['options'] = chunked['mozharness'].get('options', [])
--                # chunkify doesn't work with dicts
--                locales_with_changesets_as_list = locales_with_changesets.items()
--                chunked_locales = chunkify(locales_with_changesets_as_list, this_chunk, chunks)
--                chunked['mozharness']['options'].extend([
--                    'locale={}:{}'.format(locale, changeset)
--                    for locale, changeset in chunked_locales
--                ])
--                chunked['attributes']['l10n_chunk'] = str(this_chunk)
--                # strip revision
--                chunked['attributes']['chunk_locales'] = [locale for locale, _ in chunked_locales]
--
--                # add the chunk number to the TH symbol
--                group, symbol = split_symbol(
--                    chunked.get('treeherder', {}).get('symbol', ''))
--                symbol += str(this_chunk)
--                chunked['treeherder']['symbol'] = join_symbol(group, symbol)
--                yield chunked
--        else:
--            job['mozharness']['options'] = job['mozharness'].get('options', [])
--            job['mozharness']['options'].extend([
--                'locale={}:{}'.format(locale, changeset)
--                for locale, changeset in locales_with_changesets.items()
--            ])
--            yield job
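`chunkify` comes from mozbuild; a stand-in with the same 1-based calling convention shows what the transform relies on, namely that N chunks partition the locale list into near-equal contiguous slices:

```python
def chunkify(items, this_chunk, total_chunks):
    """Return the 1-based this_chunk-th of total_chunks near-equal slices
    (a stand-in for mozbuild.chunkify.chunkify, not the real implementation)."""
    base, extra = divmod(len(items), total_chunks)
    # the first `extra` chunks carry one additional item
    start = (this_chunk - 1) * base + min(this_chunk - 1, extra)
    end = start + base + (1 if this_chunk <= extra else 0)
    return items[start:end]


locales = sorted({'de': 'a1', 'fr': 'b2', 'it': 'c3'}.items())
for chunk in (1, 2):
    print(chunk, chunkify(locales, chunk, 2))
# 1 [('de', 'a1'), ('fr', 'b2')]
# 2 [('it', 'c3')]
```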
--
--
--@transforms.add
--def mh_config_replace_project(config, jobs):
--    """ Replaces {project} in mh config entries with the current project """
--    # XXXCallek This is a bad pattern but exists to satisfy ease-of-porting for buildbot
--    for job in jobs:
--        job['mozharness']['config'] = map(
--            lambda x: x.format(project=config.params['project']),
--            job['mozharness']['config']
--            )
--        yield job
--
--
--@transforms.add
--def mh_options_replace_project(config, jobs):
--    """ Replaces {project} in mh option entries with the current project """
--    # XXXCallek This is a bad pattern but exists to satisfy ease-of-porting for buildbot
--    for job in jobs:
--        job['mozharness']['options'] = map(
--            lambda x: x.format(project=config.params['project']),
--            job['mozharness']['options']
--            )
--        yield job
--
--
--@transforms.add
--def chain_of_trust(config, jobs):
--    for job in jobs:
--        # add the docker image to the chain of trust inputs in task.extra
--        if not job['worker-type'].endswith("-b-win2012"):
--            cot = job.setdefault('extra', {}).setdefault('chainOfTrust', {})
--            cot.setdefault('inputs', {})['docker-image'] = {"task-reference": "<docker-image>"}
--        yield job
--
--
--@transforms.add
--def validate_again(config, jobs):
--    for job in jobs:
--        yield validate_schema(l10n_description_schema, job,
--                              "In job {!r}:".format(job.get('name', 'unknown')))
--
--
--@transforms.add
--def make_job_description(config, jobs):
--    for job in jobs:
--        job_description = {
--            'name': job['name'],
--            'worker-type': job['worker-type'],
--            'description': job['description'],
--            'run': {
--                'using': 'mozharness',
--                'job-script': 'taskcluster/scripts/builder/build-l10n.sh',
--                'config': job['mozharness']['config'],
--                'script': job['mozharness']['script'],
--                'actions': job['mozharness']['actions'],
--                'options': job['mozharness']['options'],
--            },
--            'attributes': job['attributes'],
--            'treeherder': {
--                'kind': 'build',
--                'tier': job['treeherder']['tier'],
--                'symbol': job['treeherder']['symbol'],
--                'platform': job['treeherder']['platform'],
--            },
--            'run-on-projects': job.get('run-on-projects') if job.get('run-on-projects') else [],
--        }
--        if job.get('extra'):
--            job_description['extra'] = job['extra']
--
--        if job['worker-type'].endswith("-b-win2012"):
--            job_description['worker'] = {
--                'os': 'windows',
--                'max-run-time': 7200,
--                'chain-of-trust': True,
--            }
--            job_description['run']['use-simple-package'] = False
--            job_description['run']['use-magic-mh-args'] = False
--        else:
--            job_description['worker'] = {
--                'docker-image': {'in-tree': 'desktop-build'},
--                'max-run-time': job['run-time'],
--                'chain-of-trust': True,
--            }
--            job_description['run']['tooltool-downloads'] = job['tooltool']
--            job_description['run']['need-xvfb'] = True
--
--        if job.get('index'):
--            job_description['index'] = {
--                'product': job['index']['product'],
--                'job-name': job['index']['job-name'],
--                'type': job['index'].get('type', 'generic'),
--            }
--
--        if job.get('dependencies'):
--            job_description['dependencies'] = job['dependencies']
--        if job.get('env'):
--            job_description['worker']['env'] = job['env']
--        if job.get('when', {}).get('files-changed'):
--            job_description.setdefault('when', {})
--            job_description['when']['files-changed'] = \
--                [job['locales-file']] + job['when']['files-changed']
--        yield job_description
-diff --git a/taskcluster/taskgraph/transforms/marionette_harness.py b/taskcluster/taskgraph/transforms/marionette_harness.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/marionette_harness.py
-+++ /dev/null
-@@ -1,37 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Set dynamic task description properties of the marionette-harness task.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--
--transforms = TransformSequence()
--
--
--@transforms.add
--def setup_task(config, tasks):
--    for task in tasks:
--        del task['name']
--        task['label'] = 'marionette-harness'
--        env = task['worker'].setdefault('env', {})
--        env.update({
--            'GECKO_BASE_REPOSITORY': config.params['base_repository'],
--            'GECKO_HEAD_REF': config.params['head_rev'],
--            'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
--            'GECKO_HEAD_REV': config.params['head_rev'],
--            'MOZ_BUILD_DATE': config.params['moz_build_date'],
--            'MOZ_SCM_LEVEL': config.params['level'],
--        })
--
--        task['worker']['caches'] = [{
--            'type': 'persistent',
--            'name': 'level-{}-{}-tc-vcs'.format(
--                config.params['level'], config.params['project']),
--            'mount-point': "/builds/worker/.tc-vcs",
--        }]
--
--        yield task
-diff --git a/taskcluster/taskgraph/transforms/nightly_l10n_signing.py b/taskcluster/taskgraph/transforms/nightly_l10n_signing.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/nightly_l10n_signing.py
-+++ /dev/null
-@@ -1,66 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Transform the signing task into an actual task description.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.util.signed_artifacts import generate_specifications_of_artifacts_to_sign
--from taskgraph.util.treeherder import join_symbol
--
--transforms = TransformSequence()
--
--
--@transforms.add
--def make_signing_description(config, jobs):
--    for job in jobs:
--        job['depname'] = 'unsigned-repack'
--
--        dep_job = job['dependent-task']
--
--        label = dep_job.label.replace("nightly-l10n-", "signing-l10n-")
--        job['label'] = label
--
--        # add the chunk number to the TH symbol
--        symbol = 'Ns{}'.format(dep_job.attributes.get('l10n_chunk'))
--        group = 'tc-L10n'
--
--        job['treeherder'] = {
--            'symbol': join_symbol(group, symbol),
--        }
--
--        yield job
--
--
--@transforms.add
--def define_upstream_artifacts(config, jobs):
--    for job in jobs:
--        dep_job = job['dependent-task']
--
--        locale_specifications = generate_specifications_of_artifacts_to_sign(
--            dep_job.attributes.get('build_platform'),
--            is_nightly=True,
--            keep_locale_template=True
--        )
--
--        upstream_artifacts = []
--        for spec in locale_specifications:
--            upstream_artifacts.append({
--                'taskId': {'task-reference': '<unsigned-repack>'},
--                'taskType': 'l10n',
--                # Expand the path templates from the spec (above), one per
--                # locale present in the chunk being signed.
--                'paths': [
--                    path_template.format(locale=locale)
--                    for locale in dep_job.attributes.get('chunk_locales', [])
--                    for path_template in spec['artifacts']
--                ],
--                'formats': spec['formats']
--            })
--
--        job['upstream-artifacts'] = upstream_artifacts
--
--        yield job
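The nested comprehension above expands one path per (locale, template) pair; isolated, with illustrative spec data:

```python
def expand_upstream_artifacts(specs, locales):
    """Turn signing specs plus a chunk's locales into scriptworker
    upstream-artifact entries (data shapes mirror the hunk above)."""
    return [{
        'taskId': {'task-reference': '<unsigned-repack>'},
        'taskType': 'l10n',
        'paths': [template.format(locale=locale)
                  for locale in locales
                  for template in spec['artifacts']],
        'formats': spec['formats'],
    } for spec in specs]


specs = [{'artifacts': ['public/build/{locale}/target.tar.bz2'],
          'formats': ['gpg']}]
print(expand_upstream_artifacts(specs, ['de', 'fr']))
```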
-diff --git a/taskcluster/taskgraph/transforms/push_apk.py b/taskcluster/taskgraph/transforms/push_apk.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/push_apk.py
-+++ /dev/null
-@@ -1,75 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Transform the push-apk kind into an actual task description.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import functools
--
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.util.schema import Schema
--from taskgraph.util.scriptworker import get_push_apk_scope, get_push_apk_track, \
--    get_push_apk_dry_run_option, get_push_apk_rollout_percentage
--from taskgraph.util.push_apk import fill_labels_tranform, validate_jobs_schema_transform_partial, \
--    validate_dependent_tasks_transform, delete_non_required_fields_transform, generate_dependencies
--
--from voluptuous import Required
--
--
--transforms = TransformSequence()
--
--push_apk_description_schema = Schema({
--    # the dependent tasks (objects) for this push-apk job, used to derive its dependencies.
--    Required('dependent-tasks'): object,
--    Required('name'): basestring,
--    Required('label'): basestring,
--    Required('description'): basestring,
--    Required('attributes'): object,
--    Required('treeherder'): object,
--    Required('run-on-projects'): list,
--    Required('worker-type'): basestring,
--    Required('worker'): object,
--    Required('scopes'): None,
--    Required('deadline-after'): basestring,
--})
--
--validate_jobs_schema_transform = functools.partial(
--    validate_jobs_schema_transform_partial,
--    push_apk_description_schema,
--    'PushApk'
--)
--
--transforms.add(fill_labels_tranform)
--transforms.add(validate_jobs_schema_transform)
--transforms.add(validate_dependent_tasks_transform)
--
--
--@transforms.add
--def make_task_description(config, jobs):
--    for job in jobs:
--        job['dependencies'] = generate_dependencies(job['dependent-tasks'])
--        job['worker']['upstream-artifacts'] = generate_upstream_artifacts(job['dependencies'])
--        job['worker']['google-play-track'] = get_push_apk_track(config)
--        job['worker']['dry-run'] = get_push_apk_dry_run_option(config)
--
--        rollout_percentage = get_push_apk_rollout_percentage(config)
--        if rollout_percentage is not None:
--            job['worker']['rollout-percentage'] = rollout_percentage
--
--        job['scopes'] = [get_push_apk_scope(config)]
--
--        yield job
--
--
--transforms.add(delete_non_required_fields_transform)
--
--
--def generate_upstream_artifacts(dependencies):
--    return [{
--        'taskId': {'task-reference': '<{}>'.format(task_kind)},
--        'taskType': 'signing',
--        'paths': ['public/build/target.apk'],
--    } for task_kind in dependencies.keys() if 'breakpoint' not in task_kind]
-diff --git a/taskcluster/taskgraph/transforms/push_apk_breakpoint.py b/taskcluster/taskgraph/transforms/push_apk_breakpoint.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/push_apk_breakpoint.py
-+++ /dev/null
-@@ -1,68 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Transform the push-apk-breakpoint kind into an actual task description.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import functools
--
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.util.schema import Schema
--from taskgraph.util.scriptworker import get_push_apk_breakpoint_worker_type
--from taskgraph.util.push_apk import fill_labels_tranform, validate_jobs_schema_transform_partial, \
--    validate_dependent_tasks_transform, delete_non_required_fields_transform, generate_dependencies
--from voluptuous import Required
--
--
--transforms = TransformSequence()
--
--push_apk_breakpoint_description_schema = Schema({
--    # the dependent tasks (objects) for this push-apk-breakpoint job, used to derive its dependencies.
--    Required('dependent-tasks'): object,
--    Required('name'): basestring,
--    Required('label'): basestring,
--    Required('description'): basestring,
--    Required('attributes'): object,
--    Required('worker-type'): None,
--    Required('worker'): object,
--    Required('treeherder'): object,
--    Required('run-on-projects'): list,
--    Required('deadline-after'): basestring,
--})
--
--validate_jobs_schema_transform = functools.partial(
--    validate_jobs_schema_transform_partial,
--    push_apk_breakpoint_description_schema,
--    'PushApkBreakpoint'
--)
--
--transforms.add(fill_labels_tranform)
--transforms.add(validate_jobs_schema_transform)
--transforms.add(validate_dependent_tasks_transform)
--
--
--@transforms.add
--def make_task_description(config, jobs):
--    for job in jobs:
--        job['dependencies'] = generate_dependencies(job['dependent-tasks'])
--
--        worker_type = get_push_apk_breakpoint_worker_type(config)
--        job['worker-type'] = worker_type
--
--        job['worker']['payload'] = {} if 'human' in worker_type else {
--                'image': 'ubuntu:16.10',
--                'command': [
--                    '/bin/bash',
--                    '-c',
--                    'echo "Dummy task while while bug 1351664 is implemented"'
--                ],
--                'maxRunTime': 600,
--            }
--
--        yield job
--
--
--transforms.add(delete_non_required_fields_transform)
-diff --git a/taskcluster/taskgraph/transforms/repackage.py b/taskcluster/taskgraph/transforms/repackage.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/repackage.py
-+++ /dev/null
-@@ -1,274 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Transform the repackage task into an actual task description.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.util.attributes import copy_attributes_from_dependent_job
--from taskgraph.util.schema import validate_schema, Schema
--from taskgraph.transforms.task import task_description_schema
--from voluptuous import Any, Required, Optional
--
--_TC_ARTIFACT_LOCATION = \
--        'https://queue.taskcluster.net/v1/task/{task_id}/artifacts/public/build/{postfix}'
--
--transforms = TransformSequence()
--
--# Voluptuous uses marker objects as dictionary *keys*, but they are not
--# comparable, so we cast all of the keys back to regular strings
--task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
--
--# shortcut for a string where task references are allowed
--taskref_or_string = Any(
--    basestring,
--    {Required('task-reference'): basestring})
--
--packaging_description_schema = Schema({
--    # the dependent task (object) for this job, used to inform repackaging.
--    Required('dependent-task'): object,
--
--    # depname is used in taskref's to identify the taskID of the signed things
--    Required('depname', default='build'): basestring,
--
--    # unique label to describe this repackaging task
--    Optional('label'): basestring,
--
--    # treeherder is allowed here to override any defaults we use for repackaging.  See
--    # taskcluster/taskgraph/transforms/task.py for the schema details, and the
--    # below transforms for defaults of various values.
--    Optional('treeherder'): task_description_schema['treeherder'],
--
--    # If a l10n task, the corresponding locale
--    Optional('locale'): basestring,
--
--    # Routes specific to this task, if defined
--    Optional('routes'): [basestring],
--
--    # passed through directly to the job description
--    Optional('extra'): task_description_schema['extra'],
--
--})
--
--
--@transforms.add
--def validate(config, jobs):
--    for job in jobs:
--        label = job.get('dependent-task', object).__dict__.get('label', '?no-label?')
--        yield validate_schema(
--            packaging_description_schema, job,
--            "In packaging ({!r} kind) task for {!r}:".format(config.kind, label))
--
--
--@transforms.add
--def make_repackage_description(config, jobs):
--    for job in jobs:
--        dep_job = job['dependent-task']
--
--        label = job.get('label',
--                        dep_job.label.replace("signing-", "repackage-"))
--        job['label'] = label
--
--        yield job
--
--
--@transforms.add
--def make_job_description(config, jobs):
--    for job in jobs:
--        dep_job = job['dependent-task']
--        dependencies = {dep_job.attributes.get('kind'): dep_job.label}
--        if len(dep_job.dependencies) > 1:
--            raise NotImplementedError(
--                "Can't repackage a signing task with multiple dependencies")
--        signing_dependencies = dep_job.dependencies
--        # This is so we get the build task in our dependencies to
--        # have better beetmover support.
--        dependencies.update(signing_dependencies)
--
--        treeherder = job.get('treeherder', {})
--        treeherder.setdefault('symbol', 'tc(Nr)')
--        dep_th_platform = dep_job.task.get('extra', {}).get(
--            'treeherder', {}).get('machine', {}).get('platform', '')
--        treeherder.setdefault('platform', "{}/opt".format(dep_th_platform))
--        treeherder.setdefault('tier', 1)
--        treeherder.setdefault('kind', 'build')
--        build_task = None
--        signing_task = None
--        for dependency in dependencies.keys():
--            if 'signing' in dependency:
--                signing_task = dependency
--            else:
--                build_task = dependency
--        if job.get('locale'):
--            # XXXCallek: todo: rewrite dependency finding
--            # Use string slicing to strip out 'nightly-l10n-' .. '-<chunk>/opt'
--            # We need this additional dependency to support finding the mar binary
--            # Which is needed in order to generate a new complete.mar
--            dependencies['build'] = "build-{}/opt".format(
--                dependencies[build_task][13:dependencies[build_task].rfind('-')])
--            build_task = 'build'
--        signing_task_ref = "<{}>".format(signing_task)
--        build_task_ref = "<{}>".format(build_task)
--
--        attributes = copy_attributes_from_dependent_job(dep_job)
--        attributes['repackage_type'] = 'repackage'
--
--        locale = None
--        if job.get('locale'):
--            locale = job['locale']
--            attributes['locale'] = locale
--
--        level = config.params['level']
--
--        build_platform = attributes['build_platform']
--        run = {
--            'using': 'mozharness',
--            'script': 'mozharness/scripts/repackage.py',
--            'config': _generate_task_mozharness_config(build_platform),
--            'job-script': 'taskcluster/scripts/builder/repackage.sh',
--            'actions': ['download_input', 'setup', 'repackage'],
--            'extra-workspace-cache-key': 'repackage',
--        }
--
--        worker = {
--            'env': _generate_task_env(build_platform, build_task_ref,
--                                      signing_task_ref, locale=locale),
--            'artifacts': _generate_task_output_files(build_platform, locale=locale),
--            'chain-of-trust': True,
--            'max-run-time': 7200 if build_platform.startswith('win') else 3600,
--        }
--
--        if locale:
--            # Make sure we specify the locale-specific upload dir
--            worker['env'].update(LOCALE=locale)
--
--        if build_platform.startswith('win'):
--            worker_type = 'aws-provisioner-v1/gecko-%s-b-win2012' % level
--            run['use-magic-mh-args'] = False
--        else:
--            if build_platform.startswith('macosx'):
--                worker_type = 'aws-provisioner-v1/gecko-%s-b-macosx64' % level
--            elif build_platform.startswith('linux'):
--                worker_type = 'aws-provisioner-v1/gecko-%s-b-linux' % level
--            else:
--                raise NotImplementedError(
--                    'Unsupported build_platform: "{}"'.format(build_platform)
--                )
--
--            run['tooltool-downloads'] = 'internal'
--            worker['docker-image'] = {"in-tree": "desktop-build"}
--
--            cot = job.setdefault('extra', {}).setdefault('chainOfTrust', {})
--            cot.setdefault('inputs', {})['docker-image'] = {"task-reference": "<docker-image>"}
--
--        task = {
--            'label': job['label'],
--            'description': "{} Repackage".format(
--                dep_job.task["metadata"]["description"]),
--            'worker-type': worker_type,
--            'dependencies': dependencies,
--            'attributes': attributes,
--            'run-on-projects': dep_job.attributes.get('run_on_projects'),
--            'treeherder': treeherder,
--            'routes': job.get('routes', []),
--            'extra': job.get('extra', {}),
--            'worker': worker,
--            'run': run,
--        }
--        yield task
--
--
--def _generate_task_mozharness_config(build_platform):
--    if build_platform.startswith('macosx'):
--        return ['repackage/osx_signed.py']
--    else:
--        bits = 32 if '32' in build_platform else 64
--        if build_platform.startswith('linux'):
--            return ['repackage/linux{}_signed.py'.format(bits)]
--        elif build_platform.startswith('win'):
--            return ['repackage/win{}_signed.py'.format(bits)]
--
--    raise NotImplementedError('Unsupported build_platform: "{}"'.format(build_platform))
--
--
--def _generate_task_env(build_platform, build_task_ref, signing_task_ref, locale=None):
--    mar_prefix = _generate_taskcluster_prefix(build_task_ref, postfix='host/bin/', locale=None)
--    signed_prefix = _generate_taskcluster_prefix(signing_task_ref, locale=locale)
--
--    if build_platform.startswith('linux') or build_platform.startswith('macosx'):
--        tarball_extension = 'bz2' if build_platform.startswith('linux') else 'gz'
--        return {
--            'SIGNED_INPUT': {'task-reference': '{}target.tar.{}'.format(
--                signed_prefix, tarball_extension
--            )},
--            'UNSIGNED_MAR': {'task-reference': '{}mar'.format(mar_prefix)},
--        }
--    elif build_platform.startswith('win'):
--        task_env = {
--            'SIGNED_ZIP': {'task-reference': '{}target.zip'.format(signed_prefix)},
--            'SIGNED_SETUP': {'task-reference': '{}setup.exe'.format(signed_prefix)},
--            'UNSIGNED_MAR': {'task-reference': '{}mar.exe'.format(mar_prefix)},
--        }
--
--        # Stub installer is only generated on win32
--        if '32' in build_platform:
--            task_env['SIGNED_SETUP_STUB'] = {
--                'task-reference': '{}setup-stub.exe'.format(signed_prefix),
--            }
--        return task_env
--
--    raise NotImplementedError('Unsupported build_platform: "{}"'.format(build_platform))
--
--
--def _generate_taskcluster_prefix(task_id, postfix='', locale=None):
--    if locale:
--        postfix = '{}/{}'.format(locale, postfix)
--
--    return _TC_ARTIFACT_LOCATION.format(task_id=task_id, postfix=postfix)
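The prefix helper composes the queue URL and an optional locale subdirectory; a runnable sketch (the `<...>` task reference is resolved to a real taskId elsewhere):

```python
_TC_ARTIFACT_LOCATION = ('https://queue.taskcluster.net/v1/task/'
                         '{task_id}/artifacts/public/build/{postfix}')


def artifact_prefix(task_ref, postfix='', locale=None):
    """URL prefix the repackage task downloads from; locale-specific
    artifacts live one directory deeper."""
    if locale:
        postfix = '{}/{}'.format(locale, postfix)
    return _TC_ARTIFACT_LOCATION.format(task_id=task_ref, postfix=postfix)


print(artifact_prefix('<build-signing>', locale='de') + 'target.tar.bz2')
```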
--
--
--def _generate_task_output_files(build_platform, locale=None):
--    locale_output_path = '{}/'.format(locale) if locale else ''
--
--    if build_platform.startswith('linux') or build_platform.startswith('macosx'):
--        output_files = [{
--            'type': 'file',
--            'path': '/builds/worker/workspace/build/artifacts/{}target.complete.mar'
--                    .format(locale_output_path),
--            'name': 'public/build/{}target.complete.mar'.format(locale_output_path),
--        }]
--
--        if build_platform.startswith('macosx'):
--            output_files.append({
--                'type': 'file',
--                'path': '/builds/worker/workspace/build/artifacts/{}target.dmg'
--                        .format(locale_output_path),
--                'name': 'public/build/{}target.dmg'.format(locale_output_path),
--            })
--
--    elif build_platform.startswith('win'):
--        output_files = [{
--            'type': 'file',
--            'path': 'public/build/{}target.installer.exe'.format(locale_output_path),
--            'name': 'public/build/{}target.installer.exe'.format(locale_output_path),
--        }, {
--            'type': 'file',
--            'path': 'public/build/{}target.complete.mar'.format(locale_output_path),
--            'name': 'public/build/{}target.complete.mar'.format(locale_output_path),
--        }]
--
--        # Stub installer is only generated on win32
--        if '32' in build_platform:
--            output_files.append({
--                'type': 'file',
--                'path': 'public/build/{}target.stub-installer.exe'.format(locale_output_path),
--                'name': 'public/build/{}target.stub-installer.exe'.format(locale_output_path),
--            })
--
--    if output_files:
--        return output_files
--
--    raise NotImplementedError('Unsupported build_platform: "{}"'.format(build_platform))
-diff --git a/taskcluster/taskgraph/transforms/repackage_l10n.py b/taskcluster/taskgraph/transforms/repackage_l10n.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/repackage_l10n.py
-+++ /dev/null
-@@ -1,33 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Transform the repackage task into an actual task description.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--
--transforms = TransformSequence()
--
--
--@transforms.add
--def split_locales(config, jobs):
--    for job in jobs:
--        dep_job = job['dependent-task']
--        for locale in dep_job.attributes.get('chunk_locales', []):
--            label = dep_job.label.replace("signing-", "repackage-{}-".format(locale))
--            label = "repackage-{}-{}/{}".format(locale,
--                                                dep_job.attributes['build_platform'],
--                                                dep_job.attributes['build_type'],)
--
--            treeherder = job.get('treeherder', {})
--            treeherder['symbol'] = 'tc-L10n-Rpk({})'.format(locale)
--
--            yield {
--                'locale': locale,
--                'label': label,
--                'treeherder': treeherder,
--                'dependent-task': dep_job,
--            }
-diff --git a/taskcluster/taskgraph/transforms/repackage_routes.py b/taskcluster/taskgraph/transforms/repackage_routes.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/repackage_routes.py
-+++ /dev/null
-@@ -1,45 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Add indexes to repackage kinds
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.util.schema import validate_schema
--from taskgraph.transforms.job import job_description_schema
--
--transforms = TransformSequence()
--
--
--@transforms.add
--def validate(config, jobs):
--    for job in jobs:
--        label = job['label']
--        yield validate_schema(
--            job_description_schema, job,
--            "In repackage-signing ({!r} kind) task for {!r}:".format(config.kind, label))
--
--
--@transforms.add
--def add_indexes(config, jobs):
--    for job in jobs:
--        repackage_type = job['attributes'].get('repackage_type')
--        if repackage_type:
--            build_platform = job['attributes']['build_platform']
--            job_name = '{}-{}'.format(build_platform, repackage_type)
--            product = job.get('index', {}).get('product', 'firefox')
--            index_type = 'generic'
--            if job['attributes'].get('nightly'):
--                index_type = 'nightly'
--            if job['attributes'].get('locale'):
--                index_type = 'l10n'
--            job['index'] = {
--                'job-name': job_name,
--                'product': product,
--                'type': index_type
--            }
--
--        yield job
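The precedence in the hunk above (l10n beats nightly beats generic) is easy to miss; restated as a pure function, with the product defaulting simplified:

```python
def repackage_index(attributes):
    """Derive the index entry for a repackage task, or None if the task
    carries no repackage_type attribute."""
    repackage_type = attributes.get('repackage_type')
    if not repackage_type:
        return None
    index_type = 'generic'
    if attributes.get('nightly'):
        index_type = 'nightly'
    if attributes.get('locale'):  # l10n wins over nightly
        index_type = 'l10n'
    return {
        'job-name': '{}-{}'.format(attributes['build_platform'], repackage_type),
        'product': 'firefox',
        'type': index_type,
    }


print(repackage_index({'repackage_type': 'repackage',
                       'build_platform': 'win32', 'nightly': True}))
```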
-diff --git a/taskcluster/taskgraph/transforms/repackage_signing.py b/taskcluster/taskgraph/transforms/repackage_signing.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/repackage_signing.py
-+++ /dev/null
-@@ -1,137 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Transform the repackage signing task into an actual task description.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.util.attributes import copy_attributes_from_dependent_job
--from taskgraph.util.schema import validate_schema, Schema
--from taskgraph.util.scriptworker import get_signing_cert_scope_per_platform
--from taskgraph.transforms.task import task_description_schema
--from voluptuous import Required, Optional
--
--# Voluptuous uses marker objects as dictionary *keys*, but they are not
--# comparable, so we cast all of the keys back to regular strings
--task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
--
--transforms = TransformSequence()
--
--repackage_signing_description_schema = Schema({
--    Required('dependent-task'): object,
--    Required('depname', default='repackage'): basestring,
--    Optional('label'): basestring,
--    Optional('treeherder'): task_description_schema['treeherder'],
--})
--
--
--@transforms.add
--def validate(config, jobs):
--    for job in jobs:
--        label = job.get('dependent-task', object).__dict__.get('label', '?no-label?')
--        yield validate_schema(
--            repackage_signing_description_schema, job,
--            "In repackage-signing ({!r} kind) task for {!r}:".format(config.kind, label))
--
--
--@transforms.add
--def make_repackage_signing_description(config, jobs):
--    for job in jobs:
--        dep_job = job['dependent-task']
--
--        treeherder = job.get('treeherder', {})
--        treeherder.setdefault('symbol', 'tc-rs(N)')
--        dep_th_platform = dep_job.task.get('extra', {}).get(
--            'treeherder', {}).get('machine', {}).get('platform', '')
--        treeherder.setdefault('platform',
--                              "{}/opt".format(dep_th_platform))
--        treeherder.setdefault('tier', 1)
--        treeherder.setdefault('kind', 'build')
--
--        label = job.get('label', "repackage-signing-{}".format(dep_job.label))
--        dependencies = {"repackage": dep_job.label}
--
--        signing_dependencies = dep_job.dependencies
--        # This is so we get the build task etc in our dependencies to
--        # have better beetmover support.
--        dependencies.update(signing_dependencies)
--        attributes = copy_attributes_from_dependent_job(dep_job)
--        attributes['repackage_type'] = 'repackage-signing'
--
--        locale_str = ""
--        if dep_job.attributes.get('locale'):
--            treeherder['symbol'] = 'tc-rs({})'.format(dep_job.attributes.get('locale'))
--            attributes['locale'] = dep_job.attributes.get('locale')
--            locale_str = "{}/".format(dep_job.attributes.get('locale'))
--
--        build_platform = dep_job.attributes.get('build_platform')
--        is_nightly = dep_job.attributes.get('nightly')
--        signing_cert_scope = get_signing_cert_scope_per_platform(
--            build_platform, is_nightly, config
--        )
--        scopes = [signing_cert_scope, 'project:releng:signing:format:mar_sha384']
--
--        upstream_artifacts = [{
--            "taskId": {"task-reference": "<repackage>"},
--            "taskType": "repackage",
--            "paths": [
--                "public/build/{}target.complete.mar".format(locale_str),
--            ],
--            "formats": ["mar_sha384"]
--        }]
--        if 'win' in build_platform:
--            upstream_artifacts.append({
--                "taskId": {"task-reference": "<repackage>"},
--                "taskType": "repackage",
--                "paths": [
--                    "public/build/{}target.installer.exe".format(locale_str),
--                ],
--                "formats": ["sha2signcode"]
--            })
--            scopes.append("project:releng:signing:format:sha2signcode")
--
--            # Stub installer is only generated on win32
--            if '32' in build_platform:
--                upstream_artifacts.append({
--                    "taskId": {"task-reference": "<repackage>"},
--                    "taskType": "repackage",
--                    "paths": [
--                        "public/build/{}target.stub-installer.exe".format(locale_str),
--                    ],
--                    "formats": ["sha2signcodestub"]
--                })
--                scopes.append("project:releng:signing:format:sha2signcodestub")
--
--        task = {
--            'label': label,
--            'description': "Repackage signing {} ".format(
--                dep_job.task["metadata"]["description"]),
--            'worker-type': "scriptworker-prov-v1/signing-linux-v1",
--            'worker': {'implementation': 'scriptworker-signing',
--                       'upstream-artifacts': upstream_artifacts,
--                       'max-run-time': 3600},
--            'scopes': scopes,
--            'dependencies': dependencies,
--            'attributes': attributes,
--            'run-on-projects': dep_job.attributes.get('run_on_projects'),
--            'treeherder': treeherder,
--        }
--
--        funsize_platforms = [
--            'linux-nightly',
--            'linux64-nightly',
--            'macosx64-nightly',
--            'win32-nightly',
--            'win64-nightly'
--        ]
--        if build_platform in funsize_platforms and is_nightly:
--            route_template = "project.releng.funsize.level-{level}.{project}"
--            task['routes'] = [
--                route_template.format(project=config.params['project'],
--                                      level=config.params['level'])
--            ]
--
--        yield task
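
The platform branching in the deleted repackage_signing.py accumulates one upstream-artifact entry and one signing scope per format. A condensed sketch of just that logic, assuming a hypothetical helper name (the artifact paths and scope strings come from the patch above; the signing certificate scope is omitted here):

    def signing_work(build_platform, locale=None):
        """Return (upstream_artifacts, format_scopes) for a repackage-signing task."""
        locale_str = '{}/'.format(locale) if locale else ''

        def artifact(path, fmt):
            return {'taskId': {'task-reference': '<repackage>'},
                    'taskType': 'repackage',
                    'paths': ['public/build/{}{}'.format(locale_str, path)],
                    'formats': [fmt]}

        artifacts = [artifact('target.complete.mar', 'mar_sha384')]
        scopes = ['project:releng:signing:format:mar_sha384']
        if 'win' in build_platform:
            artifacts.append(artifact('target.installer.exe', 'sha2signcode'))
            scopes.append('project:releng:signing:format:sha2signcode')
            if '32' in build_platform:  # the stub installer only exists on win32
                artifacts.append(artifact('target.stub-installer.exe', 'sha2signcodestub'))
                scopes.append('project:releng:signing:format:sha2signcodestub')
        return artifacts, scopes

    artifacts, scopes = signing_work('win32-nightly', locale='de')
    print(len(artifacts), scopes[-1])
    # 3 project:releng:signing:format:sha2signcodestub
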
-diff --git a/taskcluster/taskgraph/transforms/signing.py b/taskcluster/taskgraph/transforms/signing.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/signing.py
-+++ /dev/null
-@@ -1,145 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Transform the signing task into an actual task description.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.util.attributes import copy_attributes_from_dependent_job
--from taskgraph.util.schema import validate_schema, Schema
--from taskgraph.util.scriptworker import get_signing_cert_scope_per_platform
--from taskgraph.transforms.task import task_description_schema
--from voluptuous import Any, Required, Optional
--
--
--# Voluptuous uses marker objects as dictionary *keys*, but they are not
--# comparable, so we cast all of the keys back to regular strings
--task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}
--
--transforms = TransformSequence()
--
--# shortcut for a string where task references are allowed
--taskref_or_string = Any(
--    basestring,
--    {Required('task-reference'): basestring})
--
--signing_description_schema = Schema({
--    # the dependent task (object) for this signing job, used to inform signing.
--    Required('dependent-task'): object,
--
--    # Artifacts from dep task to sign - Sync with taskgraph/transforms/task.py
--    # because this is passed directly into the signingscript worker
--    Required('upstream-artifacts'): [{
--        # taskId of the task with the artifact
--        Required('taskId'): taskref_or_string,
--
--        # type of signing task (for CoT)
--        Required('taskType'): basestring,
--
--        # Paths to the artifacts to sign
--        Required('paths'): [basestring],
--
--        # Signing formats to use on each of the paths
--        Required('formats'): [basestring],
--    }],
--
--    # depname is used in task references to identify the taskId of the unsigned artifacts
--    Required('depname', default='build'): basestring,
--
--    # unique label to describe this signing task, defaults to {dep.label}-signing
--    Optional('label'): basestring,
--
--    # treeherder is allowed here to override any defaults we use for signing.  See
--    # taskcluster/taskgraph/transforms/task.py for the schema details, and the
--    # below transforms for defaults of various values.
--    Optional('treeherder'): task_description_schema['treeherder'],
--
--    # Routes specific to this task, if defined
--    Optional('routes'): [basestring],
--})
--
--
--@transforms.add
--def validate(config, jobs):
--    for job in jobs:
--        label = job.get('dependent-task', object).__dict__.get('label', '?no-label?')
--        yield validate_schema(
--            signing_description_schema, job,
--            "In signing ({!r} kind) task for {!r}:".format(config.kind, label))
--
--
--@transforms.add
--def make_task_description(config, jobs):
--    for job in jobs:
--        dep_job = job['dependent-task']
--
--        signing_format_scopes = []
--        formats = set([])
--        for artifacts in job['upstream-artifacts']:
--            for f in artifacts['formats']:
--                formats.add(f)  # Add each format only once
--        for format in formats:
--            signing_format_scopes.append("project:releng:signing:format:{}".format(format))
--
--        treeherder = job.get('treeherder', {})
--        is_nightly = dep_job.attributes.get('nightly', False)
--        treeherder.setdefault('symbol', _generate_treeherder_symbol(is_nightly))
--
--        dep_th_platform = dep_job.task.get('extra', {}).get(
--            'treeherder', {}).get('machine', {}).get('platform', '')
--        build_type = dep_job.attributes.get('build_type')
--        build_platform = dep_job.attributes.get('build_platform')
--        treeherder.setdefault('platform', _generate_treeherder_platform(
--            dep_th_platform, build_platform, build_type
--        ))
--
--        treeherder.setdefault('tier', 1)
--        treeherder.setdefault('kind', 'build')
--
--        label = job.get('label', "{}-signing".format(dep_job.label))
--
--        attributes = copy_attributes_from_dependent_job(dep_job)
--        attributes['signed'] = True
--
--        if dep_job.attributes.get('chunk_locales'):
--            # Used for l10n attribute passthrough
--            attributes['chunk_locales'] = dep_job.attributes.get('chunk_locales')
--
--        signing_cert_scope = get_signing_cert_scope_per_platform(
--            dep_job.attributes.get('build_platform'), is_nightly, config
--        )
--
--        task = {
--            'label': label,
--            'description': "{} Signing".format(
--                dep_job.task["metadata"]["description"]),
--            'worker-type': _generate_worker_type(signing_cert_scope),
--            'worker': {'implementation': 'scriptworker-signing',
--                       'upstream-artifacts': job['upstream-artifacts'],
--                       'max-run-time': 3600},
--            'scopes': [signing_cert_scope] + signing_format_scopes,
--            'dependencies': {job['depname']: dep_job.label},
--            'attributes': attributes,
--            'run-on-projects': dep_job.attributes.get('run_on_projects'),
--            'treeherder': treeherder,
--            'routes': job.get('routes', []),
--        }
--
--        yield task
--
--
--def _generate_treeherder_platform(dep_th_platform, build_platform, build_type):
--    actual_build_type = 'pgo' if '-pgo' in build_platform else build_type
--    return '{}/{}'.format(dep_th_platform, actual_build_type)
--
--
--def _generate_treeherder_symbol(is_nightly):
--    return 'tc(Ns)' if is_nightly else 'tc(Bs)'
--
--
--def _generate_worker_type(signing_cert_scope):
--    worker_type = 'depsigning' if 'dep-signing' in signing_cert_scope else 'signing-linux-v1'
--    return 'scriptworker-prov-v1/{}'.format(worker_type)
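
In the deleted signing.py, each distinct signing format across all upstream artifacts maps to exactly one scope; the set-based dedup is easy to isolate. A sketch with a hypothetical function name and sample formats (the original does not sort):

    def format_scopes(upstream_artifacts):
        formats = set()
        for artifact in upstream_artifacts:
            formats.update(artifact['formats'])  # each format counted once
        return ['project:releng:signing:format:{}'.format(f) for f in sorted(formats)]

    print(format_scopes([{'formats': ['gpg', 'mar_sha384']},
                         {'formats': ['gpg']}]))
    # ['project:releng:signing:format:gpg', 'project:releng:signing:format:mar_sha384']
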
-diff --git a/taskcluster/taskgraph/transforms/source_test.py b/taskcluster/taskgraph/transforms/source_test.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/source_test.py
-+++ /dev/null
-@@ -1,145 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Source-test jobs can run on multiple platforms.  These transforms allow jobs
--with either `platform` or a list of `platforms`, and set the appropriate
--treeherder configuration and attributes for that platform.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import copy
--
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.transforms.job import job_description_schema
--from taskgraph.util.attributes import keymatch
--from taskgraph.util.schema import (
--    validate_schema,
--    resolve_keyed_by,
--)
--from voluptuous import (
--    Any,
--    Extra,
--    Required,
--    Schema,
--)
--
--ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'
--
--job_description_schema = {str(k): v for k, v in job_description_schema.schema.iteritems()}
--
--source_test_description_schema = Schema({
--    # most fields are passed directly through as job fields, and are not
--    # repeated here
--    Extra: object,
--
--    # The platform on which this task runs.  This will be used to set up attributes
--    # (for try selection) and treeherder metadata (for display).  If given as a list,
--    # the job will be "split" into multiple tasks, one with each platform.
--    Required('platform'): Any(basestring, [basestring]),
--
--    # Whether the job requires a build artifact or not. If True, the task will
--    # depend on a build task and the installer url will be saved to the
--    # GECKO_INSTALLER_URL environment variable. Build labels are determined by the
--    # `dependent-build-platforms` config in kind.yml.
--    Required('require-build', default=False): bool,
--
--    # These fields can be keyed by "platform", and are otherwise identical to
--    # job descriptions.
--    Required('worker-type'): Any(
--        job_description_schema['worker-type'],
--        {'by-platform': {basestring: job_description_schema['worker-type']}},
--    ),
--    Required('worker'): Any(
--        job_description_schema['worker'],
--        {'by-platform': {basestring: job_description_schema['worker']}},
--    ),
--})
--
--transforms = TransformSequence()
--
--
--@transforms.add
--def validate(config, jobs):
--    for job in jobs:
--        yield validate_schema(source_test_description_schema, job,
--                              "In job {!r}:".format(job['name']))
--
--
--@transforms.add
--def set_job_try_name(config, jobs):
--    for job in jobs:
--        job.setdefault('attributes', {}).setdefault('job_try_name', job['name'])
--        yield job
--
--
--@transforms.add
--def expand_platforms(config, jobs):
--    for job in jobs:
--        if isinstance(job['platform'], basestring):
--            yield job
--            continue
--
--        for platform in job['platform']:
--            pjob = copy.deepcopy(job)
--            pjob['platform'] = platform
--
--            if 'name' in pjob:
--                pjob['name'] = '{}-{}'.format(pjob['name'], platform)
--            else:
--                pjob['label'] = '{}-{}'.format(pjob['label'], platform)
--            yield pjob
--
--
--def add_build_dependency(config, job):
--    """
--    Add build dependency to the job and installer_url to env.
--    """
--    key = job['platform']
--    build_labels = config.config.get('dependent-build-platforms', {})
--    matches = keymatch(build_labels, key)
--    if not matches:
--        raise Exception("No build platform found for '{}'. "
--                        "Define 'dependent-build-platforms' in the kind config.".format(key))
--
--    if len(matches) > 1:
--        raise Exception("More than one build platform found for '{}'.".format(key))
--
--    label = matches[0]['label']
--    target = matches[0]['target-name']
--    deps = job.setdefault('dependencies', {})
--    deps.update({'build': label})
--
--    build_artifact = 'public/build/{}'.format(target)
--    installer_url = ARTIFACT_URL.format('<build>', build_artifact)
--
--    env = job['worker'].setdefault('env', {})
--    env.update({'GECKO_INSTALLER_URL': {'task-reference': installer_url}})
--
--
--@transforms.add
--def handle_platform(config, jobs):
--    """
--    Handle the 'platform' property, setting up treeherder context as well as
--    try-related attributes.
--    """
--    fields = [
--        'worker-type',
--        'worker',
--    ]
--
--    for job in jobs:
--        platform = job['platform']
--
--        for field in fields:
--            resolve_keyed_by(job, field, item_name=job['name'])
--
--        if 'treeherder' in job:
--            job['treeherder']['platform'] = platform
--
--        if job.pop('require-build'):
--            add_build_dependency(config, job)
--
--        del job['platform']
--        yield job
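
The platform fan-out in the deleted source_test.py deep-copies the job once per platform and suffixes the name. A self-contained sketch, assuming Python 3 `str` in place of the original's `basestring` and omitting the original's fallback to 'label' when 'name' is absent:

    import copy

    def expand_platforms(jobs):
        for job in jobs:
            if isinstance(job['platform'], str):  # already a single platform
                yield job
                continue
            for platform in job['platform']:
                pjob = copy.deepcopy(job)
                pjob['platform'] = platform
                pjob['name'] = '{}-{}'.format(pjob['name'], platform)
                yield pjob

    jobs = expand_platforms([{'name': 'flake8', 'platform': ['linux64', 'win64']}])
    print([j['name'] for j in jobs])
    # ['flake8-linux64', 'flake8-win64']
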
-diff --git a/taskcluster/taskgraph/transforms/task.py b/taskcluster/taskgraph/transforms/task.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/task.py
-+++ /dev/null
-@@ -1,1095 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--These transformations take a task description and turn it into a TaskCluster
--task definition (along with attributes, label, etc.).  The input to these
--transformations is generic to any kind of task, but abstracts away some of the
--complexities of worker implementations, scopes, and treeherder annotations.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import json
--import os
--import time
--from copy import deepcopy
--
--from taskgraph.util.attributes import TRUNK_PROJECTS
--from taskgraph.util.treeherder import split_symbol
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.util.schema import validate_schema, Schema
--from taskgraph.util.scriptworker import get_release_config
--from voluptuous import Any, Required, Optional, Extra
--from taskgraph import GECKO
--
--from .gecko_v2_whitelist import JOB_NAME_WHITELIST, JOB_NAME_WHITELIST_ERROR
--
--
--# shortcut for a string where task references are allowed
--taskref_or_string = Any(
--    basestring,
--    {Required('task-reference'): basestring})
--
--# A task description is a general description of a TaskCluster task
--task_description_schema = Schema({
--    # the label for this task
--    Required('label'): basestring,
--
--    # description of the task (for metadata)
--    Required('description'): basestring,
--
--    # attributes for this task
--    Optional('attributes'): {basestring: object},
--
--    # dependencies of this task, keyed by name; these are passed through
--    # verbatim and subject to the interpretation of the Task's get_dependencies
--    # method.
--    Optional('dependencies'): {basestring: object},
--
--    # expiration and deadline times, relative to task creation, with units
--    # (e.g., "14 days").  Defaults are set based on the project.
--    Optional('expires-after'): basestring,
--    Optional('deadline-after'): basestring,
--
--    # custom routes for this task; the default treeherder routes will be added
--    # automatically
--    Optional('routes'): [basestring],
--
--    # custom scopes for this task; any scopes required for the worker will be
--    # added automatically
--    Optional('scopes'): [basestring],
--
--    # Tags
--    Optional('tags'): {basestring: object},
--
--    # custom "task.extra" content
--    Optional('extra'): {basestring: object},
--
--    # treeherder-related information; see
--    # https://schemas.taskcluster.net/taskcluster-treeherder/v1/task-treeherder-config.json
--    # If not specified, no treeherder extra information or routes will be
--    # added to the task
--    Optional('treeherder'): {
--        # either a bare symbol, or "grp(sym)".
--        'symbol': basestring,
--
--        # the job kind
--        'kind': Any('build', 'test', 'other'),
--
--        # tier for this task
--        'tier': int,
--
--        # task platform, in the form platform/collection, used to set
--        # treeherder.machine.platform and treeherder.collection or
--        # treeherder.labels
--        'platform': basestring,
--
--        # treeherder environments (defaults to both staging and production)
--        Required('environments', default=['production', 'staging']): ['production', 'staging'],
--    },
--
--    # information for indexing this build so its artifacts can be discovered;
--    # if omitted, the build will not be indexed.
--    Optional('index'): {
--        # the name of the product this build produces
--        'product': Any('firefox', 'mobile', 'static-analysis', 'devedition'),
--
--        # the names to use for this job in the TaskCluster index
--        'job-name': basestring,
--
--        # Type of gecko v2 index to use
--        'type': Any('generic', 'nightly', 'l10n', 'nightly-with-multi-l10n'),
--
--        # The rank that the task will receive in the TaskCluster
--        # index.  A newly completed task supersedes the currently
--        # indexed task iff it has a higher rank.  If unspecified,
--        # 'by-tier' behavior will be used.
--        'rank': Any(
--            # Rank is equal the timestamp of the build_date for tier-1
--            # tasks, and zero for non-tier-1.  This sorts tier-{2,3}
--            # builds below tier-1 in the index.
--            'by-tier',
--
--            # Rank is given as an integer constant (e.g. zero to make
--            # sure a task is last in the index).
--            int,
--
--            # Rank is equal to the timestamp of the build_date.  This
--            # option can be used to override the 'by-tier' behavior
--            # for non-tier-1 tasks.
--            'build_date',
--        ),
--    },
--
--    # The `run_on_projects` attribute, defaulting to "all".  This dictates the
--    # projects on which this task should be included in the target task set.
--    # See the attributes documentation for details.
--    Optional('run-on-projects'): [basestring],
--
--    # If the task can be coalesced, this is the name used in the coalesce key;
--    # the project, etc. will be added automatically.  Note that try (level 1)
--    # tasks are never coalesced
--    Optional('coalesce-name'): basestring,
--
--    # Optimizations to perform on this task during the optimization phase,
--    # specified in order.  These optimizations are defined in
--    # taskcluster/taskgraph/optimize.py.
--    Optional('optimizations'): [Any(
--        # search the index for the given index namespace, and replace this task if found
--        ['index-search', basestring],
--        # consult SETA and skip this task if it is low-value
--        ['seta'],
--        # skip this task if none of the given file patterns match
--        ['skip-unless-changed', [basestring]],
--    )],
--
--    # the provisioner-id/worker-type for the task.  The following parameters will
--    # be substituted in this string:
--    #  {level} -- the scm level of this push
--    'worker-type': basestring,
--
--    # Whether the job should use sccache compiler caching.
--    Required('needs-sccache', default=False): bool,
--
--    # information specific to the worker implementation that will run this task
--    'worker': Any({
--        Required('implementation'): Any('docker-worker', 'docker-engine'),
--        Required('os'): 'linux',
--
--        # For tasks that will run in docker-worker or docker-engine, this is the
--        # name of the docker image or in-tree docker image to run the task in.  If
--        # in-tree, then a dependency will be created automatically.  This is
--        # generally `desktop-test`, or an image that acts an awful lot like it.
--        Required('docker-image'): Any(
--            # a raw Docker image path (repo/image:tag)
--            basestring,
--            # an in-tree generated docker image (from `taskcluster/docker/<name>`)
--            {'in-tree': basestring}
--        ),
--
--        # worker features that should be enabled
--        Required('relengapi-proxy', default=False): bool,
--        Required('chain-of-trust', default=False): bool,
--        Required('taskcluster-proxy', default=False): bool,
--        Required('allow-ptrace', default=False): bool,
--        Required('loopback-video', default=False): bool,
--        Required('loopback-audio', default=False): bool,
--        Required('docker-in-docker', default=False): bool,  # (aka 'dind')
--
--        # caches to set up for the task
--        Optional('caches'): [{
--            # only one type is supported by any of the workers right now
--            'type': 'persistent',
--
--            # name of the cache, allowing re-use by subsequent tasks naming the
--            # same cache
--            'name': basestring,
--
--            # location in the task image where the cache will be mounted
--            'mount-point': basestring,
--        }],
--
--        # artifacts to extract from the task image after completion
--        Optional('artifacts'): [{
--            # type of artifact -- simple file, or recursive directory
--            'type': Any('file', 'directory'),
--
--            # task image path from which to read artifact
--            'path': basestring,
--
--            # name of the produced artifact (root of the names for
--            # type=directory)
--            'name': basestring,
--        }],
--
--        # environment variables
--        Required('env', default={}): {basestring: taskref_or_string},
--
--        # the command to run; if not given, docker-worker will default to the
--        # command in the docker image
--        Optional('command'): [taskref_or_string],
--
--        # the maximum time to run, in seconds
--        Required('max-run-time'): int,
--
--        # the exit status code that indicates the task should be retried
--        Optional('retry-exit-status'): int,
--    }, {
--        Required('implementation'): 'generic-worker',
--        Required('os'): Any('windows', 'macosx'),
--        # see http://schemas.taskcluster.net/generic-worker/v1/payload.json
--        # and https://docs.taskcluster.net/reference/workers/generic-worker/payload
--
--        # command is a list of commands to run, sequentially
--        # on Windows, each command is a string, on OS X and Linux, each command is
--        # a string array
--        Required('command'): Any(
--            [taskref_or_string],   # Windows
--            [[taskref_or_string]]  # Linux / OS X
--        ),
--
--        # artifacts to extract from the task image after completion; note that artifacts
--        # for the generic worker cannot have names
--        Optional('artifacts'): [{
--            # type of artifact -- simple file, or recursive directory
--            'type': Any('file', 'directory'),
--
--            # filesystem path from which to read artifact
--            'path': basestring,
--
--            # if not specified, path is used for artifact name
--            Optional('name'): basestring
--        }],
--
--        # Directories and/or files to be mounted.
--        # The actual allowed combinations are stricter than the model below,
--        # but this provides a simple starting point.
--        # See https://docs.taskcluster.net/reference/workers/generic-worker/payload
--        Optional('mounts'): [{
--            # A unique name for the cache volume, implies writable cache directory
--            # (otherwise mount is a read-only file or directory).
--            Optional('cache-name'): basestring,
--            # Optional content for pre-loading cache, or mandatory content for
--            # read-only file or directory. Pre-loaded content can come from either
--            # a task artifact or from a URL.
--            Optional('content'): {
--
--                # *** Either (artifact and task-id) or url must be specified. ***
--
--                # Artifact name that contains the content.
--                Optional('artifact'): basestring,
--                # Task ID that has the artifact that contains the content.
--                Optional('task-id'): taskref_or_string,
--                # URL that supplies the content in response to an unauthenticated
--                # GET request.
--                Optional('url'): basestring
--            },
--
--            # *** Either file or directory must be specified. ***
--
--            # If mounting a cache or read-only directory, the filesystem location of
--            # the directory should be specified as a relative path to the task
--            # directory here.
--            Optional('directory'): basestring,
--            # If mounting a file, specify the relative path within the task
--            # directory to mount the file (the file will be read only).
--            Optional('file'): basestring,
--            # Required if and only if `content` is specified and mounting a
--            # directory (not a file). This should be the archive format of the
--            # content (either pre-loaded cache or read-only directory).
--            Optional('format'): Any('rar', 'tar.bz2', 'tar.gz', 'zip')
--        }],
--
--        # environment variables
--        Required('env', default={}): {basestring: taskref_or_string},
--
--        # the maximum time to run, in seconds
--        Required('max-run-time'): int,
--
--        # os user groups for test task workers
--        Optional('os-groups', default=[]): [basestring],
--
--        # optional features
--        Required('chain-of-trust', default=False): bool,
--    }, {
--        Required('implementation'): 'buildbot-bridge',
--
--        # see
--        # https://github.com/mozilla/buildbot-bridge/blob/master/bbb/schemas/payload.yml
--        Required('buildername'): basestring,
--        Required('sourcestamp'): {
--            'branch': basestring,
--            Optional('revision'): basestring,
--            Optional('repository'): basestring,
--            Optional('project'): basestring,
--        },
--        Required('properties'): {
--            'product': basestring,
--            Extra: taskref_or_string,  # additional properties are allowed
--        },
--    }, {
--        Required('implementation'): 'native-engine',
--        Required('os'): Any('macosx', 'linux'),
--
--        # A link for an executable to download
--        Optional('context'): basestring,
--
--        # Tells the worker whether the machine should reboot
--        # after the task is finished.
--        Optional('reboot'):
--            Any('always', 'on-exception', 'on-failure'),
--
--        # the command to run
--        Optional('command'): [taskref_or_string],
--
--        # environment variables
--        Optional('env'): {basestring: taskref_or_string},
--
--        # artifacts to extract from the task image after completion
--        Optional('artifacts'): [{
--            # type of artifact -- simple file, or recursive directory
--            Required('type'): Any('file', 'directory'),
--
--            # task image path from which to read artifact
--            Required('path'): basestring,
--
--            # name of the produced artifact (root of the names for
--            # type=directory)
--            Required('name'): basestring,
--        }],
--    }, {
--        Required('implementation'): 'scriptworker-signing',
--
--        # the maximum time to spend signing, in seconds
--        Required('max-run-time', default=600): int,
--
--        # list of artifact URLs for the artifacts that should be signed
--        Required('upstream-artifacts'): [{
--            # taskId of the task with the artifact
--            Required('taskId'): taskref_or_string,
--
--            # type of signing task (for CoT)
--            Required('taskType'): basestring,
--
--            # Paths to the artifacts to sign
--            Required('paths'): [basestring],
--
--            # Signing formats to use on each of the paths
--            Required('formats'): [basestring],
--        }],
--    }, {
--        Required('implementation'): 'beetmover',
--
--        # the maximum time to spend beetmoving, in seconds
--        Required('max-run-time', default=600): int,
--
--        # locale key, if this is a locale beetmover job
--        Optional('locale'): basestring,
--
--        # list of artifact URLs for the artifacts that should be beetmoved
--        Required('upstream-artifacts'): [{
--            # taskId of the task with the artifact
--            Required('taskId'): taskref_or_string,
--
--            # type of the upstream task (for CoT)
--            Required('taskType'): basestring,
--
--            # Paths to the artifacts to beetmove
--            Required('paths'): [basestring],
--
--            # locale is used to map upload path and allow for duplicate simple names
--            Required('locale'): basestring,
--        }],
--    }, {
--        Required('implementation'): 'balrog',
--
--        # list of artifact URLs for the update artifacts to submit to Balrog
--        Required('upstream-artifacts'): [{
--            # taskId of the task with the artifact
--            Required('taskId'): taskref_or_string,
--
--            # type of the upstream task (for CoT)
--            Required('taskType'): basestring,
--
--            # Paths to the artifacts to submit
--            Required('paths'): [basestring],
--        }],
--    }, {
--        Required('implementation'): 'push-apk-breakpoint',
--        Required('payload'): object,
--
--    }, {
--        Required('implementation'): 'invalid',
--        # an invalid task is one which should never actually be created; this is used in
--        # release automation on branches where the task just doesn't make sense
--        Extra: object,
--
--    }, {
--        Required('implementation'): 'push-apk',
--
--        # list of artifact URLs for the APKs that should be pushed
--        Required('upstream-artifacts'): [{
--            # taskId of the task with the artifact
--            Required('taskId'): taskref_or_string,
--
--            # type of the upstream task (for CoT)
--            Required('taskType'): basestring,
--
--            # Paths to the APKs to push
--            Required('paths'): [basestring],
--        }],
--
--        # "Invalid" is a noop for try and other non-supported branches
--        Required('google-play-track'): Any('production', 'beta', 'alpha', 'rollout', 'invalid'),
--        Required('dry-run', default=True): bool,
--        Optional('rollout-percentage'): int,
--    }),
--})
--
--GROUP_NAMES = {
--    'mocha': 'Mocha unit tests',
--    'py': 'Python unit tests',
--    'tc': 'Executed by TaskCluster',
--    'tc-e10s': 'Executed by TaskCluster with e10s',
--    'tc-Fxfn-l': 'Firefox functional tests (local) executed by TaskCluster',
--    'tc-Fxfn-l-e10s': 'Firefox functional tests (local) executed by TaskCluster with e10s',
--    'tc-Fxfn-r': 'Firefox functional tests (remote) executed by TaskCluster',
--    'tc-Fxfn-r-e10s': 'Firefox functional tests (remote) executed by TaskCluster with e10s',
--    'tc-M': 'Mochitests executed by TaskCluster',
--    'tc-M-e10s': 'Mochitests executed by TaskCluster with e10s',
--    'tc-M-V': 'Mochitests on Valgrind executed by TaskCluster',
--    'tc-R': 'Reftests executed by TaskCluster',
--    'tc-R-e10s': 'Reftests executed by TaskCluster with e10s',
--    'tc-T': 'Talos performance tests executed by TaskCluster',
--    'tc-T-e10s': 'Talos performance tests executed by TaskCluster with e10s',
--    'tc-tt-c': 'Telemetry client marionette tests',
--    'tc-tt-c-e10s': 'Telemetry client marionette tests with e10s',
--    'tc-SY-e10s': 'Are we slim yet tests by TaskCluster with e10s',
--    'tc-VP': 'VideoPuppeteer tests executed by TaskCluster',
--    'tc-W': 'Web platform tests executed by TaskCluster',
--    'tc-W-e10s': 'Web platform tests executed by TaskCluster with e10s',
--    'tc-X': 'Xpcshell tests executed by TaskCluster',
--    'tc-X-e10s': 'Xpcshell tests executed by TaskCluster with e10s',
--    'tc-L10n': 'Localised Repacks executed by Taskcluster',
--    'tc-L10n-Rpk': 'Localized Repackaged Repacks executed by Taskcluster',
--    'tc-BM-L10n': 'Beetmover for locales executed by Taskcluster',
--    'tc-BMR-L10n': 'Beetmover repackages for locales executed by Taskcluster',
--    'tc-Up': 'Balrog submission of updates, executed by Taskcluster',
--    'tc-cs': 'Checksum signing executed by Taskcluster',
--    'tc-rs': 'Repackage signing executed by Taskcluster',
--    'tc-BMcs': 'Beetmover checksums, executed by Taskcluster',
--    'Aries': 'Aries Device Image',
--    'Nexus 5-L': 'Nexus 5-L Device Image',
--    'I': 'Docker Image Builds',
--    'TL': 'Toolchain builds for Linux 64-bits',
--    'TM': 'Toolchain builds for OSX',
--    'TW32': 'Toolchain builds for Windows 32-bits',
--    'TW64': 'Toolchain builds for Windows 64-bits',
--    'SM-tc': 'Spidermonkey builds',
--    'pub': 'APK publishing',
--}
--UNKNOWN_GROUP_NAME = "Treeherder group {} has no name; add it to " + __file__
--
--V2_ROUTE_TEMPLATES = [
--    "index.gecko.v2.{project}.latest.{product}.{job-name}",
--    "index.gecko.v2.{project}.pushdate.{build_date_long}.{product}.{job-name}",
--    "index.gecko.v2.{project}.pushlog-id.{pushlog_id}.{product}.{job-name}",
--    "index.gecko.v2.{project}.revision.{head_rev}.{product}.{job-name}",
--]
--
--# {central, inbound, autoland} write to a "trunk" index prefix. This facilitates
--# walking of tasks with similar configurations.
--V2_TRUNK_ROUTE_TEMPLATES = [
--    "index.gecko.v2.trunk.revision.{head_rev}.{product}.{job-name}",
--]
--
--V2_NIGHTLY_TEMPLATES = [
--    "index.gecko.v2.{project}.nightly.latest.{product}.{job-name}",
--    "index.gecko.v2.{project}.nightly.{build_date}.revision.{head_rev}.{product}.{job-name}",
--    "index.gecko.v2.{project}.nightly.{build_date}.latest.{product}.{job-name}",
--    "index.gecko.v2.{project}.nightly.revision.{head_rev}.{product}.{job-name}",
--]
--
--V2_L10N_TEMPLATES = [
--    "index.gecko.v2.{project}.revision.{head_rev}.{product}-l10n.{job-name}.{locale}",
--    "index.gecko.v2.{project}.pushdate.{build_date_long}.{product}-l10n.{job-name}.{locale}",
--    "index.gecko.v2.{project}.latest.{product}-l10n.{job-name}.{locale}",
--]
--
--# the roots of the treeherder routes, keyed by treeherder environment
--TREEHERDER_ROUTE_ROOTS = {
--    'production': 'tc-treeherder',
--    'staging': 'tc-treeherder-stage',
--}
--
--COALESCE_KEY = 'builds.{project}.{name}'
--
--DEFAULT_BRANCH_PRIORITY = 'low'
--BRANCH_PRIORITIES = {
--    'mozilla-release': 'highest',
--    'comm-esr45': 'highest',
--    'comm-esr52': 'highest',
--    'mozilla-esr45': 'very-high',
--    'mozilla-esr52': 'very-high',
--    'mozilla-beta': 'high',
--    'comm-beta': 'high',
--    'mozilla-central': 'medium',
--    'comm-central': 'medium',
--    'comm-aurora': 'medium',
--    'autoland': 'low',
--    'mozilla-inbound': 'low',
--    'try': 'very-low',
--    'try-comm-central': 'very-low',
--    'alder': 'very-low',
--    'ash': 'very-low',
--    'birch': 'very-low',
--    'cedar': 'very-low',
--    'cypress': 'very-low',
--    'date': 'very-low',
--    'elm': 'very-low',
--    'fig': 'very-low',
--    'gum': 'very-low',
--    'holly': 'very-low',
--    'jamun': 'very-low',
--    'larch': 'very-low',
--    'maple': 'very-low',
--    'oak': 'very-low',
--    'pine': 'very-low',
--    'graphics': 'very-low',
--    'ux': 'very-low',
--}
--
--# define a collection of payload builders, depending on the worker implementation
--payload_builders = {}
--
--
--def payload_builder(name):
--    def wrap(func):
--        payload_builders[name] = func
--        return func
--    return wrap
--
--
--# define a collection of index builders, depending on the type implementation
--index_builders = {}
--
--
--def index_builder(name):
--    def wrap(func):
--        index_builders[name] = func
--        return func
--    return wrap
--
--
--@payload_builder('docker-worker')
--def build_docker_worker_payload(config, task, task_def):
--    worker = task['worker']
--
--    image = worker['docker-image']
--    if isinstance(image, dict):
--        docker_image_task = 'build-docker-image-' + image['in-tree']
--        task.setdefault('dependencies', {})['docker-image'] = docker_image_task
--        image = {
--            "path": "public/image.tar.zst",
--            "taskId": {"task-reference": "<docker-image>"},
--            "type": "task-image",
--        }
--
--    features = {}
--
--    if worker.get('relengapi-proxy'):
--        features['relengAPIProxy'] = True
--
--    if worker.get('taskcluster-proxy'):
--        features['taskclusterProxy'] = True
--
--    if worker.get('allow-ptrace'):
--        features['allowPtrace'] = True
--        task_def['scopes'].append('docker-worker:feature:allowPtrace')
--
--    if worker.get('chain-of-trust'):
--        features['chainOfTrust'] = True
--
--    if worker.get('docker-in-docker'):
--        features['dind'] = True
--
--    if task.get('needs-sccache'):
--        features['taskclusterProxy'] = True
--        task_def['scopes'].append(
--            'assume:project:taskcluster:level-{level}-sccache-buckets'.format(
--                level=config.params['level'])
--        )
--        worker['env']['USE_SCCACHE'] = '1'
--    else:
--        worker['env']['SCCACHE_DISABLE'] = '1'
--
--    capabilities = {}
--
--    for lo in 'audio', 'video':
--        if worker.get('loopback-' + lo):
--            capitalized = 'loopback' + lo.capitalize()
--            devices = capabilities.setdefault('devices', {})
--            devices[capitalized] = True
--            task_def['scopes'].append('docker-worker:capability:device:' + capitalized)
--
--    task_def['payload'] = payload = {
--        'image': image,
--        'env': worker['env'],
--    }
--    if 'command' in worker:
--        payload['command'] = worker['command']
--
--    if 'max-run-time' in worker:
--        payload['maxRunTime'] = worker['max-run-time']
--
--    if 'retry-exit-status' in worker:
--        payload['onExitStatus'] = {'retry': [worker['retry-exit-status']]}
--
--    if 'artifacts' in worker:
--        artifacts = {}
--        for artifact in worker['artifacts']:
--            artifacts[artifact['name']] = {
--                'path': artifact['path'],
--                'type': artifact['type'],
--                'expires': task_def['expires'],  # always expire with the task
--            }
--        payload['artifacts'] = artifacts
--
--    if 'caches' in worker:
--        caches = {}
--        for cache in worker['caches']:
--            caches[cache['name']] = cache['mount-point']
--            task_def['scopes'].append('docker-worker:cache:' + cache['name'])
--        payload['cache'] = caches
--
--    if features:
--        payload['features'] = features
--    if capabilities:
--        payload['capabilities'] = capabilities
--
--    # coalesce / superseding
--    if 'coalesce-name' in task and int(config.params['level']) > 1:
--        key = COALESCE_KEY.format(
--            project=config.params['project'],
--            name=task['coalesce-name'])
--        payload['supersederUrl'] = "https://coalesce.mozilla-releng.net/v1/list/" + key
--
--
--@payload_builder('generic-worker')
--def build_generic_worker_payload(config, task, task_def):
--    worker = task['worker']
--
--    artifacts = []
--
--    for artifact in worker['artifacts']:
--        a = {
--            'path': artifact['path'],
--            'type': artifact['type'],
--            'expires': task_def['expires'],  # always expire with the task
--        }
--        if 'name' in artifact:
--            a['name'] = artifact['name']
--        artifacts.append(a)
--
--    # Need to copy over mounts, but rename keys to respect naming convention
--    #   * 'cache-name' -> 'cacheName'
--    #   * 'task-id'    -> 'taskId'
--    # All other key names are already suitable, and don't need renaming.
--    mounts = deepcopy(worker.get('mounts', []))
--    for mount in mounts:
--        if 'cache-name' in mount:
--            mount['cacheName'] = mount.pop('cache-name')
--        if 'content' in mount:
--            if 'task-id' in mount['content']:
--                mount['content']['taskId'] = mount['content'].pop('task-id')
--
--    task_def['payload'] = {
--        'command': worker['command'],
--        'artifacts': artifacts,
--        'env': worker.get('env', {}),
--        'mounts': mounts,
--        'maxRunTime': worker['max-run-time'],
--        'osGroups': worker.get('os-groups', []),
--    }
--
--    # needs-sccache is handled in mozharness_on_windows
--
--    if 'retry-exit-status' in worker:
--        raise Exception("retry-exit-status not supported in generic-worker")
--
--    # currently only support one feature (chain of trust) but this will likely grow
--    features = {}
--
--    if worker.get('chain-of-trust'):
--        features['chainOfTrust'] = True
--
--    if features:
--        task_def['payload']['features'] = features
--
--
--@payload_builder('scriptworker-signing')
--def build_scriptworker_signing_payload(config, task, task_def):
--    worker = task['worker']
--
--    task_def['payload'] = {
--        'maxRunTime': worker['max-run-time'],
--        'upstreamArtifacts':  worker['upstream-artifacts']
--    }
--
--
--@payload_builder('beetmover')
--def build_beetmover_payload(config, task, task_def):
--    worker = task['worker']
--    release_config = get_release_config(config)
--
--    task_def['payload'] = {
--        'maxRunTime': worker['max-run-time'],
--        'upload_date': config.params['build_date'],
--        'upstreamArtifacts':  worker['upstream-artifacts']
--    }
--    if worker.get('locale'):
--        task_def['payload']['locale'] = worker['locale']
--    if release_config:
--        task_def['payload'].update(release_config)
--
--
--@payload_builder('balrog')
--def build_balrog_payload(config, task, task_def):
--    worker = task['worker']
--
--    task_def['payload'] = {
--        'upstreamArtifacts':  worker['upstream-artifacts']
--    }
--
--
--@payload_builder('push-apk')
--def build_push_apk_payload(config, task, task_def):
--    worker = task['worker']
--
--    task_def['payload'] = {
--        'dry_run': worker['dry-run'],
--        'upstreamArtifacts':  worker['upstream-artifacts'],
--        'google_play_track': worker['google-play-track'],
--    }
--
--    if worker.get('rollout-percentage', None):
--        task_def['payload']['rollout_percentage'] = worker['rollout-percentage']
--
--
--@payload_builder('push-apk-breakpoint')
--def build_push_apk_breakpoint_payload(config, task, task_def):
--    task_def['payload'] = task['worker']['payload']
--
--
--@payload_builder('invalid')
--def build_invalid_payload(config, task, task_def):
--    task_def['payload'] = 'invalid task - should never be created'
--
--
--@payload_builder('native-engine')
--def build_macosx_engine_payload(config, task, task_def):
--    worker = task['worker']
--    artifacts = map(lambda artifact: {
--        'name': artifact['name'],
--        'path': artifact['path'],
--        'type': artifact['type'],
--        'expires': task_def['expires'],
--    }, worker.get('artifacts', []))
--
--    task_def['payload'] = {
--        'context': worker['context'],
--        'command': worker['command'],
--        'env': worker['env'],
--        'artifacts': artifacts,
--    }
--    if worker.get('reboot'):
--        task_def['payload']['reboot'] = worker['reboot']
--
--    if task.get('needs-sccache'):
--        raise Exception('needs-sccache not supported in native-engine')
--
--
--@payload_builder('buildbot-bridge')
--def build_buildbot_bridge_payload(config, task, task_def):
--    del task['extra']['treeherder']
--    del task['extra']['treeherderEnv']
--    worker = task['worker']
--    task_def['payload'] = {
--        'buildername': worker['buildername'],
--        'sourcestamp': worker['sourcestamp'],
--        'properties': worker['properties'],
--    }
--
--
--transforms = TransformSequence()
--
--
--@transforms.add
--def validate(config, tasks):
--    for task in tasks:
--        yield validate_schema(
--            task_description_schema, task,
--            "In task {!r}:".format(task.get('label', '?no-label?')))
--
--
--@index_builder('generic')
--def add_generic_index_routes(config, task):
--    index = task.get('index')
--    routes = task.setdefault('routes', [])
--
--    job_name = index['job-name']
--    if job_name not in JOB_NAME_WHITELIST:
--        raise Exception(JOB_NAME_WHITELIST_ERROR.format(job_name))
--
--    subs = config.params.copy()
--    subs['job-name'] = job_name
--    subs['build_date_long'] = time.strftime("%Y.%m.%d.%Y%m%d%H%M%S",
--                                            time.gmtime(config.params['build_date']))
--    subs['product'] = index['product']
--
--    project = config.params.get('project')
--
--    for tpl in V2_ROUTE_TEMPLATES:
--        routes.append(tpl.format(**subs))
--
--    # Additionally alias all tasks for "trunk" repos into a common
--    # namespace.
--    if project and project in TRUNK_PROJECTS:
--        for tpl in V2_TRUNK_ROUTE_TEMPLATES:
--            routes.append(tpl.format(**subs))
--
--    return task
--
--
--@index_builder('nightly')
--def add_nightly_index_routes(config, task):
--    index = task.get('index')
--    routes = task.setdefault('routes', [])
--
--    job_name = index['job-name']
--    if job_name not in JOB_NAME_WHITELIST:
--        raise Exception(JOB_NAME_WHITELIST_ERROR.format(job_name))
--
--    subs = config.params.copy()
--    subs['job-name'] = job_name
--    subs['build_date_long'] = time.strftime("%Y.%m.%d.%Y%m%d%H%M%S",
--                                            time.gmtime(config.params['build_date']))
--    subs['build_date'] = time.strftime("%Y.%m.%d",
--                                       time.gmtime(config.params['build_date']))
--    subs['product'] = index['product']
--
--    for tpl in V2_NIGHTLY_TEMPLATES:
--        routes.append(tpl.format(**subs))
--
--    # Also add routes for en-US
--    task = add_l10n_index_routes(config, task, force_locale="en-US")
--
--    return task
--
--
--@index_builder('nightly-with-multi-l10n')
--def add_nightly_multi_index_routes(config, task):
--    task = add_nightly_index_routes(config, task)
--    task = add_l10n_index_routes(config, task, force_locale="multi")
--    return task
--
--
--@index_builder('l10n')
--def add_l10n_index_routes(config, task, force_locale=None):
--    index = task.get('index')
--    routes = task.setdefault('routes', [])
--
--    job_name = index['job-name']
--    if job_name not in JOB_NAME_WHITELIST:
--        raise Exception(JOB_NAME_WHITELIST_ERROR.format(job_name))
--
--    subs = config.params.copy()
--    subs['job-name'] = job_name
--    subs['build_date_long'] = time.strftime("%Y.%m.%d.%Y%m%d%H%M%S",
--                                            time.gmtime(config.params['build_date']))
--    subs['product'] = index['product']
--
--    locales = task['attributes'].get('chunk_locales',
--                                     task['attributes'].get('all_locales'))
--    # Some tasks have only one locale set
--    if task['attributes'].get('locale'):
--        locales = [task['attributes']['locale']]
--
--    if force_locale:
--        # Used for en-US and multi-locale
--        locales = [force_locale]
--
--    if not locales:
--        raise Exception("Error: Unable to use l10n index for tasks without locales")
--
--    # If there are too many locales, we can't write a route for all of them
--    # See Bug 1323792
--    if len(locales) > 18:  # 18 * 3 = 54, max routes = 64
--        return task
--
--    for locale in locales:
--        for tpl in V2_L10N_TEMPLATES:
--            routes.append(tpl.format(locale=locale, **subs))
--
--    return task
--
--
--@transforms.add
--def add_index_routes(config, tasks):
--    for task in tasks:
--        index = task.get('index')
--
--        if not index:
--            yield task
--            continue
--
--        index_type = index.get('type', 'generic')
--        task = index_builders[index_type](config, task)
--
--        # The default behavior is to rank tasks according to their tier
--        extra_index = task.setdefault('extra', {}).setdefault('index', {})
--        rank = index.get('rank', 'by-tier')
--
--        if rank == 'by-tier':
--            # rank is zero for non-tier-1 tasks and based on build date for others;
--            # this sorts tier-{2,3} builds below tier-1 in the index
--            tier = task.get('treeherder', {}).get('tier', 3)
--            extra_index['rank'] = 0 if tier > 1 else int(config.params['build_date'])
--        elif rank == 'build_date':
--            extra_index['rank'] = int(config.params['build_date'])
--        else:
--            extra_index['rank'] = rank
--
--        del task['index']
--        yield task
--
--
--@transforms.add
--def build_task(config, tasks):
--    for task in tasks:
--        worker_type = task['worker-type'].format(level=str(config.params['level']))
--        provisioner_id, worker_type = worker_type.split('/', 1)
--
--        routes = task.get('routes', [])
--        scopes = task.get('scopes', [])
--
--        # set up extra
--        extra = task.get('extra', {})
--        task_th = task.get('treeherder')
--        if task_th:
--            extra['treeherderEnv'] = task_th['environments']
--
--            treeherder = extra.setdefault('treeherder', {})
--
--            machine_platform, collection = task_th['platform'].split('/', 1)
--            treeherder['machine'] = {'platform': machine_platform}
--            treeherder['collection'] = {collection: True}
--
--            groupSymbol, symbol = split_symbol(task_th['symbol'])
--            if groupSymbol != '?':
--                treeherder['groupSymbol'] = groupSymbol
--                if groupSymbol not in GROUP_NAMES:
--                    raise Exception(UNKNOWN_GROUP_NAME.format(groupSymbol))
--                treeherder['groupName'] = GROUP_NAMES[groupSymbol]
--            treeherder['symbol'] = symbol
--            treeherder['jobKind'] = task_th['kind']
--            treeherder['tier'] = task_th['tier']
--
--            routes.extend([
--                '{}.v2.{}.{}.{}'.format(TREEHERDER_ROUTE_ROOTS[env],
--                                        config.params['project'],
--                                        config.params['head_rev'],
--                                        config.params['pushlog_id'])
--                for env in task_th['environments']
--            ])
--
--        if 'expires-after' not in task:
--            task['expires-after'] = '28 days' if config.params['project'] == 'try' else '1 year'
--
--        if 'deadline-after' not in task:
--            task['deadline-after'] = '1 day'
--
--        if 'coalesce-name' in task and int(config.params['level']) > 1:
--            key = COALESCE_KEY.format(
--                project=config.params['project'],
--                name=task['coalesce-name'])
--            routes.append('coalesce.v1.' + key)
--
--        if 'priority' not in task:
--            task['priority'] = BRANCH_PRIORITIES.get(
--                config.params['project'],
--                DEFAULT_BRANCH_PRIORITY)
--
--        tags = task.get('tags', {})
--        tags.update({'createdForUser': config.params['owner']})
--
--        task_def = {
--            'provisionerId': provisioner_id,
--            'workerType': worker_type,
--            'routes': routes,
--            'created': {'relative-datestamp': '0 seconds'},
--            'deadline': {'relative-datestamp': task['deadline-after']},
--            'expires': {'relative-datestamp': task['expires-after']},
--            'scopes': scopes,
--            'metadata': {
--                'description': task['description'],
--                'name': task['label'],
--                'owner': config.params['owner'],
--                'source': '{}/file/{}/{}'.format(
--                    config.params['head_repository'],
--                    config.params['head_rev'],
--                    config.path),
--            },
--            'extra': extra,
--            'tags': tags,
--            'priority': task['priority'],
--        }
--
--        if task_th:
--            # link back to treeherder in description
--            th_push_link = 'https://treeherder.mozilla.org/#/jobs?repo={}&revision={}'.format(
--                config.params['project'], config.params['head_rev'])
--            task_def['metadata']['description'] += ' ([Treeherder push]({}))'.format(
--                th_push_link)
--
--        # add the payload and adjust anything else as required (e.g., scopes)
--        payload_builders[task['worker']['implementation']](config, task, task_def)
--
--        attributes = task.get('attributes', {})
--        attributes['run_on_projects'] = task.get('run-on-projects', ['all'])
--
--        # Set MOZ_AUTOMATION on all jobs.
--        if task['worker']['implementation'] in (
--            'generic-worker',
--            'docker-engine',
--            'native-engine',
--            'docker-worker',
--        ):
--            payload = task_def.get('payload')
--            if payload:
--                env = payload.setdefault('env', {})
--                env['MOZ_AUTOMATION'] = '1'
--
--        yield {
--            'label': task['label'],
--            'task': task_def,
--            'dependencies': task.get('dependencies', {}),
--            'attributes': attributes,
--            'optimizations': task.get('optimizations', []),
--        }
--
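# Minimal sketch of the symbol handling in build_task above, assuming
# split_symbol behaves as in taskgraph.util.treeherder ('M(gl1)' -> group 'M',
# symbol 'gl1'; a bare symbol falls into the '?' group):
import re

def split_symbol_sketch(treeherder_symbol):
    group, symbol = '?', treeherder_symbol
    if '(' in symbol:
        group, symbol = re.match(r'([^(]*)\(([^)]*)\)', symbol).groups()
    return group, symbol

assert split_symbol_sketch('M(gl1)') == ('M', 'gl1')
assert split_symbol_sketch('Cpp') == ('?', 'Cpp')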
--
--# Check that the v2 route templates match those used by Mozharness.  This can
--# go away once Mozharness builds are no longer performed in Buildbot, and the
--# Mozharness code referencing routes.json is deleted.
--def check_v2_routes():
--    with open(os.path.join(GECKO, "testing/mozharness/configs/routes.json"), "rb") as f:
--        routes_json = json.load(f)
--
--    for key in ('routes', 'nightly', 'l10n'):
--        if key == 'routes':
--            tc_template = V2_ROUTE_TEMPLATES
--        elif key == 'nightly':
--            tc_template = V2_NIGHTLY_TEMPLATES
--        elif key == 'l10n':
--            tc_template = V2_L10N_TEMPLATES
--
--        routes = routes_json[key]
--
--        # we use different variables than mozharness
--        for mh, tg in [
--                ('{index}', 'index'),
--                ('{build_product}', '{product}'),
--                ('{build_name}-{build_type}', '{job-name}'),
--                ('{year}.{month}.{day}.{pushdate}', '{build_date_long}'),
--                ('{pushid}', '{pushlog_id}'),
--                ('{year}.{month}.{day}', '{build_date}')]:
--            routes = [r.replace(mh, tg) for r in routes]
--
--        if sorted(routes) != sorted(tc_template):
--            raise Exception("V2 TEMPLATES do not match Mozharness's routes.json: "
--                            "(tc):%s vs (mh):%s" % (tc_template, routes))
--
--
--check_v2_routes()
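# Minimal sketch of the variable translation performed by check_v2_routes
# above; the route below is an assumed example, the real templates live in
# routes.json and the V2_*_TEMPLATES constants:
mh_route = 'index.gecko.v2.{project}.pushdate.{year}.{month}.{day}.{pushdate}.{build_product}'
for mh, tg in [('{build_product}', '{product}'),
               ('{year}.{month}.{day}.{pushdate}', '{build_date_long}')]:
    mh_route = mh_route.replace(mh, tg)
assert mh_route == 'index.gecko.v2.{project}.pushdate.{build_date_long}.{product}'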
-diff --git a/taskcluster/taskgraph/transforms/tests.py b/taskcluster/taskgraph/transforms/tests.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/tests.py
-+++ /dev/null
-@@ -1,938 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--These transforms construct a task description to run the given test, based on a
--test description.  The implementation here is shared among all test kinds, but
--contains specific support for how we run tests in Gecko (via mozharness,
--invoked in particular ways).
--
--This is a good place to translate a test-description option such as
--`single-core: true` to the implementation of that option in a task description
--(worker options, mozharness commandline, environment variables, etc.)
--
--The test description should be fully formed by the time it reaches these
--transforms, and these transforms should not embody any specific knowledge about
--what should run where. this is the wrong place for special-casing platforms,
--for example - use `all_tests.py` instead.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--from taskgraph.util.schema import resolve_keyed_by
--from taskgraph.util.treeherder import split_symbol, join_symbol
--from taskgraph.util.schema import (
--    validate_schema,
--    optionally_keyed_by,
--    Schema,
--)
--from voluptuous import (
--    Any,
--    Optional,
--    Required,
--)
--
--import copy
--import logging
--
--# default worker types keyed by instance-size
--LINUX_WORKER_TYPES = {
--    'large': 'aws-provisioner-v1/gecko-t-linux-large',
--    'xlarge': 'aws-provisioner-v1/gecko-t-linux-xlarge',
--    'legacy': 'aws-provisioner-v1/gecko-t-linux-medium',
--    'default': 'aws-provisioner-v1/gecko-t-linux-large',
--}
--
--# windows worker types keyed by test-platform and virtualization
--WINDOWS_WORKER_TYPES = {
--    'windows7-32': {
--      'virtual': 'aws-provisioner-v1/gecko-t-win7-32',
--      'virtual-with-gpu': 'aws-provisioner-v1/gecko-t-win7-32-gpu',
--      'hardware': 'releng-hardware/gecko-t-win7-32-hw',
--    },
--    'windows7-32-pgo': {
--      'virtual': 'aws-provisioner-v1/gecko-t-win7-32',
--      'virtual-with-gpu': 'aws-provisioner-v1/gecko-t-win7-32-gpu',
--      'hardware': 'releng-hardware/gecko-t-win7-32-hw',
--    },
--    'windows7-32-nightly': {
--      'virtual': 'aws-provisioner-v1/gecko-t-win7-32',
--      'virtual-with-gpu': 'aws-provisioner-v1/gecko-t-win7-32-gpu',
--      'hardware': 'releng-hardware/gecko-t-win7-32-hw',
--    },
--    'windows7-32-devedition': {
--      'virtual': 'aws-provisioner-v1/gecko-t-win7-32',
--      'virtual-with-gpu': 'aws-provisioner-v1/gecko-t-win7-32-gpu',
--      'hardware': 'releng-hardware/gecko-t-win7-32-hw',
--    },
--    'windows7-32-stylo': {
--      'virtual': 'aws-provisioner-v1/gecko-t-win7-32',
--      'virtual-with-gpu': 'aws-provisioner-v1/gecko-t-win7-32-gpu',
--      'hardware': 'releng-hardware/gecko-t-win7-32-hw',
--    },
--    'windows10-64': {
--      'virtual': 'aws-provisioner-v1/gecko-t-win10-64',
--      'virtual-with-gpu': 'aws-provisioner-v1/gecko-t-win10-64-gpu',
--      'hardware': 'releng-hardware/gecko-t-win10-64-hw',
--    },
--    'windows10-64-pgo': {
--      'virtual': 'aws-provisioner-v1/gecko-t-win10-64',
--      'virtual-with-gpu': 'aws-provisioner-v1/gecko-t-win10-64-gpu',
--      'hardware': 'releng-hardware/gecko-t-win10-64-hw',
--    },
--    'windows10-64-devedition': {
--      'virtual': 'aws-provisioner-v1/gecko-t-win10-64',
--      'virtual-with-gpu': 'aws-provisioner-v1/gecko-t-win10-64-gpu',
--      'hardware': 'releng-hardware/gecko-t-win10-64-hw',
--    },
--    'windows10-64-nightly': {
--      'virtual': 'aws-provisioner-v1/gecko-t-win10-64',
--      'virtual-with-gpu': 'aws-provisioner-v1/gecko-t-win10-64-gpu',
--      'hardware': 'releng-hardware/gecko-t-win10-64-hw',
--    },
--    'windows10-64-stylo': {
--      'virtual': 'aws-provisioner-v1/gecko-t-win10-64',
--      'virtual-with-gpu': 'aws-provisioner-v1/gecko-t-win10-64-gpu',
--      'hardware': 'releng-hardware/gecko-t-win10-64-hw',
--    },
--    'windows10-64-asan': {
--      'virtual': 'aws-provisioner-v1/gecko-t-win10-64',
--      'virtual-with-gpu': 'aws-provisioner-v1/gecko-t-win10-64-gpu',
--      'hardware': 'releng-hardware/gecko-t-win10-64-hw',
--    },
--    # These values don't really matter since BBB will be executing them
--    'windows8-64': {
--      'virtual': 'aws-provisioner-v1/gecko-t-win10-64',
--      'virtual-with-gpu': 'aws-provisioner-v1/gecko-t-win10-64-gpu',
--      'hardware': 'releng-hardware/gecko-t-win10-64-hw',
--    },
--    'windows8-64-pgo': {
--      'virtual': 'aws-provisioner-v1/gecko-t-win10-64',
--      'virtual-with-gpu': 'aws-provisioner-v1/gecko-t-win10-64-gpu',
--      'hardware': 'releng-hardware/gecko-t-win10-64-hw',
--    },
--    'windows8-64-nightly': {
--      'virtual': 'aws-provisioner-v1/gecko-t-win10-64',
--      'virtual-with-gpu': 'aws-provisioner-v1/gecko-t-win10-64-gpu',
--      'hardware': 'releng-hardware/gecko-t-win10-64-hw',
--    },
--}
--
--# os x worker types keyed by test-platform
--MACOSX_WORKER_TYPES = {
--    'macosx64': 'releng-hardware/gecko-t-osx-1010',
--}
--
--logger = logging.getLogger(__name__)
--
--transforms = TransformSequence()
--
--# Schema for a test description
--#
--# *****WARNING*****
--#
--# This is a great place for baffling cruft to accumulate, and that makes
--# everyone move more slowly.  Be considerate of your fellow hackers!
--# See the warnings in taskcluster/docs/how-tos.rst
--#
--# *****WARNING*****
--test_description_schema = Schema({
--    # description of the suite, for the task metadata
--    'description': basestring,
--
--    # test suite name, or <suite>/<flavor>
--    Required('suite'): optionally_keyed_by(
--        'test-platform',
--        basestring),
--
--    # the name by which this test suite is addressed in try syntax; defaults to
--    # the test-name.  This will translate to the `unittest_try_name` or
--    # `talos_try_name` attribute.
--    Optional('try-name'): basestring,
--
--    # additional tags to mark up this type of test
--    Optional('tags'): {basestring: object},
--
--    # the symbol, or group(symbol), under which this task should appear in
--    # treeherder.
--    'treeherder-symbol': basestring,
--
--    # the value to place in task.extra.treeherder.machine.platform; ideally
--    # this is the same as build-platform, and that is the default, but in
--    # practice it's not always a match.
--    Optional('treeherder-machine-platform'): basestring,
--
--    # attributes to appear in the resulting task (later transforms will add the
--    # common attributes)
--    Optional('attributes'): {basestring: object},
--
--    # The `run_on_projects` attribute, defaulting to "all".  This dictates the
--    # projects on which this task should be included in the target task set.
--    # See the attributes documentation for details.
--    #
--    # Note that the special case 'built-projects', the default, uses the parent
--    # build task's run-on-projects, meaning that tests run only on platforms
--    # that are built.
--    Optional('run-on-projects', default='built-projects'): optionally_keyed_by(
--        'test-platform',
--        Any([basestring], 'built-projects')),
--
--    # the sheriffing tier for this task (default: set based on test platform)
--    Optional('tier'): optionally_keyed_by(
--        'test-platform',
--        Any(int, 'default')),
--
--    # number of chunks to create for this task.  This can be keyed by test
--    # platform by passing a dictionary in the `by-test-platform` key.  If the
--    # test platform is not found, the key 'default' will be tried.
--    Required('chunks', default=1): optionally_keyed_by(
--        'test-platform',
--        int),
--
--    # the time (with unit) after which this task is deleted; default depends on
--    # the branch (see below)
--    Optional('expires-after'): basestring,
--
--    # Whether to run this task with e10s (desktop-test only).  If false, run
--    # without e10s; if true, run with e10s; if 'both', run one task with and
--    # one task without e10s.  E10s tasks have "-e10s" appended to the test name
--    # and treeherder group.
--    Required('e10s', default='both'): optionally_keyed_by(
--        'test-platform', 'project',
--        Any(bool, 'both')),
--
--    # Whether the task should run with WebRender enabled or not.
--    Optional('webrender', default=False): bool,
--
--    # The EC2 instance size to run these tests on.
--    Required('instance-size', default='default'): optionally_keyed_by(
--        'test-platform',
--        Any('default', 'large', 'xlarge', 'legacy')),
--
--    # type of virtualization or hardware required by test.
--    Required('virtualization', default='virtual'): optionally_keyed_by(
--        'test-platform',
--        Any('virtual', 'virtual-with-gpu', 'hardware')),
--
--    # Whether the task requires loopback audio or video (whatever that may mean
--    # on the platform)
--    Required('loopback-audio', default=False): bool,
--    Required('loopback-video', default=False): bool,
--
--    # Whether the test can run using a software GL implementation on Linux
--    # using the GL compositor. May not be used with "legacy" sized instances
--    # due to poor LLVMPipe performance (bug 1296086).  Defaults to true for
--    # unit tests on linux platforms and false otherwise
--    Optional('allow-software-gl-layers'): bool,
--
--    # For tasks that will run in docker-worker or docker-engine, this is the
--    # name of the docker image or in-tree docker image to run the task in.  If
--    # in-tree, then a dependency will be created automatically.  This is
--    # generally `desktop-test`, or an image that acts an awful lot like it.
--    Required('docker-image', default={'in-tree': 'desktop-test'}): optionally_keyed_by(
--        'test-platform',
--        Any(
--            # a raw Docker image path (repo/image:tag)
--            basestring,
--            # an in-tree generated docker image (from `taskcluster/docker/<name>`)
--            {'in-tree': basestring}
--        )
--    ),
--
--    # seconds of runtime after which the task will be killed.  Like 'chunks',
--    # this can be keyed by test platform.
--    Required('max-run-time', default=3600): optionally_keyed_by(
--        'test-platform',
--        int),
--
--    # the exit status code that indicates the task should be retried
--    Optional('retry-exit-status'): int,
--
--    # Whether to perform a gecko checkout.
--    Required('checkout', default=False): bool,
--
--    # Whether to perform a machine reboot after the test is done
--    Optional('reboot', default=False):
--        Any(False, 'always', 'on-exception', 'on-failure'),
--
--    # What to run
--    Required('mozharness'): optionally_keyed_by(
--        'test-platform', {
--            # the mozharness script used to run this task
--            Required('script'): basestring,
--
--            # the config files required for the task
--            Required('config'): optionally_keyed_by(
--                'test-platform',
--                [basestring]),
--
--            # mochitest flavor for mochitest runs
--            Optional('mochitest-flavor'): basestring,
--
--            # any additional actions to pass to the mozharness command
--            Optional('actions'): [basestring],
--
--            # additional command-line options for mozharness, beyond those
--            # automatically added
--            Required('extra-options', default=[]): optionally_keyed_by(
--                'test-platform',
--                [basestring]),
--
--            # the artifact name (including path) to test on the build task; this is
--            # generally set in a per-kind transformation
--            Optional('build-artifact-name'): basestring,
--
--            # If true, tooltool downloads will be enabled via relengAPIProxy.
--            Required('tooltool-downloads', default=False): bool,
--
--            # This mozharness script also runs in Buildbot and tries to read a
--            # buildbot config file, so tell it not to do so in TaskCluster
--            Required('no-read-buildbot-config', default=False): bool,
--
--            # Add --blob-upload-branch=<project> mozharness parameter
--            Optional('include-blob-upload-branch'): bool,
--
--            # The setting for --download-symbols (if omitted, the option will not
--            # be passed to mozharness)
--            Optional('download-symbols'): Any(True, 'ondemand'),
--
--            # If set, then MOZ_NODE_PATH=/usr/local/bin/node is included in the
--            # environment.  This is more than just a helpful path setting -- it
--            # causes xpcshell tests to start additional servers, and runs
--            # additional tests.
--            Required('set-moz-node-path', default=False): bool,
--
--            # If true, include chunking information in the command even if the number
--            # of chunks is 1
--            Required('chunked', default=False): optionally_keyed_by(
--                'test-platform',
--                bool),
--
--            # The chunking argument format to use
--            Required('chunking-args', default='this-chunk'): Any(
--                # Use the usual --this-chunk/--total-chunk arguments
--                'this-chunk',
--                # Use --test-suite=<suite>-<chunk-suffix>; see chunk-suffix, below
--                'test-suite-suffix',
--            ),
--
--            # the string to append to the `--test-suite` argument when
--            # chunking-args = test-suite-suffix; "<CHUNK>" in this string will
--            # be replaced with the chunk number.
--            Optional('chunk-suffix'): basestring,
--
--            Required('requires-signed-builds', default=False): optionally_keyed_by(
--                'test-platform',
--                bool),
--        }
--    ),
--
--    # The current chunk; this is filled in by `all_kinds.py`
--    Optional('this-chunk'): int,
--
--    # os user groups for test task workers; required scopes will be
--    # added automatically
--    Optional('os-groups', default=[]): optionally_keyed_by(
--        'test-platform',
--        [basestring]),
--
--    # -- values supplied by the task-generation infrastructure
--
--    # the platform of the build this task is testing
--    'build-platform': basestring,
--
--    # the label of the build task generating the materials to test
--    'build-label': basestring,
--
--    # the label of the signing task generating the materials to test.
--    # Signed builds are used in xpcshell tests on Windows, for instance.
--    Optional('build-signing-label'): basestring,
--
--    # the build's attributes
--    'build-attributes': {basestring: object},
--
--    # the platform on which the tests will run
--    'test-platform': basestring,
--
--    # the name of the test (the key in tests.yml)
--    'test-name': basestring,
--
--    # the product name, defaults to firefox
--    Optional('product'): basestring,
--
--    # conditional files to determine when these tests should be run
--    Optional('when'): Any({
--        Optional('files-changed'): [basestring],
--    }),
--
--    Optional('worker-type'): optionally_keyed_by(
--        'test-platform',
--        Any(basestring, None),
--    ),
--
--}, required=True)
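# Minimal runnable sketch (assumed data) of what an optionally-keyed-by field
# in the schema above looks like before and after resolution; the real logic,
# including regex matching, is taskgraph.util.schema.resolve_keyed_by:
def resolve_sketch(value, test_platform):
    if isinstance(value, dict) and 'by-test-platform' in value:
        alternatives = value['by-test-platform']
        return alternatives.get(test_platform, alternatives.get('default'))
    return value

chunks = {'by-test-platform': {'windows10-64/debug': 8, 'default': 5}}
assert resolve_sketch(chunks, 'windows10-64/debug') == 8
assert resolve_sketch(chunks, 'linux64/opt') == 5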
--
--
--@transforms.add
--def validate(config, tests):
--    for test in tests:
--        yield validate_schema(test_description_schema, test,
--                              "In test {!r}:".format(test['test-name']))
--
--
--@transforms.add
--def handle_keyed_by_mozharness(config, tests):
--    """Resolve a mozharness field if it is keyed by something"""
--    for test in tests:
--        resolve_keyed_by(test, 'mozharness', item_name=test['test-name'])
--        yield test
--
--
--@transforms.add
--def set_defaults(config, tests):
--    for test in tests:
--        build_platform = test['build-platform']
--        if build_platform.startswith('android'):
--            # all Android test tasks download internal objects from tooltool
--            test['mozharness']['tooltool-downloads'] = True
--            test['mozharness']['actions'] = ['get-secrets']
--            # Android doesn't do e10s
--            test['e10s'] = False
--            # loopback-video is always true for Android, but false for other
--            # platform phyla
--            test['loopback-video'] = True
--        else:
--            # all non-android tests want to run the bits that require node
--            test['mozharness']['set-moz-node-path'] = True
--            test.setdefault('e10s', 'both')
--
--        # software-gl-layers is only meaningful on linux unittests, where it defaults to True
--        if test['test-platform'].startswith('linux') and test['suite'] != 'talos':
--            test.setdefault('allow-software-gl-layers', True)
--        else:
--            test['allow-software-gl-layers'] = False
--
--        # Enable WebRender by default on the QuantumRender test platform, since
--        # the whole point of QuantumRender is to run with WebRender enabled.
--        # If other *-qr test platforms are added they should also be checked for
--        # here; currently linux64-qr is the only one.
--        if test['test-platform'].startswith('linux64-qr'):
--            test['webrender'] = True
--        else:
--            test.setdefault('webrender', False)
--
--        test.setdefault('try-name', test['test-name'])
--
--        test.setdefault('os-groups', [])
--        test.setdefault('chunks', 1)
--        test.setdefault('run-on-projects', 'built-projects')
--        test.setdefault('instance-size', 'default')
--        test.setdefault('max-run-time', 3600)
--        test.setdefault('reboot', True)
--        test['mozharness'].setdefault('extra-options', [])
--        yield test
--
--
--@transforms.add
--def setup_talos(config, tests):
--    """Add options that are specific to talos jobs (identified by suite=talos)"""
--    for test in tests:
--        if test['suite'] != 'talos':
--            yield test
--            continue
--
--        extra_options = test.setdefault('mozharness', {}).setdefault('extra-options', [])
--        extra_options.append('--add-option')
--        extra_options.append('--webServer,localhost')
--        extra_options.append('--use-talos-json')
--
--        # Per https://bugzilla.mozilla.org/show_bug.cgi?id=1357753#c3, branch
--        # name is only required for try
--        if config.params['project'] == 'try':
--            extra_options.append('--branch-name')
--            extra_options.append('Try')
--
--        yield test
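# Runnable sketch (assuming a try push) of the talos options accumulated above:
project = 'try'  # stands in for config.params['project']
extra_options = ['--add-option', '--webServer,localhost', '--use-talos-json']
if project == 'try':
    extra_options += ['--branch-name', 'Try']
assert extra_options[-2:] == ['--branch-name', 'Try']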
--
--
--@transforms.add
--def set_target(config, tests):
--    for test in tests:
--        build_platform = test['build-platform']
--        if build_platform.startswith('macosx'):
--            target = 'target.dmg'
--        elif build_platform.startswith('android'):
--            if 'geckoview' in test['test-name']:
--                target = 'geckoview_example.apk'
--            else:
--                target = 'target.apk'
--        elif build_platform.startswith('win'):
--            target = 'target.zip'
--        else:
--            target = 'target.tar.bz2'
--        test['mozharness']['build-artifact-name'] = 'public/build/' + target
--
--        yield test
--
--
--@transforms.add
--def set_treeherder_machine_platform(config, tests):
--    """Set the appropriate task.extra.treeherder.machine.platform"""
--    translation = {
--        # Linux64 build platforms for asan and pgo are specified differently to
--        # treeherder.
--        'linux64-asan/opt': 'linux64/asan',
--        'linux64-pgo/opt': 'linux64/pgo',
--        'macosx64/debug': 'osx-10-10/debug',
--        'macosx64/opt': 'osx-10-10/opt',
--        'win64-asan/opt': 'windows10-64/asan',
--        'win32-pgo/opt': 'windows7-32/pgo',
--        'win64-pgo/opt': 'windows10-64/pgo',
--        # The build names for Android platforms have partially evolved over the
--        # years and need to be translated.
--        'android-api-16/debug': 'android-4-3-armv7-api16/debug',
--        'android-api-16/opt': 'android-4-3-armv7-api16/opt',
--        'android-x86/opt': 'android-4-2-x86/opt',
--        'android-api-16-gradle/opt': 'android-api-16-gradle/opt',
--    }
--    for test in tests:
--        # For most desktop platforms, the above table is not used for "regular"
--        # builds, so we'll always pick the test platform here.
--        # On macOS though, the regular builds are in the table.  This causes a
--        # conflict in `verify_task_graph_symbol` once you add a new test
--        # platform based on regular macOS builds, such as for Stylo.
--        # Since it's unclear if the regular macOS builds can be removed from
--        # the table, work around the issue for Stylo.
--        if '-stylo' in test['test-platform']:
--            test['treeherder-machine-platform'] = test['test-platform']
--        else:
--            test['treeherder-machine-platform'] = translation.get(
--                test['build-platform'], test['test-platform'])
--        yield test
--
--
--@transforms.add
--def set_tier(config, tests):
--    """Set the tier based on policy for all test descriptions that do not
--    specify a tier otherwise."""
--    for test in tests:
--        if 'tier' in test:
--            resolve_keyed_by(test, 'tier', item_name=test['test-name'])
--
--        # only override if not set for the test
--        if 'tier' not in test or test['tier'] == 'default':
--            if test['test-platform'] in ['linux32/opt',
--                                         'linux32/debug',
--                                         'linux32-nightly/opt',
--                                         'linux32-devedition/opt',
--                                         'linux64/opt',
--                                         'linux64-nightly/opt',
--                                         'linux64/debug',
--                                         'linux64-pgo/opt',
--                                         'linux64-devedition/opt',
--                                         'linux64-asan/opt',
--                                         'windows7-32/debug',
--                                         'windows7-32/opt',
--                                         'windows7-32-pgo/opt',
--                                         'windows7-32-devedition/opt',
--                                         'windows7-32-nightly/opt',
--                                         'windows10-64/debug',
--                                         'windows10-64/opt',
--                                         'windows10-64-pgo/opt',
--                                         'windows10-64-devedition/opt',
--                                         'windows10-64-nightly/opt',
--                                         'macosx64/opt',
--                                         'macosx64/debug',
--                                         'android-4.3-arm7-api-16/opt',
--                                         'android-4.3-arm7-api-16/debug',
--                                         'android-4.2-x86/opt']:
--                test['tier'] = 1
--            else:
--                test['tier'] = 2
--        yield test
--
--
--@transforms.add
--def set_expires_after(config, tests):
--    """Try jobs expire after 2 weeks; everything else lasts 1 year.  This helps
--    keep storage costs low."""
--    for test in tests:
--        if 'expires-after' not in test:
--            if config.params['project'] == 'try':
--                test['expires-after'] = "14 days"
--            else:
--                test['expires-after'] = "1 year"
--        yield test
--
--
--@transforms.add
--def set_download_symbols(config, tests):
--    """In general, we download symbols immediately for debug builds, but only
--    on demand for everything else. ASAN builds shouldn't download
--    symbols since they don't produce symbol zips; see bug 1283879."""
--    for test in tests:
--        if test['test-platform'].split('/')[-1] == 'debug':
--            test['mozharness']['download-symbols'] = True
--        elif test['build-platform'] == 'linux64-asan/opt' or \
--                test['build-platform'] == 'windows10-64-asan/opt':
--            if 'download-symbols' in test['mozharness']:
--                del test['mozharness']['download-symbols']
--        else:
--            test['mozharness']['download-symbols'] = 'ondemand'
--        yield test
--
--
--@transforms.add
--def handle_keyed_by(config, tests):
--    """Resolve fields that can be keyed by platform, etc."""
--    fields = [
--        'instance-size',
--        'docker-image',
--        'max-run-time',
--        'chunks',
--        'e10s',
--        'suite',
--        'run-on-projects',
--        'os-groups',
--        'mozharness.chunked',
--        'mozharness.config',
--        'mozharness.extra-options',
--        'mozharness.requires-signed-builds',
--        'worker-type',
--    ]
--    for test in tests:
--        for field in fields:
--            resolve_keyed_by(test, field, item_name=test['test-name'],
--                             project=config.params['project'])
--        yield test
--
--
--@transforms.add
--def enable_code_coverage(config, tests):
--    """Enable code coverage for the linux64-ccov/opt & linux64-jsdcov/opt build-platforms"""
--    for test in tests:
--        if test['build-platform'] == 'linux64-ccov/opt':
--            test['mozharness'].setdefault('extra-options', []).append('--code-coverage')
--            test['when'] = {}
--            test['instance-size'] = 'xlarge'
--            test['run-on-projects'] = ['mozilla-central']
--
--            if test['test-name'].startswith('talos'):
--                test['max-run-time'] = 7200
--                test['docker-image'] = {"in-tree": "desktop1604-test"}
--                test['mozharness']['config'] = ['talos/linux64_config_taskcluster.py']
--                test['mozharness']['extra-options'].append('--add-option')
--                test['mozharness']['extra-options'].append('--cycles,1')
--                test['mozharness']['extra-options'].append('--add-option')
--                test['mozharness']['extra-options'].append('--tppagecycles,1')
--                test['mozharness']['extra-options'].append('--add-option')
--                test['mozharness']['extra-options'].append('--no-upload-results')
--                test['mozharness']['extra-options'].append('--add-option')
--                test['mozharness']['extra-options'].append('--tptimeout,15000')
--        elif test['build-platform'] == 'linux64-jsdcov/opt':
--            test['run-on-projects'] = ['mozilla-central']
--        yield test
--
--
--@transforms.add
--def handle_run_on_projects(config, tests):
--    """Handle translating `built-projects` appropriately"""
--    for test in tests:
--        if test['run-on-projects'] == 'built-projects':
--            test['run-on-projects'] = test['build-attributes'].get('run_on_projects', ['all'])
--        yield test
--
--
--@transforms.add
--def split_e10s(config, tests):
--    for test in tests:
--        e10s = test['e10s']
--
--        test.setdefault('attributes', {})
--        test['e10s'] = False
--        test['attributes']['e10s'] = False
--
--        if e10s == 'both':
--            yield copy.deepcopy(test)
--            e10s = True
--        if e10s:
--            test['test-name'] += '-e10s'
--            test['try-name'] += '-e10s'
--            test['e10s'] = True
--            test['attributes']['e10s'] = True
--            group, symbol = split_symbol(test['treeherder-symbol'])
--            if group != '?':
--                group += '-e10s'
--            test['treeherder-symbol'] = join_symbol(group, symbol)
--            if test['suite'] == 'talos':
--                for i, option in enumerate(test['mozharness']['extra-options']):
--                    if option.startswith('--suite='):
--                        test['mozharness']['extra-options'][i] += '-e10s'
--            else:
--                test['mozharness']['extra-options'].append('--e10s')
--        yield test
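# Minimal runnable sketch (assumed test shape) of the split above: a test with
# e10s 'both' is emitted twice, once plain and once with '-e10s' appended.
import copy

def split_e10s_sketch(test):
    e10s = test['e10s']
    test['e10s'] = False
    if e10s == 'both':
        yield copy.deepcopy(test)
        e10s = True
    if e10s:
        yield dict(test, **{'test-name': test['test-name'] + '-e10s',
                            'e10s': True})

out = list(split_e10s_sketch({'test-name': 'mochitest-1', 'e10s': 'both'}))
assert [t['test-name'] for t in out] == ['mochitest-1', 'mochitest-1-e10s']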
--
--
--@transforms.add
--def split_chunks(config, tests):
--    """Based on the 'chunks' key, split tests up into chunks by duplicating
--    them and assigning 'this-chunk' appropriately and updating the treeherder
--    symbol."""
--    for test in tests:
--        if test['chunks'] == 1:
--            test['this-chunk'] = 1
--            yield test
--            continue
--
--        for this_chunk in range(1, test['chunks'] + 1):
--            # copy the test and update with the chunk number
--            chunked = copy.deepcopy(test)
--            chunked['this-chunk'] = this_chunk
--
--            # add the chunk number to the TH symbol
--            group, symbol = split_symbol(chunked['treeherder-symbol'])
--            symbol += str(this_chunk)
--            chunked['treeherder-symbol'] = join_symbol(group, symbol)
--
--            yield chunked
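# Runnable sketch of the chunk numbering above (assumed symbol): a 3-chunk
# test with treeherder symbol 'M(gl)' becomes M(gl1), M(gl2), M(gl3).
group, symbol, chunks = 'M', 'gl', 3
assert ['{}({}{})'.format(group, symbol, n) for n in range(1, chunks + 1)] == \
    ['M(gl1)', 'M(gl2)', 'M(gl3)']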
--
--
--@transforms.add
--def allow_software_gl_layers(config, tests):
--    """
--    Handle the "allow-software-gl-layers" property for platforms where it
--    applies.
--    """
--    for test in tests:
--        if test.get('allow-software-gl-layers'):
--            assert test['instance-size'] != 'legacy',\
--                'Software GL layers on a legacy instance is disallowed (bug 1296086).'
--
--            # This should be set always once bug 1296086 is resolved.
--            test['mozharness'].setdefault('extra-options', [])\
--                              .append("--allow-software-gl-layers")
--
--        yield test
--
--
--@transforms.add
--def enable_webrender(config, tests):
--    """
--    Handle the "webrender" property by passing a flag to mozharness if it is
--    enabled.
--    """
--    for test in tests:
--        if test.get('webrender'):
--            test['mozharness'].setdefault('extra-options', [])\
--                              .append("--enable-webrender")
--
--        yield test
--
--
--@transforms.add
--def set_retry_exit_status(config, tests):
--    """Set the retry exit status to TBPL_RETRY, the value returned by mozharness
--       scripts to indicate a transient failure that should be retried."""
--    for test in tests:
--        test['retry-exit-status'] = 4
--        yield test
--
--
--@transforms.add
--def set_profile(config, tests):
--    """Set profiling mode for tests."""
--    for test in tests:
--        if config.config['args'].profile and test['suite'] == 'talos':
--            test['mozharness']['extra-options'].append('--geckoProfile')
--        yield test
--
--
--@transforms.add
--def set_tag(config, tests):
--    """Set test for a specific tag."""
--    for test in tests:
--        tag = config.config['args'].tag
--        if tag:
--            test['mozharness']['extra-options'].extend(['--tag', tag])
--        yield test
--
--
--@transforms.add
--def set_test_type(config, tests):
--    for test in tests:
--        for test_type in ['mochitest', 'reftest']:
--            if test_type in test['suite'] and 'web-platform' not in test['suite']:
--                test.setdefault('tags', {})['test-type'] = test_type
--        yield test
--
--
--@transforms.add
--def enable_stylo(config, tests):
--    """
--    Force Stylo on for all its tests, except Stylo vs. Gecko reftests where the
--    test harness will handle this.
--    """
--    for test in tests:
--        if '-stylo' not in test['test-platform']:
--            yield test
--            continue
--
--        if 'reftest-stylo' not in test['suite']:
--            test['mozharness'].setdefault('extra-options', []).append('--enable-stylo')
--
--        yield test
--
--
--@transforms.add
--def single_stylo_traversal_tests(config, tests):
--    """Enable single traversal for all tests on the sequential Stylo platform."""
--
--    for test in tests:
--        if not test['test-platform'].startswith('linux64-stylo-sequential/'):
--            yield test
--            continue
--
--        # Bug 1356122 - Run Stylo tests in sequential mode
--        test['mozharness'].setdefault('extra-options', [])\
--                          .append('--single-stylo-traversal')
--        yield test
--
--
--@transforms.add
--def set_worker_type(config, tests):
--    """Set the worker type based on the test platform."""
--    for test in tests:
--        # during the taskcluster migration, this is a bit tortured, but it
--        # will get simpler eventually!
--        test_platform = test['test-platform']
--        if test.get('worker-type'):
--            # This test already has its worker type defined, so just use that (yields below)
--            pass
--        elif test_platform.startswith('macosx'):
--            test['worker-type'] = MACOSX_WORKER_TYPES['macosx64']
--        elif test_platform.startswith('win'):
--            if test.get('suite', '') == 'talos' and \
--                    not any('taskcluster' in cfg for cfg in test['mozharness']['config']):
--                test['worker-type'] = 'buildbot-bridge/buildbot-bridge'
--            else:
--                test['worker-type'] = \
--                    WINDOWS_WORKER_TYPES[test_platform.split('/')[0]][test['virtualization']]
--        elif test_platform.startswith('linux') or test_platform.startswith('android'):
--            if test.get('suite', '') == 'talos' and test['build-platform'] != 'linux64-ccov/opt':
--                if config.config['args'].taskcluster_worker:
--                    test['worker-type'] = 'releng-hardware/gecko-t-linux-talos'
--                else:
--                    test['worker-type'] = 'buildbot-bridge/buildbot-bridge'
--            else:
--                test['worker-type'] = LINUX_WORKER_TYPES[test['instance-size']]
--        else:
--            raise Exception("unknown test_platform {}".format(test_platform))
--
--        yield test
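# Runnable sketch of the lookup above, using the WINDOWS_WORKER_TYPES table
# defined earlier in this file (assumed platform and virtualization values):
assert (WINDOWS_WORKER_TYPES['windows10-64']['virtual-with-gpu']
        == 'aws-provisioner-v1/gecko-t-win10-64-gpu')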
--
--
--@transforms.add
--def skip_win10_hardware(config, tests):
--    """Windows 10 hardware isn't ready yet, don't even bother scheduling
--    unless we're on try"""
--    for test in tests:
--        if 'releng-hardware/gecko-t-win10-64-hw' not in test['worker-type']:
--            yield test
--        elif config.params['project'] == 'try':
--            yield test
--        # Silently drop the test on the floor if it's win10 hardware and we're not on try
--
--
--@transforms.add
--def make_job_description(config, tests):
--    """Convert *test* descriptions to *job* descriptions (input to
--    taskgraph.transforms.job)"""
--
--    for test in tests:
--        label = '{}-{}-{}'.format(config.kind, test['test-platform'], test['test-name'])
--        if test['chunks'] > 1:
--            label += '-{}'.format(test['this-chunk'])
--
--        build_label = test['build-label']
--
--        try_name = test['try-name']
--        if test['suite'] == 'talos':
--            attr_try_name = 'talos_try_name'
--        else:
--            attr_try_name = 'unittest_try_name'
--
--        attr_build_platform, attr_build_type = test['build-platform'].split('/', 1)
--
--        suite = test['suite']
--        if '/' in suite:
--            suite, flavor = suite.split('/', 1)
--        else:
--            flavor = suite
--
--        attributes = test.get('attributes', {})
--        attributes.update({
--            'build_platform': attr_build_platform,
--            'build_type': attr_build_type,
--            'test_platform': test['test-platform'],
--            'test_chunk': str(test['this-chunk']),
--            'unittest_suite': suite,
--            'unittest_flavor': flavor,
--            attr_try_name: try_name,
--        })
--
--        jobdesc = {}
--        name = '{}-{}'.format(test['test-platform'], test['test-name'])
--        jobdesc['name'] = name
--        jobdesc['label'] = label
--        jobdesc['description'] = test['description']
--        jobdesc['when'] = test.get('when', {})
--        jobdesc['attributes'] = attributes
--        jobdesc['dependencies'] = {'build': build_label}
--
--        if test['mozharness']['requires-signed-builds'] is True:
--            jobdesc['dependencies']['build-signing'] = test['build-signing-label']
--
--        jobdesc['expires-after'] = test['expires-after']
--        jobdesc['routes'] = []
--        jobdesc['run-on-projects'] = test['run-on-projects']
--        jobdesc['scopes'] = []
--        jobdesc['tags'] = test.get('tags', {})
--        jobdesc['extra'] = {
--            'chunks': {
--                'current': test['this-chunk'],
--                'total': test['chunks'],
--            },
--            'suite': {
--                'name': suite,
--                'flavor': flavor,
--            },
--        }
--        jobdesc['treeherder'] = {
--            'symbol': test['treeherder-symbol'],
--            'kind': 'test',
--            'tier': test['tier'],
--            'platform': test.get('treeherder-machine-platform', test['build-platform']),
--        }
--
--        # run SETA unless this is a try push
--        jobdesc['optimizations'] = optimizations = []
--        if config.params['project'] != 'try':
--            optimizations.append(['seta'])
--
--        run = jobdesc['run'] = {}
--        run['using'] = 'mozharness-test'
--        run['test'] = test
--
--        jobdesc['worker-type'] = test.pop('worker-type')
--
--        yield jobdesc
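# Runnable sketch (assumed kind/platform/test values) of the label scheme
# built at the top of make_job_description above:
kind, platform, name, chunk = 'test', 'linux64/opt', 'mochitest', 3
label = '{}-{}-{}'.format(kind, platform, name) + '-{}'.format(chunk)
assert label == 'test-linux64/opt-mochitest-3'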
--
--
--def normpath(path):
--    return path.replace('/', '\\')
--
--
--def get_firefox_version():
--    with open('browser/config/version.txt', 'r') as f:
--        return f.readline().strip()
-diff --git a/taskcluster/taskgraph/transforms/toolchain.py b/taskcluster/taskgraph/transforms/toolchain.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/toolchain.py
-+++ /dev/null
-@@ -1,76 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--from __future__ import absolute_import, print_function, unicode_literals
--
--import os
--from taskgraph.transforms.base import TransformSequence
--
--
--transforms = TransformSequence()
--
--
--@transforms.add
--def use_toolchains(config, jobs):
--    """Add dependencies corresponding to toolchains to use, and pass a list
--    of corresponding artifacts to jobs using toolchains.
--    """
--    artifacts = {}
--    # Toolchain jobs can depend on other toolchain jobs, but we don't have full
--    # tasks for them, since they're being transformed. So scan the jobs list in
--    # that case, otherwise, use the list of tasks for the kind dependencies.
--    if config.kind == 'toolchain':
--        jobs = list(jobs)
--        for job in jobs:
--            artifact = job.get('run', {}).get('toolchain-artifact')
--            if artifact:
--                artifacts[job['name']] = artifact
--    else:
--        for task in config.kind_dependencies_tasks:
--            if task.kind != 'toolchain':
--                continue
--            artifact = task.attributes.get('toolchain-artifact')
--            if artifact:
--                artifacts[task.label.replace('%s-' % task.kind, '')] = artifact
--
--    for job in jobs:
--        env = job.setdefault('worker', {}).setdefault('env', {})
--
--        toolchains = job.pop('toolchains', [])
--
--        if config.kind == 'toolchain' and job['name'] in toolchains:
--            raise Exception("Toolchain job %s can't use itself as toolchain"
--                            % job['name'])
--
--        filenames = {}
--        for t in toolchains:
--            if t not in artifacts:
--                raise Exception('Missing toolchain job for %s-%s: %s'
--                                % (config.kind, job['name'], t))
--
--            f = os.path.basename(artifacts[t])
--            if f in filenames:
--                # Build jobs don't support toolchain artifacts with the same
--                # name: they would overwrite one with the other.
--                raise Exception('%s-%s cannot use both %s and %s toolchains: '
--                                'they both have the same artifact name %s'
--                                % (config.kind, job['name'], filenames[f],
--                                   t, f))
--            filenames[f] = t
--
--        if toolchains:
--            job.setdefault('dependencies', {}).update(
--                ('toolchain-%s' % t, 'toolchain-%s' % t)
--                for t in toolchains
--            )
--            # Pass a list of artifact-path@task-id to the job for all the
--            # toolchain artifacts it's going to need, where task-id is
--            # corresponding to the (possibly optimized) toolchain job, and
--            # artifact-path to the toolchain-artifact defined for that
--            # toolchain job.
--            env['MOZ_TOOLCHAINS'] = {'task-reference': ' '.join(
--                '%s@<toolchain-%s>' % (artifacts[t], t)
--                for t in toolchains
--            )}
--
--        yield job
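# Runnable sketch (assumed toolchain names and artifact paths) of the
# MOZ_TOOLCHAINS value assembled above:
artifacts = {'linux64-clang': 'public/build/clang.tar.xz',
             'linux64-rust': 'public/build/rustc.tar.xz'}
toolchains = ['linux64-clang', 'linux64-rust']
moz_toolchains = ' '.join('%s@<toolchain-%s>' % (artifacts[t], t)
                          for t in toolchains)
assert moz_toolchains == ('public/build/clang.tar.xz@<toolchain-linux64-clang> '
                          'public/build/rustc.tar.xz@<toolchain-linux64-rust>')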
-diff --git a/taskcluster/taskgraph/transforms/try_job.py b/taskcluster/taskgraph/transforms/try_job.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/try_job.py
-+++ /dev/null
-@@ -1,19 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--
--transforms = TransformSequence()
--
--
--@transforms.add
--def set_job_try_name(config, jobs):
--    """
--    For a task which is governed by `-j` in try syntax, set the `job_try_name`
--    attribute based on the job name.
--    """
--    for job in jobs:
--        job.setdefault('attributes', {}).setdefault('job_try_name', job['name'])
--        yield job
-diff --git a/taskcluster/taskgraph/transforms/upload_symbols.py b/taskcluster/taskgraph/transforms/upload_symbols.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/transforms/upload_symbols.py
-+++ /dev/null
-@@ -1,53 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Transform the upload-symbols task description template,
--  taskcluster/ci/upload-symbols/job-template.yml
--into an actual task description.
--"""
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--from taskgraph.transforms.base import TransformSequence
--
--
--transforms = TransformSequence()
--
--
--@transforms.add
--def fill_template(config, tasks):
--    for task in tasks:
--        dep = task['dependent-task']
--
--        # Fill out the dynamic fields in the task description
--        task['label'] = dep.label + '-upload-symbols'
--        task['dependencies'] = {'build': dep.label}
--        task['worker']['env']['GECKO_HEAD_REPOSITORY'] = config.params['head_repository']
--        task['worker']['env']['GECKO_HEAD_REV'] = config.params['head_rev']
--
--        build_platform = dep.attributes.get('build_platform')
--        build_type = dep.attributes.get('build_type')
--        attributes = task.setdefault('attributes', {})
--        attributes['build_platform'] = build_platform
--        attributes['build_type'] = build_type
--        if 'nightly' in build_platform:
--            attributes['nightly'] = True
--
--        treeherder = task.get('treeherder', {})
--        th = dep.task.get('extra')['treeherder']
--        treeherder.setdefault('platform',
--                              "{}/{}".format(th['machine']['platform'],
--                                             build_type))
--        treeherder.setdefault('tier', th['tier'])
--        treeherder.setdefault('kind', th['jobKind'])
--        if dep.attributes.get('nightly'):
--            treeherder.setdefault('symbol', 'tc(SymN)')
--        else:
--            treeherder.setdefault('symbol', 'tc(Sym)')
--        task['treeherder'] = treeherder
--
--        # clear out the stuff that's not part of a task description
--        del task['dependent-task']
--
--        yield task
-diff --git a/taskcluster/taskgraph/try_option_syntax.py b/taskcluster/taskgraph/try_option_syntax.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/try_option_syntax.py
-+++ /dev/null
-@@ -1,643 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import argparse
--import copy
--import logging
--import re
--import shlex
--
--logger = logging.getLogger(__name__)
--
--TRY_DELIMITER = 'try:'
--
--# The build type aliases are very cryptic and only used in try flags these are
--# mappings from the single char alias to a longer more recognizable form.
--BUILD_TYPE_ALIASES = {
--    'o': 'opt',
--    'd': 'debug'
--}
--
--# consider anything in this whitelist of kinds to be governed by -b/-p
--BUILD_KINDS = set([
--    'build',
--    'artifact-build',
--    'hazard',
--    'l10n',
--    'valgrind',
--    'static-analysis',
--    'spidermonkey',
--])
--
--
--# mapping from shortcut name (usable with -u) to a boolean function identifying
--# matching test names
--def alias_prefix(prefix):
--    return lambda name: name.startswith(prefix)
--
--
--def alias_contains(infix):
--    return lambda name: infix in name
--
--
--def alias_matches(pattern):
--    pattern = re.compile(pattern)
--    return lambda name: pattern.match(name)
--
--
--UNITTEST_ALIASES = {
--    # Aliases specify shorthands that can be used in try syntax.  The shorthand
--    # is the dictionary key, with the value representing a pattern for matching
--    # unittest_try_names.
--    #
--    # Note that alias expansion is performed in the absence of any chunk
--    # prefixes.  For example, an alias 'foo' defined as alias_prefix('foobar')
--    # would replace "foo-7" with "foobar-7".  Note that a few aliases used to
--    # allow chunks to be specified without a leading `-`, for example
--    # 'mochitest-dt1'. That's no longer supported.
--    'cppunit': alias_prefix('cppunit'),
--    'crashtest': alias_prefix('crashtest'),
--    'crashtest-e10s': alias_prefix('crashtest-e10s'),
--    'e10s': alias_contains('e10s'),
--    'firefox-ui-functional': alias_prefix('firefox-ui-functional'),
--    'firefox-ui-functional-e10s': alias_prefix('firefox-ui-functional-e10s'),
--    'gaia-js-integration': alias_contains('gaia-js-integration'),
--    'gtest': alias_prefix('gtest'),
--    'jittest': alias_prefix('jittest'),
--    'jittests': alias_prefix('jittest'),
--    'jsreftest': alias_prefix('jsreftest'),
--    'jsreftest-e10s': alias_prefix('jsreftest-e10s'),
--    'marionette': alias_prefix('marionette'),
--    'marionette-e10s': alias_prefix('marionette-e10s'),
--    'mochitest': alias_prefix('mochitest'),
--    'mochitests': alias_prefix('mochitest'),
--    'mochitest-e10s': alias_prefix('mochitest-e10s'),
--    'mochitests-e10s': alias_prefix('mochitest-e10s'),
--    'mochitest-debug': alias_prefix('mochitest-debug-'),
--    'mochitest-a11y': alias_contains('mochitest-a11y'),
--    'mochitest-bc': alias_prefix('mochitest-browser-chrome'),
--    'mochitest-e10s-bc': alias_prefix('mochitest-browser-chrome-e10s'),
--    'mochitest-browser-chrome': alias_prefix('mochitest-browser-chrome'),
--    'mochitest-e10s-browser-chrome': alias_prefix('mochitest-browser-chrome-e10s'),
--    'mochitest-chrome': alias_contains('mochitest-chrome'),
--    'mochitest-dt': alias_prefix('mochitest-devtools-chrome'),
--    'mochitest-e10s-dt': alias_prefix('mochitest-devtools-chrome-e10s'),
--    'mochitest-gl': alias_prefix('mochitest-webgl'),
--    'mochitest-gl-e10s': alias_prefix('mochitest-webgl-e10s'),
--    'mochitest-gpu': alias_prefix('mochitest-gpu'),
--    'mochitest-gpu-e10s': alias_prefix('mochitest-gpu-e10s'),
--    'mochitest-clipboard': alias_prefix('mochitest-clipboard'),
--    'mochitest-clipboard-e10s': alias_prefix('mochitest-clipboard-e10s'),
--    'mochitest-jetpack': alias_prefix('mochitest-jetpack'),
--    'mochitest-media': alias_prefix('mochitest-media'),
--    'mochitest-media-e10s': alias_prefix('mochitest-media-e10s'),
--    'mochitest-vg': alias_prefix('mochitest-valgrind'),
--    'reftest': alias_matches(r'^(plain-)?reftest.*$'),
--    'reftest-no-accel': alias_matches(r'^(plain-)?reftest-no-accel.*$'),
--    'reftests': alias_matches(r'^(plain-)?reftest.*$'),
--    'reftests-e10s': alias_matches(r'^(plain-)?reftest-e10s.*$'),
--    'reftest-stylo': alias_matches(r'^(plain-)?reftest-stylo.*$'),
--    'reftest-gpu': alias_matches(r'^(plain-)?reftest-gpu.*$'),
--    'robocop': alias_prefix('robocop'),
--    'web-platform-test': alias_prefix('web-platform-tests'),
--    'web-platform-tests': alias_prefix('web-platform-tests'),
--    'web-platform-tests-e10s': alias_prefix('web-platform-tests-e10s'),
--    'web-platform-tests-reftests': alias_prefix('web-platform-tests-reftests'),
--    'web-platform-tests-reftests-e10s': alias_prefix('web-platform-tests-reftests-e10s'),
--    'xpcshell': alias_prefix('xpcshell'),
--}
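# Runnable sketch of how the alias helpers match try names, using the table
# above (the try names are assumed examples):
assert UNITTEST_ALIASES['mochitest-bc']('mochitest-browser-chrome')  # prefix
assert UNITTEST_ALIASES['e10s']('web-platform-tests-e10s-4')         # substring
assert UNITTEST_ALIASES['reftest']('plain-reftest-3')                # regex
assert not UNITTEST_ALIASES['xpcshell']('mochitest-1')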
--
--# unittest platforms can be specified by substring of the "pretty name", which
--# is basically the old Buildbot builder name.  This dict has {pretty name,
--# [test_platforms]} translations, This includes only the most commonly-used
--# substrings.  This is intended only for backward-compatibility.  New test
--# platforms should have their `test_platform` spelled out fully in try syntax.
--# Note that the test platforms here are only the prefix up to the `/`.
--UNITTEST_PLATFORM_PRETTY_NAMES = {
--    'Ubuntu': ['linux32', 'linux64', 'linux64-asan'],
--    'x64': ['linux64', 'linux64-asan'],
--    'Android 4.3': ['android-4.3-arm7-api-16'],
--    '10.10': ['macosx64'],
--    # other commonly-used substrings for platforms not yet supported with
--    # in-tree taskgraphs:
--    # '10.10.5': [..TODO..],
--    # '10.6': [..TODO..],
--    # '10.8': [..TODO..],
--    # 'Android 2.3 API9': [..TODO..],
--    'Windows 7':  ['windows7-32'],
--    'Windows 7 VM':  ['windows7-32-vm'],
--    'Windows 8':  ['windows8-64'],
--    'Windows 10':  ['windows10-64'],
--    # 'Windows XP': [..TODO..],
--    # 'win32': [..TODO..],
--    # 'win64': [..TODO..],
--}
--
--# We have a few platforms for which we want to do some "extra" builds, or at
--# least build-ish things.  Sort of.  Anyway, these other things are implemented
--# as different "platforms".  These do *not* automatically ride along with "-p
--# all"
--RIDEALONG_BUILDS = {
--    'android-api-16': [
--        'android-api-16-l10n',
--    ],
--    'linux': [
--        'linux-l10n',
--    ],
--    'linux64': [
--        'linux64-l10n',
--        'sm-plain',
--        'sm-nonunified',
--        'sm-arm-sim',
--        'sm-arm64-sim',
--        'sm-compacting',
--        'sm-rootanalysis',
--        'sm-package',
--        'sm-tsan',
--        'sm-asan',
--        'sm-mozjs-sys',
--        'sm-msan',
--        'sm-fuzzing',
--    ],
--}
--
--TEST_CHUNK_SUFFIX = re.compile('(.*)-([0-9]+)$')
--
--
--def escape_whitespace_in_brackets(input_str):
--    '''
--    Try syntax may restrict tests to certain platforms using brackets [].
--    Whitespace may occur inside the brackets; since that is typically invalid
--    shell syntax, we escape it with backslash sequences.
--    '''
--    result = ""
--    in_brackets = False
--    for char in input_str:
--        if char == '[':
--            in_brackets = True
--            result += char
--            continue
--
--        if char == ']':
--            in_brackets = False
--            result += char
--            continue
--
--        if char == ' ' and in_brackets:
--            result += '\ '
--            continue
--
--        result += char
--
--    return result
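# Runnable sketch of the escaping above (assumed try-syntax fragment):
assert (escape_whitespace_in_brackets('-u mochitest[Windows 7]')
        == '-u mochitest[Windows\\ 7]')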
--
--
--def split_try_msg(message):
--    try:
--        try_idx = message.index('try:')
--    except ValueError:
--        return []
--    message = message[try_idx:].split('\n')[0]
--    # shlex used to ensure we split correctly when giving values to argparse.
--    return shlex.split(escape_whitespace_in_brackets(message))
--
--
--def parse_message(message):
--    parts = split_try_msg(message)
--
--    # Argument parser based on try flag flags
--    parser = argparse.ArgumentParser()
--    parser.add_argument('-b', '--build', dest='build_types')
--    parser.add_argument('-p', '--platform', nargs='?',
--                        dest='platforms', const='all', default='all')
--    parser.add_argument('-u', '--unittests', nargs='?',
--                        dest='unittests', const='all', default='all')
--    parser.add_argument('-t', '--talos', nargs='?', dest='talos', const='all', default='none')
--    parser.add_argument('-i', '--interactive',
--                        dest='interactive', action='store_true', default=False)
--    parser.add_argument('-e', '--all-emails',
--                        dest='notifications', action='store_const', const='all')
--    parser.add_argument('-f', '--failure-emails',
--                        dest='notifications', action='store_const', const='failure')
--    parser.add_argument('-j', '--job', dest='jobs', action='append')
--    parser.add_argument('--rebuild-talos', dest='talos_trigger_tests', action='store',
--                        type=int, default=1)
--    parser.add_argument('--setenv', dest='env', action='append')
--    parser.add_argument('--geckoProfile', dest='profile', action='store_true')
--    parser.add_argument('--tag', dest='tag', action='store', default=None)
--    parser.add_argument('--no-retry', dest='no_retry', action='store_true')
--    parser.add_argument('--include-nightly', dest='include_nightly', action='store_true')
--
--    # While we are transitioning from BB to TC, we want to push jobs to tc-worker
--    # machines but not overload machines with every try push. Therefore, we add
--    # this temporary option to be able to push jobs to tc-worker.
--    parser.add_argument('-w', '--taskcluster-worker',
--                        dest='taskcluster_worker', action='store_true', default=False)
--
--    # In order to run test jobs multiple times
--    parser.add_argument('--rebuild', dest='trigger_tests', type=int, default=1)
--    args, _ = parser.parse_known_args(parts)
--    return args
--
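A trimmed, self-contained sketch of the same argparse approach, using only a few of the flags above and a hypothetical try message:

    import argparse
    import shlex

    parser = argparse.ArgumentParser()
    parser.add_argument('-b', '--build', dest='build_types')
    parser.add_argument('-p', '--platform', nargs='?', dest='platforms',
                        const='all', default='all')
    parser.add_argument('-u', '--unittests', nargs='?', dest='unittests',
                        const='all', default='all')

    message = 'try: -b do -p linux64,macosx64 -u mochitest-1'
    # parse_known_args tolerates the leading 'try:' token and any flags
    # this trimmed parser does not know about.
    args, _ = parser.parse_known_args(shlex.split(message))
    print(args.build_types, args.platforms, args.unittests)
    # do linux64,macosx64 mochitest-1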
--
--class TryOptionSyntax(object):
--
--    def __init__(self, message, full_task_graph):
--        """
--        Parse a "try syntax" formatted commit message.  This is the old "-b do -p
--        win32 -u all" format.  Aliases are applied to map short names to full
--        names.
--
--        The resulting object has attributes:
--
--        - build_types: a list containing zero or more of 'opt' and 'debug'
--        - platforms: a list of selected platform names, or None for all
--        - unittests: a list of tests, of the form given below, or None for all
--        - jobs: a list of requested job names, or None for all
--        - trigger_tests: the number of times tests should be triggered (--rebuild)
--        - interactive: true if --interactive
--        - notifications: either None if no notifications or one of 'all' or 'failure'
--        - talos_trigger_tests: the number of times talos tests should be triggered (--rebuild-talos)
--        - env: additional environment variables (ENV=value)
--        - profile: run talos in profile mode
--        - tag: restrict tests to the specified tag
--        - no_retry: do not retry failed jobs
--
--        The unittests and talos lists contain dictionaries of the form:
--
--        {
--            'test': '<suite name>',
--            'platforms': [..platform names..], # to limit to only certain platforms
--            'only_chunks': set([..chunk numbers..]), # to limit only to certain chunks
--        }
--        """
--        self.jobs = []
--        self.build_types = []
--        self.platforms = []
--        self.unittests = []
--        self.talos = []
--        self.trigger_tests = 0
--        self.interactive = False
--        self.notifications = None
--        self.talos_trigger_tests = 0
--        self.env = []
--        self.profile = False
--        self.tag = None
--        self.no_retry = False
--
--        parts = split_try_msg(message)
--        if not parts:
--            return None
--
--        args = parse_message(message)
--        assert args is not None
--
--        self.jobs = self.parse_jobs(args.jobs)
--        self.build_types = self.parse_build_types(args.build_types, full_task_graph)
--        self.platforms = self.parse_platforms(args.platforms, full_task_graph)
--        self.unittests = self.parse_test_option(
--            "unittest_try_name", args.unittests, full_task_graph)
--        self.talos = self.parse_test_option("talos_try_name", args.talos, full_task_graph)
--        self.trigger_tests = args.trigger_tests
--        self.interactive = args.interactive
--        self.notifications = args.notifications
--        self.talos_trigger_tests = args.talos_trigger_tests
--        self.env = args.env
--        self.profile = args.profile
--        self.tag = args.tag
--        self.no_retry = args.no_retry
--        self.include_nightly = args.include_nightly
--
--    def parse_jobs(self, jobs_arg):
--        if not jobs_arg or jobs_arg == ['all']:
--            return None
--        expanded = []
--        for job in jobs_arg:
--            expanded.extend(j.strip() for j in job.split(','))
--        return expanded
--
--    def parse_build_types(self, build_types_arg, full_task_graph):
--        if build_types_arg is None:
--            build_types_arg = []
--
--        build_types = filter(None, [BUILD_TYPE_ALIASES.get(build_type) for
--                             build_type in build_types_arg])
--
--        all_types = set(t.attributes['build_type']
--                        for t in full_task_graph.tasks.itervalues()
--                        if 'build_type' in t.attributes)
--        bad_types = set(build_types) - all_types
--        if bad_types:
--            raise Exception("Unknown build type(s) [%s] specified for try" % ','.join(bad_types))
--
--        return build_types
--
--    def parse_platforms(self, platform_arg, full_task_graph):
--        if platform_arg == 'all':
--            return None
--
--        results = []
--        for build in platform_arg.split(','):
--            results.append(build)
--            if build in RIDEALONG_BUILDS:
--                results.extend(RIDEALONG_BUILDS[build])
--                logger.info("platform %s triggers ridealong builds %s" %
--                            (build, ', '.join(RIDEALONG_BUILDS[build])))
--
--        test_platforms = set(t.attributes['test_platform']
--                             for t in full_task_graph.tasks.itervalues()
--                             if 'test_platform' in t.attributes)
--        build_platforms = set(t.attributes['build_platform']
--                              for t in full_task_graph.tasks.itervalues()
--                              if 'build_platform' in t.attributes)
--        all_platforms = test_platforms | build_platforms
--        bad_platforms = set(results) - all_platforms
--        if bad_platforms:
--            raise Exception("Unknown platform(s) [%s] specified for try" % ','.join(bad_platforms))
--
--        return results
--
--    def parse_test_option(self, attr_name, test_arg, full_task_graph):
--        '''
--
--        Parse a unittest (-u) or talos (-t) option, in the context of a full
--        task graph containing available `unittest_try_name` or `talos_try_name`
--        attributes.  There are three cases:
--
--            - test_arg == 'none' (meaning an empty list)
--            - test_arg == 'all' (meaning use the list of jobs for that job type)
--            - test_arg is a comma-separated string which needs to be parsed
--        '''
--
--        # Empty job list case...
--        if test_arg is None or test_arg == 'none':
--            return []
--
--        all_platforms = set(t.attributes['test_platform'].split('/')[0]
--                            for t in full_task_graph.tasks.itervalues()
--                            if 'test_platform' in t.attributes)
--
--        tests = self.parse_test_opts(test_arg, all_platforms)
--
--        if not tests:
--            return []
--
--        all_tests = set(t.attributes[attr_name]
--                        for t in full_task_graph.tasks.itervalues()
--                        if attr_name in t.attributes)
--
--        # Special case where tests is 'all' and must be expanded
--        if tests[0]['test'] == 'all':
--            results = []
--            all_entry = tests[0]
--            for test in all_tests:
--                entry = {'test': test}
--                # If there are platform restrictions copy them across the list.
--                if 'platforms' in all_entry:
--                    entry['platforms'] = list(all_entry['platforms'])
--                results.append(entry)
--            return self.parse_test_chunks(all_tests, results)
--        else:
--            return self.parse_test_chunks(all_tests, tests)
--
--    def parse_test_opts(self, input_str, all_platforms):
--        '''
--        Parse `testspec,testspec,..`, where each testspec is a test name
--        optionally followed by a list of test platforms or negated platforms in
--        `[]`.
--
--    No brackets indicates that tests should run on all platforms for which
--    builds are available.  If platform specs are provided in brackets, each
--    is treated, from left to right, as an instruction to include or (if
--    negated) exclude a set of test platforms.  A single spec may expand to
--    multiple test platforms via UNITTEST_PLATFORM_PRETTY_NAMES.  If the
--    first platform spec is negated, processing begins with the full set of
--    available test platforms; otherwise, processing begins with an empty
--    set of test platforms.
--        '''
--
--        # Final results which we will return.
--        tests = []
--
--        cur_test = {}
--        token = ''
--        in_platforms = False
--
--        def normalize_platforms():
--            if 'platforms' not in cur_test:
--                return
--            # if the first spec is a negation, start with all platforms
--            if cur_test['platforms'][0][0] == '-':
--                platforms = all_platforms.copy()
--            else:
--                platforms = []
--            for platform in cur_test['platforms']:
--                if platform[0] == '-':
--                    platforms = [p for p in platforms if p != platform[1:]]
--                else:
--                    platforms.append(platform)
--            cur_test['platforms'] = platforms
--
--        def add_test(value):
--            normalize_platforms()
--            cur_test['test'] = value.strip()
--            tests.insert(0, cur_test)
--
--        def add_platform(value):
--            platform = value.strip()
--            if platform[0] == '-':
--                negated = True
--                platform = platform[1:]
--            else:
--                negated = False
--            platforms = UNITTEST_PLATFORM_PRETTY_NAMES.get(platform, [platform])
--            if negated:
--                platforms = ["-" + p for p in platforms]
--            cur_test['platforms'] = platforms + cur_test.get('platforms', [])
--
--        # This might be somewhat confusing but we parse the string _backwards_ so
--        # there is no ambiguity over what state we are in.
--        for char in reversed(input_str):
--
--            # , indicates exiting a state
--            if char == ',':
--
--                # Exit a particular platform.
--                if in_platforms:
--                    add_platform(token)
--
--                # Exit a particular test.
--                else:
--                    add_test(token)
--                    cur_test = {}
--
--                # Token must always be reset after we exit a state
--                token = ''
--            elif char == '[':
--                # Exiting platform state entering test state.
--                add_platform(token)
--                token = ''
--                in_platforms = False
--            elif char == ']':
--                # Entering platform state.
--                in_platforms = True
--            else:
--                # Accumulator.
--                token = char + token
--
--        # Handle any left over tokens.
--        if token:
--            add_test(token)
--
--        return tests
--
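A Python 3 re-implementation of the backward parse, small enough to run on its own (illustration only; the one-entry pretty-name table stands in for UNITTEST_PLATFORM_PRETTY_NAMES):

    def parse_test_opts(input_str, all_platforms, pretty_names):
        tests, cur, token, in_platforms = [], {}, '', False

        def add_platform(value):
            platform = value.strip()
            negated = platform.startswith('-')
            if negated:
                platform = platform[1:]
            platforms = pretty_names.get(platform, [platform])
            if negated:
                platforms = ['-' + p for p in platforms]
            cur['platforms'] = platforms + cur.get('platforms', [])

        def add_test(value):
            if 'platforms' in cur:
                # A leading negation starts from the full platform set.
                negate_first = cur['platforms'][0].startswith('-')
                platforms = list(all_platforms) if negate_first else []
                for p in cur['platforms']:
                    if p.startswith('-'):
                        platforms = [q for q in platforms if q != p[1:]]
                    else:
                        platforms.append(p)
                cur['platforms'] = platforms
            cur['test'] = value.strip()
            tests.insert(0, dict(cur))
            cur.clear()

        # Parse backwards, exactly as above, so '[' and ']' flip states
        # unambiguously.
        for char in reversed(input_str):
            if char == ',':
                if in_platforms:
                    add_platform(token)
                else:
                    add_test(token)
                token = ''
            elif char == '[':
                add_platform(token)
                token, in_platforms = '', False
            elif char == ']':
                in_platforms = True
            else:
                token = char + token
        if token:
            add_test(token)
        return tests

    print(parse_test_opts('xpcshell[Windows 8],mochitest-1',
                          {'windows8-64', 'linux64'},
                          {'Windows 8': ['windows8-64']}))
    # [{'platforms': ['windows8-64'], 'test': 'xpcshell'}, {'test': 'mochitest-1'}]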
--    def handle_alias(self, test, all_tests):
--        '''
--        Expand a test if its name refers to an alias, returning a list of test
--        dictionaries cloned from the first (to maintain any metadata).
--        '''
--        if test['test'] not in UNITTEST_ALIASES:
--            return [test]
--
--        alias = UNITTEST_ALIASES[test['test']]
--
--        def mktest(name):
--            newtest = copy.deepcopy(test)
--            newtest['test'] = name
--            return newtest
--
--        def exprmatch(alias):
--            return [t for t in all_tests if alias(t)]
--
--        return [mktest(t) for t in exprmatch(alias)]
--
--    def parse_test_chunks(self, all_tests, tests):
--        '''
--        Test flags may include parameters to narrow down the number of chunks in a
--        given push. We don't model 1 chunk = 1 job in taskcluster so we must check
--        each test flag to see if it is actually specifying a chunk.
--        '''
--        results = []
--        seen_chunks = {}
--        for test in tests:
--            matches = TEST_CHUNK_SUFFIX.match(test['test'])
--            if matches:
--                name = matches.group(1)
--                chunk = matches.group(2)
--                if name in seen_chunks:
--                    seen_chunks[name].add(chunk)
--                else:
--                    seen_chunks[name] = {chunk}
--                    test['test'] = name
--                    test['only_chunks'] = seen_chunks[name]
--                    results.append(test)
--            else:
--                results.extend(self.handle_alias(test, all_tests))
--
--        # uniquify the results over the test names
--        results = {test['test']: test for test in results}.values()
--        return results
--
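The chunk-suffix convention is easy to see in isolation (the suite names are made up):

    import re

    TEST_CHUNK_SUFFIX = re.compile('(.*)-([0-9]+)$')

    m = TEST_CHUNK_SUFFIX.match('mochitest-browser-chrome-3')
    print(m.group(1), m.group(2))                # mochitest-browser-chrome 3

    # A name without a trailing chunk number simply does not match.
    print(TEST_CHUNK_SUFFIX.match('xpcshell'))   # None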
--    def find_all_attribute_suffixes(self, graph, prefix):
--        rv = set()
--        for t in graph.tasks.itervalues():
--            for a in t.attributes:
--                if a.startswith(prefix):
--                    rv.add(a[len(prefix):])
--        return sorted(rv)
--
--    def task_matches(self, task):
--        attr = task.attributes.get
--
--        def check_run_on_projects():
--            if attr('nightly') and not self.include_nightly:
--                return False
--            return set(['try', 'all']) & set(attr('run_on_projects', []))
--
--        def match_test(try_spec, attr_name):
--            run_by_default = True
--            if attr('build_type') not in self.build_types:
--                return False
--            if self.platforms is not None:
--                if attr('build_platform') not in self.platforms:
--                    return False
--            else:
--                if not check_run_on_projects():
--                    run_by_default = False
--            if try_spec is None:
--                return run_by_default
--            # TODO: optimize this search a bit
--            for test in try_spec:
--                if attr(attr_name) == test['test']:
--                    break
--            else:
--                return False
--            if 'only_chunks' in test and attr('test_chunk') not in test['only_chunks']:
--                return False
--            if 'platforms' in test:
--                platform = attr('test_platform', '').split('/')[0]
--                # Platforms can be forced by syntax like "-u xpcshell[Windows 8]"
--                return platform in test['platforms']
--            elif run_by_default:
--                return check_run_on_projects()
--            else:
--                return False
--
--        job_try_name = attr('job_try_name')
--        if job_try_name:
--            # Beware the subtle distinction between [] and None for self.jobs and self.platforms.
--            # They will be [] if there was no try syntax, and None if try syntax was detected but
--            # they remained unspecified.
--            if self.jobs and job_try_name not in self.jobs:
--                return False
--            elif not self.jobs and 'build' in task.dependencies:
--                # We exclude tasks with build dependencies from the default set of jobs because
--                # they will schedule their builds even if they end up optimized away. This means
--                # to run these tasks on try, they'll need to be explicitly specified by -j until
--                # we find a better solution (see bug 1372510).
--                return False
--            elif not self.jobs and attr('build_platform'):
--                if self.platforms is None or attr('build_platform') in self.platforms:
--                    return True
--                return False
--            return True
--        elif attr('kind') == 'test':
--            return match_test(self.unittests, 'unittest_try_name') \
--                 or match_test(self.talos, 'talos_try_name')
--        elif attr('kind') in BUILD_KINDS:
--            if attr('build_type') not in self.build_types:
--                return False
--            elif self.platforms is None:
--                # for "-p all", look for try in the 'run_on_projects' attribute
--                return check_run_on_projects()
--            else:
--                if attr('build_platform') not in self.platforms:
--                    return False
--            return True
--        else:
--            return False
--
--    def __str__(self):
--        def none_for_all(list):
--            if list is None:
--                return '<all>'
--            return ', '.join(str(e) for e in list)
--
--        return "\n".join([
--            "build_types: " + ", ".join(self.build_types),
--            "platforms: " + none_for_all(self.platforms),
--            "unittests: " + none_for_all(self.unittests),
--            "talos: " + none_for_all(self.talos),
--            "jobs: " + none_for_all(self.jobs),
--            "trigger_tests: " + str(self.trigger_tests),
--            "interactive: " + str(self.interactive),
--            "notifications: " + str(self.notifications),
--            "talos_trigger_tests: " + str(self.talos_trigger_tests),
--            "env: " + str(self.env),
--            "profile: " + str(self.profile),
--            "tag: " + str(self.tag),
--            "no_retry: " + str(self.no_retry),
--        ])
-diff --git a/taskcluster/taskgraph/util/__init__.py b/taskcluster/taskgraph/util/__init__.py
-deleted file mode 100644
-diff --git a/taskcluster/taskgraph/util/attributes.py b/taskcluster/taskgraph/util/attributes.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/util/attributes.py
-+++ /dev/null
-@@ -1,99 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--import re
--
--
--INTEGRATION_PROJECTS = {
--    'mozilla-inbound',
--    'autoland',
--}
--
--TRUNK_PROJECTS = INTEGRATION_PROJECTS | {'mozilla-central', }
--
--RELEASE_PROJECTS = {
--    'mozilla-central',
--    'mozilla-aurora',
--    'mozilla-beta',
--    'mozilla-release',
--}
--
--_OPTIONAL_ATTRIBUTES = ('nightly', 'signed')
--
--
--def attrmatch(attributes, **kwargs):
--    """Determine whether the given set of task attributes matches.  The
--    conditions are given as keyword arguments, where each keyword names an
--    attribute.  The keyword value can be a literal, a set, or a callable.  A
--    literal must match the attribute exactly.  Given a set, the attribute value
--    must be in the set.  A callable is called with the attribute value.  If an
--    attribute is specified as a keyword argument but not present in the
--    attributes, the result is False."""
--    for kwkey, kwval in kwargs.iteritems():
--        if kwkey not in attributes:
--            return False
--        attval = attributes[kwkey]
--        if isinstance(kwval, set):
--            if attval not in kwval:
--                return False
--        elif callable(kwval):
--            if not kwval(attval):
--                return False
--        elif kwval != attributes[kwkey]:
--            return False
--    return True
--
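A Python 3 rendering of the same matching rules, exercising all three kinds of conditions (the attribute values are illustrative):

    def attrmatch(attributes, **kwargs):
        # Python 3 sketch of the helper above.
        for key, cond in kwargs.items():
            if key not in attributes:
                return False
            value = attributes[key]
            if isinstance(cond, set):
                if value not in cond:
                    return False
            elif callable(cond):
                if not cond(value):
                    return False
            elif cond != value:
                return False
        return True

    attrs = {'kind': 'build', 'build_platform': 'linux64'}
    assert attrmatch(attrs, kind='build')                         # literal
    assert attrmatch(attrs, build_platform={'linux64', 'win32'})  # set membership
    assert attrmatch(attrs, kind=lambda k: k.startswith('b'))     # callable
    assert not attrmatch(attrs, nightly=True)                     # absent attribute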
--
--def keymatch(attributes, target):
--    """Determine if any keys in attributes are a match to target, then return
--    a list of matching values. Exact matches are checked first; failing
--    that, regex matches, and finally the default key.
--    """
--    # exact match
--    if target in attributes:
--        return [attributes[target]]
--
--    # regular expression match
--    matches = [v for k, v in attributes.iteritems() if re.match(k + '$', target)]
--    if matches:
--        return matches
--
--    # default
--    if 'default' in attributes:
--        return [attributes['default']]
--
--    return []
--
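keymatch is what later powers the by-* lookups in schema.py; a Python 3 sketch, reusing the chunk-count alternatives that reappear in the resolve_keyed_by docstring further below:

    import re

    def keymatch(attributes, target):
        # Python 3 sketch of the helper above.
        if target in attributes:
            return [attributes[target]]
        matches = [v for k, v in attributes.items() if re.match(k + '$', target)]
        if matches:
            return matches
        if 'default' in attributes:
            return [attributes['default']]
        return []

    alternatives = {'macosx-10.11/debug': 13, 'win.*': 6, 'default': 12}
    print(keymatch(alternatives, 'macosx-10.11/debug'))  # [13]  exact match
    print(keymatch(alternatives, 'win7/opt'))            # [6]   regex match
    print(keymatch(alternatives, 'linux64/opt'))         # [12]  default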
--
--def match_run_on_projects(project, run_on_projects):
--    """Determine whether the given project is included in the `run-on-projects`
--    parameter, applying expansions for things like "integration" mentioned in
--    the attribute documentation."""
--    if 'all' in run_on_projects:
--        return True
--    if 'integration' in run_on_projects:
--        if project in INTEGRATION_PROJECTS:
--            return True
--    if 'release' in run_on_projects:
--        if project in RELEASE_PROJECTS:
--            return True
--    if 'trunk' in run_on_projects:
--        if project in TRUNK_PROJECTS:
--            return True
--
--    return project in run_on_projects
--
--
--def copy_attributes_from_dependent_job(dep_job):
--    attributes = {
--        'build_platform': dep_job.attributes.get('build_platform'),
--        'build_type': dep_job.attributes.get('build_type'),
--    }
--
--    attributes.update({
--        attr: dep_job.attributes[attr]
--        for attr in _OPTIONAL_ATTRIBUTES if attr in dep_job.attributes
--    })
--
--    return attributes
-diff --git a/taskcluster/taskgraph/util/docker.py b/taskcluster/taskgraph/util/docker.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/util/docker.py
-+++ /dev/null
-@@ -1,165 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import hashlib
--import os
--import shutil
--import subprocess
--import tarfile
--import tempfile
--
--from mozpack.archive import (
--    create_tar_gz_from_files,
--)
--from .. import GECKO
--
--
--IMAGE_DIR = os.path.join(GECKO, 'taskcluster', 'docker')
--INDEX_PREFIX = 'docker.images.v2'
--
--
--def docker_image(name, by_tag=False):
--    '''
--        Resolve in-tree prebuilt docker image to ``<registry>/<repository>@sha256:<digest>``,
--        or ``<registry>/<repository>:<tag>`` if `by_tag` is `True`.
--    '''
--    try:
--        with open(os.path.join(IMAGE_DIR, name, 'REGISTRY')) as f:
--            registry = f.read().strip()
--    except IOError:
--        with open(os.path.join(IMAGE_DIR, 'REGISTRY')) as f:
--            registry = f.read().strip()
--
--    if not by_tag:
--        hashfile = os.path.join(IMAGE_DIR, name, 'HASH')
--        try:
--            with open(hashfile) as f:
--                return '{}/{}@{}'.format(registry, name, f.read().strip())
--        except IOError:
--            raise Exception('Failed to read HASH file {}'.format(hashfile))
--
--    try:
--        with open(os.path.join(IMAGE_DIR, name, 'VERSION')) as f:
--            tag = f.read().strip()
--    except IOError:
--        tag = 'latest'
--    return '{}/{}:{}'.format(registry, name, tag)
--
--
--def generate_context_hash(topsrcdir, image_path, image_name):
--    """Generates a sha256 hash for context directory used to build an image."""
--
--    # It is a bit unfortunate we have to create a temp file here - it would
--    # be nicer to use an in-memory buffer.
--    fd, p = tempfile.mkstemp()
--    os.close(fd)
--    try:
--        return create_context_tar(topsrcdir, image_path, p, image_name)
--    finally:
--        os.unlink(p)
--
--
--def create_context_tar(topsrcdir, context_dir, out_path, prefix):
--    """Create a context tarball.
--
--    A directory ``context_dir`` containing a Dockerfile will be assembled into
--    a gzipped tar file at ``out_path``. Files inside the archive will be
--    prefixed by directory ``prefix``.
--
--    We also scan the source Dockerfile for special syntax that influences
--    context generation.
--
--    If a line in the Dockerfile has the form ``# %include <path>``,
--    the relative path specified on that line will be matched against
--    files in the source repository and added to the context under the
--    path ``topsrcdir/``. If an entry is a directory, we add all files
--    under that directory.
--
--    Returns the SHA-256 hex digest of the created archive.
--    """
--    archive_files = {}
--
--    for root, dirs, files in os.walk(context_dir):
--        for f in files:
--            source_path = os.path.join(root, f)
--            rel = source_path[len(context_dir) + 1:]
--            archive_path = os.path.join(prefix, rel)
--            archive_files[archive_path] = source_path
--
--    # Parse Dockerfile for special syntax of extra files to include.
--    with open(os.path.join(context_dir, 'Dockerfile'), 'rb') as fh:
--        for line in fh:
--            line = line.rstrip()
--            if not line.startswith('# %include'):
--                continue
--
--            p = line[len('# %include '):].strip()
--            if os.path.isabs(p):
--                raise Exception('extra include path cannot be absolute: %s' % p)
--
--            fs_path = os.path.normpath(os.path.join(topsrcdir, p))
--            # Check for filesystem traversal exploits.
--            if not fs_path.startswith(topsrcdir):
--                raise Exception('extra include path outside topsrcdir: %s' % p)
--
--            if not os.path.exists(fs_path):
--                raise Exception('extra include path does not exist: %s' % p)
--
--            if os.path.isdir(fs_path):
--                for root, dirs, files in os.walk(fs_path):
--                    for f in files:
--                        source_path = os.path.join(root, f)
--                        archive_path = os.path.join(prefix, 'topsrcdir', p, f)
--                        archive_files[archive_path] = source_path
--            else:
--                archive_path = os.path.join(prefix, 'topsrcdir', p)
--                archive_files[archive_path] = fs_path
--
--    with open(out_path, 'wb') as fh:
--        create_tar_gz_from_files(fh, archive_files, '%s.tar.gz' % prefix)
--
--    h = hashlib.sha256()
--    with open(out_path, 'rb') as fh:
--        while True:
--            data = fh.read(32768)
--            if not data:
--                break
--            h.update(data)
--    return h.hexdigest()
--
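A Python 3 sketch of the '# %include' scan described above (the include paths are hypothetical examples):

    dockerfile_lines = [
        b'FROM ubuntu:16.04',
        b'# %include taskcluster/scripts/run-task',
        b'# %include testing/mozharness',
        b'RUN apt-get update',
    ]
    for line in dockerfile_lines:
        line = line.rstrip()
        if not line.startswith(b'# %include'):
            continue
        # Everything after the marker is a path relative to topsrcdir.
        print(line[len(b'# %include '):].strip().decode())
    # taskcluster/scripts/run-task
    # testing/mozharness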
--
--def build_from_context(docker_bin, context_path, prefix, tag=None):
--    """Build a Docker image from a context archive.
--
--    Given the path to a `docker` binary, an image build tar.gz (produced with
--    ``create_context_tar()``), a prefix in that context containing files, and
--    an optional ``tag`` for the produced image, build that Docker image.
--    """
--    d = tempfile.mkdtemp()
--    try:
--        with tarfile.open(context_path, 'r:gz') as tf:
--            tf.extractall(d)
--
--        # If we wanted to do post-processing of the Dockerfile, this is
--        # where we'd do it.
--
--        args = [
--            docker_bin,
--            'build',
--            # Use --no-cache so we always get the latest package updates.
--            '--no-cache',
--        ]
--
--        if tag:
--            args.extend(['-t', tag])
--
--        args.append('.')
--
--        res = subprocess.call(args, cwd=os.path.join(d, prefix))
--        if res:
--            raise Exception('error building image')
--    finally:
--        shutil.rmtree(d)
-diff --git a/taskcluster/taskgraph/util/hash.py b/taskcluster/taskgraph/util/hash.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/util/hash.py
-+++ /dev/null
-@@ -1,37 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--from mozbuild.util import memoize
--from mozpack.files import FileFinder
--import mozpack.path as mozpath
--import hashlib
--
--
--@memoize
--def _hash_path(path):
--    with open(path) as fh:
--        return hashlib.sha256(fh.read()).hexdigest()
--
--
--def hash_paths(base_path, patterns):
--    """
--    Given a list of path patterns, return a digest of the contents of all
--    the corresponding files, similarly to git tree objects or mercurial
--    manifests.
--
--    Each file is hashed. The list of all hashes and file paths is then
--    itself hashed to produce the result.
--    """
--    finder = FileFinder(base_path)
--    h = hashlib.sha256()
--    files = {}
--    for pattern in patterns:
--        files.update(finder.find(pattern))
--    for path in sorted(files.keys()):
--        h.update('{} {}\n'.format(
--            _hash_path(mozpath.abspath(mozpath.join(base_path, path))),
--            mozpath.normsep(path)
--        ))
--    return h.hexdigest()
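The manifest-hash idea can be restated without the mozpack helpers (a simplified sketch that takes explicit relative paths instead of glob patterns):

    import hashlib
    import os

    def hash_paths(base_path, relative_paths):
        # Hash each file, then hash the sorted "digest path" manifest.
        h = hashlib.sha256()
        for rel in sorted(relative_paths):
            with open(os.path.join(base_path, rel), 'rb') as fh:
                file_digest = hashlib.sha256(fh.read()).hexdigest()
            h.update('{} {}\n'.format(file_digest, rel).encode())
        return h.hexdigest()

    # e.g. hash_paths('/some/srcdir', ['taskcluster/docker/base/Dockerfile'])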
-diff --git a/taskcluster/taskgraph/util/parameterization.py b/taskcluster/taskgraph/util/parameterization.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/util/parameterization.py
-+++ /dev/null
-@@ -1,48 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import re
--
--from taskgraph.util.time import json_time_from_now
--
--TASK_REFERENCE_PATTERN = re.compile('<([^>]+)>')
--
--
--def _recurse(val, param_name, param_fn):
--    param_keys = [param_name]
--
--    def recurse(val):
--        if isinstance(val, list):
--            return [recurse(v) for v in val]
--        elif isinstance(val, dict):
--            if val.keys() == param_keys:
--                return param_fn(val[param_name])
--            else:
--                return {k: recurse(v) for k, v in val.iteritems()}
--        else:
--            return val
--    return recurse(val)
--
--
--def resolve_timestamps(now, task_def):
--    """Resolve all instances of `{'relative-datestamp': '..'}` in the given task definition"""
--    return _recurse(task_def, 'relative-datestamp', lambda v: json_time_from_now(v, now))
--
--
--def resolve_task_references(label, task_def, dependencies):
--    """Resolve all instances of `{'task-reference': '..<..>..'}` in the given task
--    definition, using the given dependencies"""
--    def repl(match):
--        key = match.group(1)
--        try:
--            return dependencies[key]
--        except KeyError:
--            # handle escaping '<'
--            if key == '<':
--                return key
--            raise KeyError("task '{}' has no dependency named '{}'".format(label, key))
--
--    return _recurse(task_def, 'task-reference', lambda v: TASK_REFERENCE_PATTERN.sub(repl, v))
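The substitution itself is visible with just the pattern (a minimal sketch; the task id is made up, and the '<' escape handling is omitted):

    import re

    TASK_REFERENCE_PATTERN = re.compile('<([^>]+)>')
    dependencies = {'build': 'abc123DEFtaskid'}

    payload = 'public/build from <build>'
    resolved = TASK_REFERENCE_PATTERN.sub(
        lambda m: dependencies[m.group(1)], payload)
    print(resolved)  # public/build from abc123DEFtaskid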
-diff --git a/taskcluster/taskgraph/util/push_apk.py b/taskcluster/taskgraph/util/push_apk.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/util/push_apk.py
-+++ /dev/null
-@@ -1,70 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Common functions for both push-apk and push-apk-breakpoint.
--"""
--
--import re
--
--from taskgraph.util.schema import validate_schema
--
--REQUIRED_ARCHITECTURES = {
--    'android-x86-nightly',
--    'android-api-16-nightly',
--}
--PLATFORM_REGEX = re.compile(r'signing-android-(\S+)-nightly')
--
--
--def fill_labels_tranform(_, jobs):
--    for job in jobs:
--        job['label'] = job['name']
--
--        yield job
--
--
--def validate_jobs_schema_transform_partial(description_schema, transform_type, config, jobs):
--    for job in jobs:
--        label = job.get('label', '?no-label?')
--        yield validate_schema(
--            description_schema, job,
--            "In {} ({!r} kind) task for {!r}:".format(transform_type, config.kind, label)
--        )
--
--
--def validate_dependent_tasks_transform(_, jobs):
--    for job in jobs:
--        check_every_architecture_is_present_in_dependent_tasks(job['dependent-tasks'])
--        yield job
--
--
--def check_every_architecture_is_present_in_dependent_tasks(dependent_tasks):
--    dep_platforms = set(t.attributes.get('build_platform') for t in dependent_tasks)
--    missed_architectures = REQUIRED_ARCHITECTURES - dep_platforms
--    if missed_architectures:
--        raise Exception('''One or more required architectures are missing.
--
--Required architectures: {}.
--Given dependencies: {}.
--'''.format(REQUIRED_ARCHITECTURES, dependent_tasks)
--        )
--
--
--def delete_non_required_fields_transform(_, jobs):
--    for job in jobs:
--        del job['name']
--        del job['dependent-tasks']
--
--        yield job
--
--
--def generate_dependencies(dependent_tasks):
--    # Because we depend on several tasks that have the same kind, we introduce the platform
--    dependencies = {}
--    for task in dependent_tasks:
--        platform_match = PLATFORM_REGEX.match(task.label)
--        # platform_match is None when the breakpoint task is given
--        task_kind = task.kind if platform_match is None else \
--            '{}-{}'.format(task.kind, platform_match.group(1))
--        dependencies[task_kind] = task.label
--    return dependencies
-diff --git a/taskcluster/taskgraph/util/python_path.py b/taskcluster/taskgraph/util/python_path.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/util/python_path.py
-+++ /dev/null
-@@ -1,27 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--
--def find_object(path):
--    """
--    Find a Python object given a path of the form <modulepath>:<objectpath>.
--    Conceptually equivalent to
--
--        def find_object(modulepath, objectpath):
--            import <modulepath> as mod
--            return mod.<objectpath>
--    """
--    if path.count(':') != 1:
--        raise ValueError(
--            'python path {!r} does not have the form "module:object"'.format(path))
--
--    modulepath, objectpath = path.split(':')
--    obj = __import__(modulepath)
--    for a in modulepath.split('.')[1:]:
--        obj = getattr(obj, a)
--    for a in objectpath.split('.'):
--        obj = getattr(obj, a)
--    return obj
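For instance, the same algorithm resolves a stdlib object (Python 3 sketch of the helper above, minus the error checking):

    def find_object(path):
        modulepath, objectpath = path.split(':')
        obj = __import__(modulepath)
        for a in modulepath.split('.')[1:]:
            obj = getattr(obj, a)
        for a in objectpath.split('.'):
            obj = getattr(obj, a)
        return obj

    import os.path
    assert find_object('os.path:join') is os.path.join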
-diff --git a/taskcluster/taskgraph/util/schema.py b/taskcluster/taskgraph/util/schema.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/util/schema.py
-+++ /dev/null
-@@ -1,176 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import re
--import copy
--import pprint
--import collections
--import voluptuous
--
--from .attributes import keymatch
--
--
--def validate_schema(schema, obj, msg_prefix):
--    """
--    Validate that object satisfies schema.  If not, generate a useful exception
--    beginning with msg_prefix.
--    """
--    try:
--        # deep copy the result since it may include mutable defaults
--        return copy.deepcopy(schema(obj))
--    except voluptuous.MultipleInvalid as exc:
--        msg = [msg_prefix]
--        for error in exc.errors:
--            msg.append(str(error))
--        raise Exception('\n'.join(msg) + '\n' + pprint.pformat(obj))
--
--
--def optionally_keyed_by(*arguments):
--    """
--    Mark a schema value as optionally keyed by any of a number of fields.  The
--    schema is the last argument, and the remaining fields are taken to be the
--    field names.  For example:
--
--        'some-value': optionally_keyed_by(
--            'test-platform', 'build-platform',
--            Any('a', 'b', 'c'))
--
--    The resulting schema will allow nesting of `by-test-platform` and
--    `by-build-platform` in either order.
--    """
--    schema = arguments[-1]
--    fields = arguments[:-1]
--
--    # build the nestable schema by generating schema = Any(schema,
--    # by-fld1, by-fld2, by-fld3) once for each field.  So we don't allow
--    # infinite nesting, but one level of nesting for each field.
--    for _ in arguments:
--        options = [schema]
--        for field in fields:
--            options.append({'by-' + field: {basestring: schema}})
--        schema = voluptuous.Any(*options)
--    return schema
--
--
--def resolve_keyed_by(item, field, item_name, **extra_values):
--    """
--    For values which can either accept a literal value, or be keyed by some
--    other attribute of the item, perform that lookup and replacement in-place
--    (modifying `item` directly).  The field is specified using dotted notation
--    to traverse dictionaries.
--
--    For example, given item::
--
--        job:
--            test-platform: linux128
--            chunks:
--                by-test-platform:
--                    macosx-10.11/debug: 13
--                    win.*: 6
--                    default: 12
--
--    a call to `resolve_keyed_by(item, 'job.chunks', item['thing-name'])`
--    would mutate item in-place to::
--
--        job:
--            test-platform: linux128
--            chunks: 12
--
--    The `item_name` parameter is used to generate useful error messages.
--
--    If extra_values are supplied, they represent additional values available
--    for reference from by-<field>.
--
--    Items can be nested as deeply as the schema will allow::
--
--        chunks:
--            by-test-platform:
--                win.*:
--                    by-project:
--                        ash: ..
--                        cedar: ..
--                linux: 13
--                default: 12
--    """
--    # find the field, returning the item unchanged if anything goes wrong
--    container, subfield = item, field
--    while '.' in subfield:
--        f, subfield = subfield.split('.', 1)
--        if f not in container:
--            return item
--        container = container[f]
--        if not isinstance(container, dict):
--            return item
--
--    if subfield not in container:
--        return item
--    value = container[subfield]
--    while True:
--        if not isinstance(value, dict) or len(value) != 1 or not value.keys()[0].startswith('by-'):
--            return item
--
--        keyed_by = value.keys()[0][3:]  # strip off 'by-' prefix
--        key = extra_values.get(keyed_by) if keyed_by in extra_values else item[keyed_by]
--        alternatives = value.values()[0]
--
--        matches = keymatch(alternatives, key)
--        if len(matches) > 1:
--            raise Exception(
--                "Multiple matching values for {} {!r} found while "
--                "determining item {} in {}".format(
--                    keyed_by, key, field, item_name))
--        elif matches:
--            value = container[subfield] = matches[0]
--            continue
--
--        raise Exception(
--            "No {} matching {!r} nor 'default' found while determining item {} in {}".format(
--                keyed_by, key, field, item_name))
--
--
--# Schemas for YAML files should use dashed identifiers by default.  If there are
--# components of the schema for which there is a good reason to use another format,
--# they can be whitelisted here.
--WHITELISTED_SCHEMA_IDENTIFIERS = [
--    # upstream-artifacts are handed directly to scriptWorker, which expects interCaps
--    lambda path: "[u'upstream-artifacts']" in path,
--]
--
--
--def check_schema(schema):
--    identifier_re = re.compile('^[a-z][a-z0-9-]*$')
--
--    def whitelisted(path):
--        return any(f(path) for f in WHITELISTED_SCHEMA_IDENTIFIERS)
--
--    def iter(path, sch):
--        if isinstance(sch, collections.Mapping):
--            for k, v in sch.iteritems():
--                child = "{}[{!r}]".format(path, k)
--                if isinstance(k, (voluptuous.Optional, voluptuous.Required)):
--                    k = str(k)
--                if isinstance(k, basestring):
--                    if not identifier_re.match(k) and not whitelisted(child):
--                        raise RuntimeError(
--                            'YAML schemas should use dashed lower-case identifiers, '
--                            'not {!r} @ {}'.format(k, child))
--                iter(child, v)
--        elif isinstance(sch, (list, tuple)):
--            for i, v in enumerate(sch):
--                iter("{}[{}]".format(path, i), v)
--        elif isinstance(sch, voluptuous.Any):
--            for v in sch.validators:
--                iter(path, v)
--    iter('schema', schema.schema)
--
--
--def Schema(*args, **kwargs):
--    """
--    Operates identically to voluptuous.Schema, but applying some taskgraph-specific checks
--    in the process.
--    """
--    schema = voluptuous.Schema(*args, **kwargs)
--    check_schema(schema)
--    return schema
-diff --git a/taskcluster/taskgraph/util/scriptworker.py b/taskcluster/taskgraph/util/scriptworker.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/util/scriptworker.py
-+++ /dev/null
-@@ -1,433 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""Make scriptworker.cot.verify more user friendly by making scopes dynamic.
--
--Scriptworker uses certain scopes to determine which sets of credentials to use.
--Certain scopes are restricted by branch in chain of trust verification, and are
--checked again at the script level.  This file provides functions to adjust
--these scopes automatically by project; this makes pushing to try, forking a
--project branch, and merge day uplifts more user friendly.
--
--In the future, we may adjust scopes by other settings as well, e.g. different
--scopes for `push-to-candidates` rather than `push-to-releases`, even if both
--happen on mozilla-beta and mozilla-release.
--"""
--from __future__ import absolute_import, print_function, unicode_literals
--import functools
--import os
--
--
--# constants {{{1
--GECKO = os.path.realpath(os.path.join(__file__, '..', '..', '..', '..'))
--VERSION_PATH = os.path.join(GECKO, "browser", "config", "version_display.txt")
--
--"""Map signing scope aliases to sets of projects.
--
--Currently m-c and DevEdition on m-b use nightly signing; Beta on m-b and m-r
--use release signing. These data structures aren't set-up to handle different
--scopes on the same repo, so we use a different set of them for DevEdition, and
--callers are responsible for using the correct one (by calling the appropriate
--helper below). More context on this in https://bugzilla.mozilla.org/show_bug.cgi?id=1358601.
--
--We will need to add esr support at some point. Eventually we want to add
--nuance so certain m-b and m-r tasks use dep or nightly signing, and we only
--release sign when we have a signed-off set of candidate builds.  This current
--approach works for now, though.
--
--This is a list of list-pairs, for ordering.
--"""
--SIGNING_SCOPE_ALIAS_TO_PROJECT = [[
--    'all-nightly-branches', set([
--        'mozilla-central',
--    ])
--], [
--    'all-release-branches', set([
--        'mozilla-beta',
--        'mozilla-release',
--    ])
--]]
--
--"""Map the signing scope aliases to the actual scopes.
--"""
--SIGNING_CERT_SCOPES = {
--    'all-release-branches': 'project:releng:signing:cert:release-signing',
--    'all-nightly-branches': 'project:releng:signing:cert:nightly-signing',
--    'default': 'project:releng:signing:cert:dep-signing',
--}
--
--DEVEDITION_SIGNING_SCOPE_ALIAS_TO_PROJECT = [[
--    'beta', set([
--        'mozilla-beta',
--    ])
--]]
--
--DEVEDITION_SIGNING_CERT_SCOPES = {
--    'beta': 'project:releng:signing:cert:nightly-signing',
--    'default': 'project:releng:signing:cert:dep-signing',
--}
--
--"""Map beetmover scope aliases to sets of projects.
--"""
--BEETMOVER_SCOPE_ALIAS_TO_PROJECT = [[
--    'all-nightly-branches', set([
--        'mozilla-central',
--        'mozilla-beta',
--        'mozilla-release',
--    ])
--], [
--    'all-release-branches', set([
--        'mozilla-beta',
--        'mozilla-release',
--    ])
--]]
--
--"""The set of all beetmover release target tasks.
--
--Used for both `BEETMOVER_SCOPE_ALIAS_TO_TARGET_TASK` and `get_release_build_number`
--"""
--BEETMOVER_RELEASE_TARGET_TASKS = set([
--    'candidates_fennec',
--])
--
--"""Map beetmover tasks aliases to sets of target task methods.
--
--This is a list of list-pairs, for ordering.
--"""
--BEETMOVER_SCOPE_ALIAS_TO_TARGET_TASK = [[
--    'all-nightly-tasks', set([
--        'nightly_fennec',
--        'nightly_linux',
--        'nightly_macosx',
--        'nightly_win',
--        'nightly_desktop',
--        'mozilla_beta_tasks',
--        'mozilla_release_tasks',
--    ])
--], [
--    'all-release-tasks', BEETMOVER_RELEASE_TARGET_TASKS
--]]
--
--"""Map the beetmover scope aliases to the actual scopes.
--"""
--BEETMOVER_BUCKET_SCOPES = {
--    'all-release-tasks': {
--        'all-release-branches': 'project:releng:beetmover:bucket:release',
--    },
--    'all-nightly-tasks': {
--        'all-nightly-branches': 'project:releng:beetmover:bucket:nightly',
--    },
--    'default': 'project:releng:beetmover:bucket:dep',
--}
--
--"""Map the beetmover tasks aliases to the actual action scopes.
--"""
--BEETMOVER_ACTION_SCOPES = {
--    'all-release-tasks': 'project:releng:beetmover:action:push-to-candidates',
--    'all-nightly-tasks': 'project:releng:beetmover:action:push-to-nightly',
--    'default': 'project:releng:beetmover:action:push-to-staging',
--}
--
--"""Map balrog scope aliases to sets of projects.
--
--This is a list of list-pairs, for ordering.
--"""
--BALROG_SCOPE_ALIAS_TO_PROJECT = [[
--    'nightly', set([
--        'mozilla-central',
--    ])
--], [
--    'beta', set([
--        'mozilla-beta',
--    ])
--], [
--    'release', set([
--        'mozilla-release',
--    ])
--], [
--    'esr', set([
--        'mozilla-esr52',
--    ])
--]]
--
--"""Map the balrog scope aliases to the actual scopes.
--"""
--BALROG_SERVER_SCOPES = {
--    'nightly': 'project:releng:balrog:server:nightly',
--    'aurora': 'project:releng:balrog:server:aurora',
--    'beta': 'project:releng:balrog:server:beta',
--    'release': 'project:releng:balrog:server:release',
--    'esr': 'project:releng:balrog:server:esr',
--    'default': 'project:releng:balrog:server:dep',
--}
--
--"""Map the balrog scope aliases to the actual channel scopes.
--"""
--BALROG_CHANNEL_SCOPES = {
--    'nightly': [
--        'project:releng:balrog:channel:nightly',
--        'project:releng:balrog:channel:nightly-old-id',
--        'project:releng:balrog:channel:aurora'
--    ],
--    'aurora': [
--        'project:releng:balrog:channel:aurora'
--    ],
--    'beta': [
--        'project:releng:balrog:channel:beta',
--        'project:releng:balrog:channel:beta-localtest',
--        'project:releng:balrog:channel:beta-cdntest'
--    ],
--    'release': [
--        'project:releng:balrog:channel:release',
--        'project:releng:balrog:channel:release-localtest',
--        'project:releng:balrog:channel:release-cdntest'
--    ],
--    'esr': [
--        'project:releng:balrog:channel:esr',
--        'project:releng:balrog:channel:esr-localtest',
--        'project:releng:balrog:channel:esr-cdntest'
--    ],
--    'default': [
--        'project:releng:balrog:channel:nightly',
--        'project:releng:balrog:channel:nightly-old-id',
--        'project:releng:balrog:channel:aurora',
--        'project:releng:balrog:channel:beta',
--        'project:releng:balrog:channel:beta-localtest',
--        'project:releng:balrog:channel:beta-cdntest',
--        'project:releng:balrog:channel:release',
--        'project:releng:balrog:channel:release-localtest',
--        'project:releng:balrog:channel:release-cdntest',
--        'project:releng:balrog:channel:esr',
--        'project:releng:balrog:channel:esr-localtest',
--        'project:releng:balrog:channel:esr-cdntest'
--    ]
--}
--
--
--PUSH_APK_SCOPE_ALIAS_TO_PROJECT = [[
--    'central', set([
--        'mozilla-central',
--    ])
--], [
--    'beta', set([
--        'mozilla-beta',
--    ])
--], [
--    'release', set([
--        'mozilla-release',
--    ])
--]]
--
--
--PUSH_APK_SCOPES = {
--    'central': 'project:releng:googleplay:aurora',
--    'beta': 'project:releng:googleplay:beta',
--    'release': 'project:releng:googleplay:release',
--    'default': 'project:releng:googleplay:invalid',
--}
--
--# See https://github.com/mozilla-releng/pushapkscript#aurora-beta-release-vs-alpha-beta-production
--PUSH_APK_GOOGLE_PLAY_TRACT = {
--    'central': 'beta',
--    'beta': 'production',
--    'release': 'rollout',
--    'default': 'invalid',
--}
--
--PUSH_APK_BREAKPOINT_WORKER_TYPE = {
--    'central': 'aws-provisioner-v1/taskcluster-generic',
--    'beta': 'null-provisioner/human-breakpoint',
--    'release': 'null-provisioner/human-breakpoint',
--    'default': 'invalid/invalid',
--}
--
--PUSH_APK_DRY_RUN_OPTION = {
--    'central': False,
--    'beta': False,
--    'release': False,
--    'default': True,
--}
--
--PUSH_APK_ROLLOUT_PERCENTAGE = {
--    # XXX Please make sure to change PUSH_APK_GOOGLE_PLAY_TRACT to 'rollout' if you add a new
--    # supported project
--    'release': 10,
--    'default': None,
--}
--
--
--# scope functions {{{1
--def get_scope_from_project(alias_to_project_map, alias_to_scope_map, config):
--    """Determine the restricted scope from `config.params['project']`.
--
--    Args:
--        alias_to_project_map (list of lists): each list pair contains the
--            alias and the set of projects that match.  This is ordered.
--        alias_to_scope_map (dict): maps each alias to its scope
--        config (dict): the task config that defines the project.
--
--    Returns:
--        string: the scope to use.
--    """
--    for alias, projects in alias_to_project_map:
--        if config.params['project'] in projects and alias in alias_to_scope_map:
--            return alias_to_scope_map[alias]
--    return alias_to_scope_map['default']
--
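Fed the signing tables above, the lookup behaves as follows (a sketch that takes the project name directly instead of a config object):

    def get_scope_from_project(alias_to_project_map, alias_to_scope_map, project):
        for alias, projects in alias_to_project_map:
            if project in projects and alias in alias_to_scope_map:
                return alias_to_scope_map[alias]
        return alias_to_scope_map['default']

    SIGNING_SCOPE_ALIAS_TO_PROJECT = [
        ['all-nightly-branches', {'mozilla-central'}],
        ['all-release-branches', {'mozilla-beta', 'mozilla-release'}],
    ]
    SIGNING_CERT_SCOPES = {
        'all-release-branches': 'project:releng:signing:cert:release-signing',
        'all-nightly-branches': 'project:releng:signing:cert:nightly-signing',
        'default': 'project:releng:signing:cert:dep-signing',
    }

    print(get_scope_from_project(SIGNING_SCOPE_ALIAS_TO_PROJECT,
                                 SIGNING_CERT_SCOPES, 'mozilla-beta'))
    # project:releng:signing:cert:release-signing
    print(get_scope_from_project(SIGNING_SCOPE_ALIAS_TO_PROJECT,
                                 SIGNING_CERT_SCOPES, 'try'))
    # project:releng:signing:cert:dep-signing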
--
--def get_scope_from_target_method(alias_to_tasks_map, alias_to_scope_map, config):
--    """Determine the restricted scope from `config.params['target_tasks_method']`.
--
--    Args:
--        alias_to_tasks_map (list of lists): each list pair contains the
--            alias and the set of target methods that match. This is ordered.
--        alias_to_scope_map (dict): maps each alias to its scope
--        config (dict): the task config that defines the target task method.
--
--    Returns:
--        string: the scope to use.
--    """
--    for alias, tasks in alias_to_tasks_map:
--        if config.params['target_tasks_method'] in tasks and alias in alias_to_scope_map:
--            return alias_to_scope_map[alias]
--    return alias_to_scope_map['default']
--
--
--def get_scope_from_target_method_and_project(alias_to_tasks_map, alias_to_project_map,
--                                             aliases_to_scope_map, config):
--    """Determine the restricted scope from both `target_tasks_method` and `project`.
--
--    On certain branches, we'll need differing restricted scopes based on
--    `target_tasks_method`.  However, we can't key solely on that, since that
--    `target_tasks_method` might be run on an unprivileged branch.  This method
--    checks both.
--
--    Args:
--        alias_to_tasks_map (list of lists): each list pair contains the
--            alias and the set of target methods that match. This is ordered.
--        alias_to_project_map (list of lists): each list pair contains the
--            alias and the set of projects that match.  This is ordered.
--        aliases_to_scope_map (dict of dicts): maps task alias, then project alias, to scope
--        config (dict): the task config that defines the target task method and project.
--
--    Returns:
--        string: the scope to use.
--    """
--    project = config.params['project']
--    target = config.params['target_tasks_method']
--    for alias1, tasks in alias_to_tasks_map:
--        for alias2, projects in alias_to_project_map:
--            if target in tasks and project in projects and \
--                    aliases_to_scope_map.get(alias1, {}).get(alias2):
--                return aliases_to_scope_map[alias1][alias2]
--    return aliases_to_scope_map['default']
--
--
--get_signing_cert_scope = functools.partial(
--    get_scope_from_project,
--    SIGNING_SCOPE_ALIAS_TO_PROJECT,
--    SIGNING_CERT_SCOPES
--)
--
--get_devedition_signing_cert_scope = functools.partial(
--    get_scope_from_project,
--    DEVEDITION_SIGNING_SCOPE_ALIAS_TO_PROJECT,
--    DEVEDITION_SIGNING_CERT_SCOPES
--)
--
--get_beetmover_bucket_scope = functools.partial(
--    get_scope_from_target_method_and_project,
--    BEETMOVER_SCOPE_ALIAS_TO_TARGET_TASK,
--    BEETMOVER_SCOPE_ALIAS_TO_PROJECT,
--    BEETMOVER_BUCKET_SCOPES
--)
--
--get_beetmover_action_scope = functools.partial(
--    get_scope_from_target_method,
--    BEETMOVER_SCOPE_ALIAS_TO_TARGET_TASK,
--    BEETMOVER_ACTION_SCOPES
--)
--
--get_balrog_server_scope = functools.partial(
--    get_scope_from_project,
--    BALROG_SCOPE_ALIAS_TO_PROJECT,
--    BALROG_SERVER_SCOPES
--)
--
--get_balrog_channel_scopes = functools.partial(
--    get_scope_from_project,
--    BALROG_SCOPE_ALIAS_TO_PROJECT,
--    BALROG_CHANNEL_SCOPES
--)
--
--get_push_apk_scope = functools.partial(
--    get_scope_from_project,
--    PUSH_APK_SCOPE_ALIAS_TO_PROJECT,
--    PUSH_APK_SCOPES
--)
--
--get_push_apk_track = functools.partial(
--    get_scope_from_project,
--    PUSH_APK_SCOPE_ALIAS_TO_PROJECT,
--    PUSH_APK_GOOGLE_PLAY_TRACT
--)
--
--get_push_apk_breakpoint_worker_type = functools.partial(
--    get_scope_from_project,
--    PUSH_APK_SCOPE_ALIAS_TO_PROJECT,
--    PUSH_APK_BREAKPOINT_WORKER_TYPE
--)
--
--get_push_apk_dry_run_option = functools.partial(
--    get_scope_from_project,
--    PUSH_APK_SCOPE_ALIAS_TO_PROJECT,
--    PUSH_APK_DRY_RUN_OPTION
--)
--
--get_push_apk_rollout_percentage = functools.partial(
--    get_scope_from_project,
--    PUSH_APK_SCOPE_ALIAS_TO_PROJECT,
--    PUSH_APK_ROLLOUT_PERCENTAGE
--)
--
--
--# release_config {{{1
--def get_release_config(config):
--    """Get the build number and version for a release task.
--
--    Currently only applies to beetmover tasks.
--
--    Args:
--        config (dict): the task config that defines the target task method.
--
--    Raises:
--        ValueError: if a release graph doesn't define a valid
--            `os.environ['BUILD_NUMBER']`
--
--    Returns:
--        dict: containing both `build_number` and `version`.  This can be used to
--            update `task.payload`.
--    """
--    release_config = {}
--    if config.params['target_tasks_method'] in BEETMOVER_RELEASE_TARGET_TASKS:
--        build_number = str(os.environ.get("BUILD_NUMBER", ""))
--        if not build_number.isdigit():
--            raise ValueError("Release graphs must specify `BUILD_NUMBER` in the environment!")
--        release_config['build_number'] = int(build_number)
--        with open(VERSION_PATH, "r") as fh:
--            version = fh.readline().rstrip()
--        release_config['version'] = version
--    return release_config
--
--
--def get_signing_cert_scope_per_platform(build_platform, is_nightly, config):
--    if build_platform in (
--        'linux-devedition-nightly', 'linux64-devedition-nightly',
--        'macosx64-devedition-nightly',
--        'win32-devedition-nightly', 'win64-devedition-nightly',
--    ):
--        return get_devedition_signing_cert_scope(config)
--    elif is_nightly:
--        return get_signing_cert_scope(config)
--    else:
--        return 'project:releng:signing:cert:dep-signing'
-diff --git a/taskcluster/taskgraph/util/seta.py b/taskcluster/taskgraph/util/seta.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/util/seta.py
-+++ /dev/null
-@@ -1,205 +0,0 @@
--import json
--import logging
--import requests
--from collections import defaultdict
--from redo import retry
--from requests import exceptions
--
--logger = logging.getLogger(__name__)
--
--# The list of project names for which SETA is useful
--SETA_PROJECTS = ['mozilla-inbound', 'autoland']
--PROJECT_SCHEDULE_ALL_EVERY_PUSHES = {'mozilla-inbound': 5, 'autoland': 5}
--PROJECT_SCHEDULE_ALL_EVERY_MINUTES = {'mozilla-inbound': 60, 'autoland': 60}
--
--SETA_ENDPOINT = "https://treeherder.mozilla.org/api/project/%s/seta/" \
--                "job-priorities/?build_system_type=%s"
--PUSH_ENDPOINT = "https://hg.mozilla.org/integration/%s/json-pushes/?startID=%d&endID=%d"
--
--
--class SETA(object):
--    """
--    Interface to the SETA service, which defines low-value tasks that can be optimized out
--    of the taskgraph.
--    """
--    def __init__(self):
--        # cached low value tasks, by project
--        self.low_value_tasks = {}
--        self.low_value_bb_tasks = {}
--        # cached push dates by project
--        self.push_dates = defaultdict(dict)
--        # cached push_ids that failed to retrieve datetime for
--        self.failed_json_push_calls = []
--
--    def _get_task_string(self, task_tuple):
--        # convert task tuple to single task string, so the task label sent in can match
--        # remove any empty parts of the tuple
--        task_tuple = [x for x in task_tuple if len(x) != 0]
--
--        if len(task_tuple) == 0:
--            return ''
--        if len(task_tuple) != 3:
--            return ' '.join(task_tuple)
--
--        return 'test-%s/%s-%s' % (task_tuple[0], task_tuple[1], task_tuple[2])
--
--    def query_low_value_tasks(self, project, bbb=False):
--        # Request the set of low value tasks from the SETA service.  Low value tasks will be
--        # optimized out of the task graph.
--        low_value_tasks = []
--
--        if not bbb:
--            # we want to get low priority taskcluster jobs
--            url = SETA_ENDPOINT % (project, 'taskcluster')
--        else:
--            # we want low priority buildbot jobs
--            url = SETA_ENDPOINT % (project, 'buildbot&priority=5')
--
--        # Try to fetch the SETA data twice, falling back to an empty list of low value tasks.
--        # There are 10 seconds between each try.
--        try:
--            logger.debug("Retrieving low-value jobs list from SETA")
--            response = retry(requests.get, attempts=2, sleeptime=10,
--                             args=(url, ),
--                             kwargs={'timeout': 60, 'headers': {}})
--            task_list = json.loads(response.content).get('jobtypes', '')
--
--            if type(task_list) == dict and len(task_list) > 0:
--                if type(task_list.values()[0]) == list and len(task_list.values()[0]) > 0:
--                    low_value_tasks = task_list.values()[0]
--                    # bb job types return a list instead of a single string,
--                    # convert to a single string to match tc tasks format
--                    if type(low_value_tasks[0]) == list:
--                        low_value_tasks = [self._get_task_string(x) for x in low_value_tasks]
--
--            # ensure no build tasks slipped in; we never want to optimize those out
--            low_value_tasks = [x for x in low_value_tasks if 'build' not in x.lower()]
--
--            # Bug 1340065, temporarily disable SETA for linux64-stylo
--            low_value_tasks = [x for x in low_value_tasks if x.find('linux64-stylo') == -1]
--
--        # In the event of a request timeout, requests will raise a Timeout exception.
--        except exceptions.Timeout:
--            logger.warning("SETA timeout, we will treat all test tasks as high value.")
--
--        # In the event of a network problem (e.g. DNS failure, refused connection, etc),
--        # requests will raise a ConnectionError.
--        except exceptions.ConnectionError:
--            logger.warning("SETA connection error, we will treat all test tasks as high value.")
--
--        # In the event of a rare invalid HTTP response (e.g. 404, 401),
--        # requests will raise an HTTPError exception
--        except exceptions.HTTPError:
--            logger.warning("We got bad Http response from ouija,"
--                           " we will treat all test tasks as high value.")
--
--        # Log any other request error as a warning if it wasn't caught above
--        except exceptions.RequestException as error:
--            logger.warning(error)
--
--        # When we get invalid JSON (i.e. 500 error), it results in a ValueError (bug 1313426)
--        except ValueError as error:
--            logger.warning("Invalid JSON, possible server error: {}".format(error))
--
--        return low_value_tasks
--
--    def minutes_between_pushes(self, project, cur_push_id, cur_push_date):
--        # figure out the minutes that have elapsed between the current push and the previous one,
--        # defaulting to the maximum so that, if we can't get a value, the task runs by default
--        min_between_pushes = PROJECT_SCHEDULE_ALL_EVERY_MINUTES.get(project, 60)
--        prev_push_id = cur_push_id - 1
--
--        # cache the pushdate for the current push so we can use it next time
--        self.push_dates[project].update({cur_push_id: cur_push_date})
--
--        # check if we already have the previous push id's datetime cached
--        prev_push_date = self.push_dates[project].get(prev_push_id, 0)
--
--        # we have datetime of current and previous push, so return elapsed minutes and bail
--        if cur_push_date > 0 and prev_push_date > 0:
--            return (cur_push_date - prev_push_date) / 60
--
--        # datetime for previous pushid not cached, so must retrieve it
--        # if we already tried to retrieve the datetime for this pushid
--        # before and the json-push request failed, don't try it again
--        if prev_push_id in self.failed_json_push_calls:
--            return min_between_pushes
--
--        url = PUSH_ENDPOINT % (project, cur_push_id - 2, prev_push_id)
--
--        try:
--            response = retry(requests.get, attempts=2, sleeptime=10,
--                             args=(url, ),
--                             kwargs={'timeout': 60, 'headers': {'User-Agent': 'TaskCluster'}})
--            prev_push_date = json.loads(response.content).get(str(prev_push_id), {}).get('date', 0)
--
--            # cache it for next time
--            self.push_dates[project].update({prev_push_id: prev_push_date})
--
--            # now have datetime of current and previous push
--            if cur_push_date > 0 and prev_push_date > 0:
--                min_between_pushes = (cur_push_date - prev_push_date) / 60
--
--        # In the event of a request timeout, requests will raise a Timeout exception.
--        except exceptions.Timeout:
--            logger.warning("json-pushes timeout, treating task as high value")
--            self.failed_json_push_calls.append(prev_push_id)
--
--        # In the event of a network problem (e.g. DNS failure, refused connection, etc),
--        # requests will raise a ConnectionError.
--        except exceptions.ConnectionError:
--            logger.warning("json-pushes connection error, treating task as high value")
--            self.failed_json_push_calls.append(prev_push_id)
--
--        # In the event of a rare invalid HTTP response (e.g. 404, 401),
--        # requests will raise an HTTPError exception
--        except exceptions.HTTPError:
--            logger.warning("Bad Http response, treating task as high value")
--            self.failed_json_push_calls.append(prev_push_id)
--
--        # When we get invalid JSON (i.e. 500 error), it results in a ValueError (bug 1313426)
--        except ValueError as error:
--            logger.warning("Invalid JSON, possible server error: {}".format(error))
--            self.failed_json_push_calls.append(prev_push_id)
--
--        # Log any other request error as a warning if it wasn't caught above
--        except exceptions.RequestException as error:
--            logger.warning(error)
--            self.failed_json_push_calls.append(prev_push_id)
--
--        return min_between_pushes
--
--    def is_low_value_task(self, label, project, pushlog_id, push_date, bbb_task=False):
--        # marking a task as low_value means it will be optimized out by tc
--        if project not in SETA_PROJECTS:
--            return False
--
--        schedule_all_every = PROJECT_SCHEDULE_ALL_EVERY_PUSHES.get(project, 5)
--        # on every Nth push, want to run all tasks
--        if int(pushlog_id) % schedule_all_every == 0:
--            return False
--
--        # not an Nth push, so SETA applies based on the push count; however,
--        # we also want to ensure we run all tasks at least once per N minutes
--        if self.minutes_between_pushes(
--                project,
--                int(pushlog_id),
--                int(push_date)) >= PROJECT_SCHEDULE_ALL_EVERY_MINUTES.get(project, 60):
--            return False
--
--        if not bbb_task:
--            # cache the low value tasks per project to avoid repeated SETA server queries
--            if project not in self.low_value_tasks:
--                self.low_value_tasks[project] = self.query_low_value_tasks(project)
--            return label in self.low_value_tasks[project]
--
--        # the gecko decision task is asking whether a bbb task is low value, so use bb jobs;
--        # in this case, the label param sent in will already be the buildbot buildername
--        if project not in self.low_value_bb_tasks:
--            self.low_value_bb_tasks[project] = self.query_low_value_tasks(project, bbb=True)
--        return label in self.low_value_bb_tasks[project]
--
--
--# create a single instance of this class, and expose its `is_low_value_task`
--# bound method as a module-level function
--is_low_value_task = SETA().is_low_value_task
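
The last line above is a small but useful idiom: instantiate the class once at import time and export a bound method as a module-level function, so every caller shares the instance's caches. A minimal sketch of the same idiom, assuming nothing beyond the standard library:

    class _Seta(object):
        def __init__(self):
            self.cache = {}          # shared by all callers of `lookup`

        def lookup(self, key):
            # Populate on first use; later calls hit the shared cache.
            return self.cache.setdefault(key, 'computed-%s' % key)

    # create a single instance and expose its bound method
    lookup = _Seta().lookup

    assert lookup('a') is lookup('a')   # same cached object both times
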
-diff --git a/taskcluster/taskgraph/util/signed_artifacts.py b/taskcluster/taskgraph/util/signed_artifacts.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/util/signed_artifacts.py
-+++ /dev/null
-@@ -1,61 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--"""
--Defines artifacts to sign before repackage.
--"""
--
--
--def generate_specifications_of_artifacts_to_sign(
--    build_platform, is_nightly=False, keep_locale_template=True
--):
--    if 'android' in build_platform:
--        artifacts_specifications = [{
--            'artifacts': [
--                'public/build/{locale}/target.apk',
--            ],
--            'formats': ['jar'],
--        }]
--    # XXX: MARs aren't signed here (on any platform) because their internals will be
--    # signed after this stage of the release
--    elif 'macosx' in build_platform:
--        artifacts_specifications = [{
--            'artifacts': ['public/build/{locale}/target.dmg'],
--            'formats': ['macapp', 'widevine'],
--        }]
--    elif 'win' in build_platform:
--        artifacts_specifications = [{
--            'artifacts': [
--                'public/build/{locale}/setup.exe',
--            ],
--            'formats': ['sha2signcode'],
--        }, {
--            'artifacts': [
--                'public/build/{locale}/target.zip',
--            ],
--            'formats': ['sha2signcode', 'widevine'],
--        }]
--        if 'win32' in build_platform and is_nightly:
--            artifacts_specifications[0]['artifacts'] += ['public/build/{locale}/setup-stub.exe']
--    elif 'linux' in build_platform:
--        artifacts_specifications = [{
--            'artifacts': ['public/build/{locale}/target.tar.bz2'],
--            'formats': ['gpg', 'widevine'],
--        }]
--    else:
--        raise Exception("Platform not implemented for signing")
--
--    if not keep_locale_template:
--        artifacts_specifications = _strip_locale_template(artifacts_specifications)
--
--    return artifacts_specifications
--
--
--def _strip_locale_template(artifacts_without_locales):
--    for spec in artifacts_without_locales:
--        for index, artifact in enumerate(spec['artifacts']):
--            stripped_artifact = artifact.format(locale='')
--            stripped_artifact = stripped_artifact.replace('//', '/')
--            spec['artifacts'][index] = stripped_artifact
--
--    return artifacts_without_locales
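
A quick usage sketch of the function above, assuming the module is importable; the result shown is what the string formatting in _strip_locale_template implies for a Linux build:

    specs = generate_specifications_of_artifacts_to_sign(
        'linux64', keep_locale_template=False)
    # '{locale}' is formatted away and the doubled slash collapsed, giving:
    #   [{'artifacts': ['public/build/target.tar.bz2'],
    #     'formats': ['gpg', 'widevine']}]
    print(specs)
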
-diff --git a/taskcluster/taskgraph/util/taskcluster.py b/taskcluster/taskgraph/util/taskcluster.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/util/taskcluster.py
-+++ /dev/null
-@@ -1,103 +0,0 @@
--# -*- coding: utf-8 -*-
--
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import functools
--import yaml
--import requests
--from mozbuild.util import memoize
--from requests.packages.urllib3.util.retry import Retry
--from requests.adapters import HTTPAdapter
--
--
--@memoize
--def get_session():
--    session = requests.Session()
--    retry = Retry(total=5, backoff_factor=0.1,
--                  status_forcelist=[500, 502, 503, 504])
--    session.mount('http://', HTTPAdapter(max_retries=retry))
--    session.mount('https://', HTTPAdapter(max_retries=retry))
--    return session
--
--
--def _do_request(url):
--    session = get_session()
--    response = session.get(url, stream=True)
--    if response.status_code >= 400:
--        # Consume content before raise_for_status, so that the connection can be
--        # reused.
--        response.content
--    response.raise_for_status()
--    return response
--
--
--def _handle_artifact(path, response):
--    if path.endswith('.json'):
--        return response.json()
--    if path.endswith('.yml'):
--        return yaml.load(response.text)
--    response.raw.read = functools.partial(response.raw.read,
--                                          decode_content=True)
--    return response.raw
--
--
--def get_artifact_url(task_id, path, use_proxy=False):
--    if use_proxy:
--        ARTIFACT_URL = 'http://taskcluster/queue/v1/task/{}/artifacts/{}'
--    else:
--        ARTIFACT_URL = 'https://queue.taskcluster.net/v1/task/{}/artifacts/{}'
--    return ARTIFACT_URL.format(task_id, path)
--
--
--def get_artifact(task_id, path, use_proxy=False):
--    """
--    Returns the artifact with the given path for the given task id.
--
--    If the path ends with ".json" or ".yml", the content is deserialized as,
--    respectively, json or yaml, and the corresponding python data (usually
--    dict) is returned.
--    For other types of content, a file-like object is returned.
--    """
--    response = _do_request(get_artifact_url(task_id, path, use_proxy))
--    return _handle_artifact(path, response)
--
--
--def list_artifacts(task_id, use_proxy=False):
--    response = _do_request(get_artifact_url(task_id, '', use_proxy).rstrip('/'))
--    return response.json()['artifacts']
--
--
--def get_index_url(index_path, use_proxy=False):
--    if use_proxy:
--        INDEX_URL = 'http://taskcluster/index/v1/task/{}'
--    else:
--        INDEX_URL = 'https://index.taskcluster.net/v1/task/{}'
--    return INDEX_URL.format(index_path)
--
--
--def find_task_id(index_path, use_proxy=False):
--    response = _do_request(get_index_url(index_path, use_proxy))
--    return response.json()['taskId']
--
--
--def get_artifact_from_index(index_path, artifact_path, use_proxy=False):
--    full_path = index_path + '/artifacts/' + artifact_path
--    response = _do_request(get_index_url(full_path, use_proxy))
--    return _handle_artifact(full_path, response)
--
--
--def get_task_url(task_id, use_proxy=False):
--    if use_proxy:
--        TASK_URL = 'http://taskcluster/queue/v1/task/{}'
--    else:
--        TASK_URL = 'https://queue.taskcluster.net/v1/task/{}'
--    return TASK_URL.format(task_id)
--
--
--def get_task_definition(task_id, use_proxy=False):
--    response = _do_request(get_task_url(task_id, use_proxy))
--    return response.json()
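
get_session above is the standard requests recipe for transparent retries: build one Session, mount an HTTPAdapter with an urllib3 Retry policy for both schemes, and memoize the result. A self-contained sketch of the same recipe (modern urllib3 import path; the retry parameters mirror the code above):

    import requests
    from requests.adapters import HTTPAdapter
    from urllib3.util.retry import Retry

    def make_session():
        session = requests.Session()
        retry = Retry(total=5, backoff_factor=0.1,
                      status_forcelist=[500, 502, 503, 504])
        session.mount('http://', HTTPAdapter(max_retries=retry))
        session.mount('https://', HTTPAdapter(max_retries=retry))
        return session

    session = make_session()
    # Any request made through this session now retries transient 5xx responses.
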
-diff --git a/taskcluster/taskgraph/util/templates.py b/taskcluster/taskgraph/util/templates.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/util/templates.py
-+++ /dev/null
-@@ -1,155 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import os
--
--import pystache
--import yaml
--import copy
--
--# Key used in template inheritance...
--INHERITS_KEY = '$inherits'
--
--
--def merge_to(source, dest):
--    '''
--    Merge dicts and arrays (overriding scalar values)
--
--    Keys from source override keys from dest, and elements from lists in source
--    are appended to lists in dest.
--
--    :param dict source: to copy from
--    :param dict dest: to copy to (modified in place)
--    '''
--
--    for key, value in source.items():
--        # Override mismatching or empty types
--        if type(value) != type(dest.get(key)):  # noqa
--            dest[key] = source[key]
--            continue
--
--        # Merge dict
--        if isinstance(value, dict):
--            merge_to(value, dest[key])
--            continue
--
--        if isinstance(value, list):
--            dest[key] = dest[key] + source[key]
--            continue
--
--        dest[key] = source[key]
--
--    return dest
--
--
--def merge(*objects):
--    '''
--    Merge the given objects, using the semantics described for merge_to, with
--    objects later in the list taking precedence.  From an inheritance
--    perspective, "parents" should be listed before "children".
--
--    Returns the result without modifying any arguments.
--    '''
--    if len(objects) == 1:
--        return copy.deepcopy(objects[0])
--    return merge_to(objects[-1], merge(*objects[:-1]))
--
--
--class TemplatesException(Exception):
--    pass
--
--
--class Templates():
--    '''
--    The taskcluster integration makes heavy use of yaml to describe tasks; this
--    class handles the loading/rendering.
--    '''
--
--    def __init__(self, root):
--        '''
--        Initialize the template renderer.
--
--        :param str root: Root path where to load yaml files.
--        '''
--        if not root:
--            raise TemplatesException('Root is required')
--
--        if not os.path.isdir(root):
--            raise TemplatesException('Root must be a directory')
--
--        self.root = root
--
--    def _inherits(self, path, obj, properties, seen):
--        blueprint = obj.pop(INHERITS_KEY)
--        seen.add(path)
--
--        # Resolve the path here so we can detect circular references.
--        template = self.resolve_path(blueprint.get('from'))
--        variables = blueprint.get('variables', {})
--
--        # Passed parameters override anything in the task itself.
--        for key in properties:
--            variables[key] = properties[key]
--
--        if not template:
--            msg = '"{}" inheritance template missing'.format(path)
--            raise TemplatesException(msg)
--
--        if template in seen:
--            msg = 'Error while handling "{}" in "{}": circular template ' + \
--                  'inheritance seen\n  {}'
--            raise TemplatesException(msg.format(path, template, seen))
--
--        try:
--            out = self.load(template, variables, seen)
--        except TemplatesException as e:
--            msg = 'Error expanding parent ("{}") of "{}" original error {}'
--            raise TemplatesException(msg.format(template, path, str(e)))
--
--        # Anything left in obj is merged into final results (and overrides)
--        return merge_to(obj, out)
--
--    def render(self, path, content, parameters, seen):
--        '''
--        Renders a given yaml string.
--
--        :param str path:  used to prevent infinite recursion in inheritance.
--        :param str content: Of yaml file.
--        :param dict parameters: For mustache templates.
--        :param set seen: Seen files (used for inheritance)
--        '''
--        content = pystache.render(content, parameters)
--        result = yaml.load(content)
--
--        # In addition to the usual template logic done by mustache, we also
--        # handle the special '$inherits' dict key.
--        if isinstance(result, dict) and INHERITS_KEY in result:
--            return self._inherits(path, result, parameters, seen)
--
--        return result
--
--    def resolve_path(self, path):
--        return os.path.join(self.root, path)
--
--    def load(self, path, parameters=None, seen=None):
--        '''
--        Load and render the given yaml path.
--
--        :param str path: Location of yaml file to load (relative to root).
--        :param dict parameters: Parameters to template the yaml file with.
--        '''
--        seen = seen or set()
--
--        if not path:
--            raise TemplatesException('path is required')
--
--        path = self.resolve_path(path)
--
--        if not os.path.isfile(path):
--            raise TemplatesException('"{}" is not a file'.format(path))
--
--        content = open(path).read()
--        return self.render(path, content, parameters, seen)
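
The merge semantics (scalars override, dicts recurse, lists concatenate, parents listed before children) are easiest to see on concrete input. A minimal sketch, assuming only the merge function defined above:

    base = {'env': {'A': 1}, 'tags': ['x'], 'level': 1}
    child = {'env': {'B': 2}, 'tags': ['y'], 'level': 3}
    # Later objects take precedence, so `child` wins on scalars:
    #   {'env': {'A': 1, 'B': 2}, 'tags': ['x', 'y'], 'level': 3}
    print(merge(base, child))
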
-diff --git a/taskcluster/taskgraph/util/time.py b/taskcluster/taskgraph/util/time.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/util/time.py
-+++ /dev/null
-@@ -1,119 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--# Python port of the ms.js node module. This is not a direct port; some things
--# are more complicated or less precise, and we lean on timedelta here.
--
--import re
--import datetime
--
--PATTERN = re.compile(
--    '((?:\d+)?\.?\d+) *([a-z]+)'
--)
--
--
--def seconds(value):
--    return datetime.timedelta(seconds=int(value))
--
--
--def minutes(value):
--    return datetime.timedelta(minutes=int(value))
--
--
--def hours(value):
--    return datetime.timedelta(hours=int(value))
--
--
--def days(value):
--    return datetime.timedelta(days=int(value))
--
--
--def months(value):
--    # See warning in years(), below
--    return datetime.timedelta(days=int(value) * 30)
--
--
--def years(value):
--    # Warning here "years" are vague don't use this for really sensitive date
--    # computation the idea is to give you a absolute amount of time in the
--    # future which is not the same thing as "precisely on this date next year"
--    return datetime.timedelta(days=int(value) * 365)
--
--
--ALIASES = {}
--ALIASES['seconds'] = ALIASES['second'] = ALIASES['s'] = seconds
--ALIASES['minutes'] = ALIASES['minute'] = ALIASES['min'] = minutes
--ALIASES['hours'] = ALIASES['hour'] = ALIASES['h'] = hours
--ALIASES['days'] = ALIASES['day'] = ALIASES['d'] = days
--ALIASES['months'] = ALIASES['month'] = ALIASES['mo'] = months
--ALIASES['years'] = ALIASES['year'] = ALIASES['y'] = years
--
--
--class InvalidString(Exception):
--    pass
--
--
--class UnknownTimeMeasurement(Exception):
--    pass
--
--
--def value_of(input_str):
--    '''
--    Convert a duration string into a datetime.timedelta
--    :param str input_str: (ex: 1d, 2d, 6years, 2 seconds)
--    :returns: The corresponding datetime.timedelta
--    '''
--
--    matches = PATTERN.search(input_str)
--
--    if matches is None or len(matches.groups()) < 2:
--        raise InvalidString("'{}' is an invalid string".format(input_str))
--
--    value, unit = matches.groups()
--
--    if unit not in ALIASES:
--        raise UnknownTimeMeasurement(
--            '{} is not a valid time measure use one of {}'.format(
--                unit,
--                sorted(ALIASES.keys())
--            )
--        )
--
--    return ALIASES[unit](value)
--
--
--def json_time_from_now(input_str, now=None, datetime_format=False):
--    '''
--    :param str input_str: Input string (see value_of)
--    :param datetime now: Optionally set the definition of `now`
--    :param boolean datetime_format: Set `True` to get a `datetime` output
--    :returns: JSON string representation of time in future.
--    '''
--
--    if now is None:
--        now = datetime.datetime.utcnow()
--
--    time = now + value_of(input_str)
--
--    if datetime_format is True:
--        return time
--    else:
--        # Sort of a big hack, but the json schema validator for dates does not
--        # like ISO dates until 'Z' (for timezone) is added...
--        # the [:23] ensures only whole seconds or milliseconds are included,
--        # not microseconds (see bug 1381801)
--        return time.isoformat()[:23] + 'Z'
--
--
--def current_json_time(datetime_format=False):
--    '''
--    :param boolean datetime_format: Set `True` to get a `datetime` output
--    :returns: JSON string representation of the current time.
--    '''
--    if datetime_format is True:
--        return datetime.datetime.utcnow()
--    else:
--        # the [:23] ensures only whole seconds or milliseconds are included,
--        # not microseconds (see bug 1381801)
--        return datetime.datetime.utcnow().isoformat()[:23] + 'Z'
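
A short usage sketch of the two entry points above (the timedelta output is exact; the timestamp is pinned by passing `now`):

    import datetime

    print(value_of('2 hours'))                 # 2:00:00 (a datetime.timedelta)
    now = datetime.datetime(2018, 1, 1)
    print(json_time_from_now('1d', now=now))   # 2018-01-02T00:00:00Z
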
-diff --git a/taskcluster/taskgraph/util/treeherder.py b/taskcluster/taskgraph/util/treeherder.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/util/treeherder.py
-+++ /dev/null
-@@ -1,24 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--import re
--
--
--def split_symbol(treeherder_symbol):
--    """Split a symbol expressed as grp(sym) into its two parts.  If no group is
--    given, the returned group is '?'"""
--    groupSymbol = '?'
--    symbol = treeherder_symbol
--    if '(' in symbol:
--        groupSymbol, symbol = re.match(r'([^(]*)\(([^)]*)\)', symbol).groups()
--    return groupSymbol, symbol
--
--
--def join_symbol(group, symbol):
--    """Perform the reverse of split_symbol, combining the given group and
--    symbol.  If the group is '?', then it is omitted."""
--    if group == '?':
--        return symbol
--    return '{}({})'.format(group, symbol)
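
The two helpers are inverses of each other, with '?' standing for "no group". For example:

    assert split_symbol('M(bc1)') == ('M', 'bc1')
    assert split_symbol('bc1') == ('?', 'bc1')
    assert join_symbol('M', 'bc1') == 'M(bc1)'
    assert join_symbol('?', 'bc1') == 'bc1'
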
-diff --git a/taskcluster/taskgraph/util/verify.py b/taskcluster/taskgraph/util/verify.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/util/verify.py
-+++ /dev/null
-@@ -1,87 +0,0 @@
--# -*- coding: utf-8 -*-
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--import re
--import os
--
--base_path = os.path.join(os.getcwd(), "taskcluster/docs/")
--
--
--def verify_docs(filename, identifiers, appearing_as):
--
--    # Identifiers starting with "_" are ignored for doc verification,
--    # so they can be used for faking test values.
--    with open(os.path.join(base_path, filename)) as fileObject:
--        doctext = "".join(fileObject.readlines())
--        if appearing_as == "inline-literal":
--            expression_list = [
--                "``" + identifier + "``"
--                for identifier in identifiers
--                if not identifier.startswith("_")
--            ]
--        elif appearing_as == "heading":
--            expression_list = [
--                '\n' + identifier + "\n(?:(?:(?:-+\n)+)|(?:(?:.+\n)+))"
--                for identifier in identifiers
--                if not identifier.startswith("_")
--            ]
--        else:
--            raise Exception("appearing_as = `{}` not defined".format(appearing_as))
--
--        for expression, identifier in zip(expression_list, identifiers):
--            match_group = re.search(expression, doctext)
--            if not match_group:
--                raise Exception(
--                    "{}: `{}` missing from doc file: `{}`"
--                    .format(appearing_as, identifier, filename)
--                )
--
--
--def verify_task_graph_symbol(task, taskgraph, scratch_pad):
--    """
--        This function verifies that the tuple
--        (collection.keys(), machine.platform, groupSymbol, symbol) is unique
--        for a target task graph.
--    """
--    task_dict = task.task
--    if "extra" in task_dict:
--        extra = task_dict["extra"]
--        if "treeherder" in extra:
--            treeherder = extra["treeherder"]
--
--            collection_keys = tuple(sorted(treeherder.get('collection', {}).keys()))
--            platform = treeherder.get('machine', {}).get('platform')
--            group_symbol = treeherder.get('groupSymbol')
--            symbol = treeherder.get('symbol')
--
--            key = (collection_keys, platform, group_symbol, symbol)
--            if key in scratch_pad:
--                raise Exception(
--                    "conflict between `{}`:`{}` for values `{}`"
--                    .format(task.label, scratch_pad[key], key)
--                )
--            else:
--                scratch_pad[key] = task.label
--
--
--def verify_gecko_v2_routes(task, taskgraph, scratch_pad):
--    """
--        This function ensures that no two
--        tasks share an index.gecko.v2 route
--    """
--    route_prefix = "index.gecko.v2"
--    task_dict = task.task
--    routes = task_dict.get('routes', [])
--
--    for route in routes:
--        if route.startswith(route_prefix):
--            if route in scratch_pad:
--                raise Exception(
--                    "conflict between {}:{} for route: {}"
--                    .format(task.label, scratch_pad[route], route)
--                )
--            else:
--                scratch_pad[route] = task.label
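
Both verifiers above use the same scratch-pad idiom: derive a key per task, and fail on the first collision. A stripped-down sketch of the idiom (the task objects here are hypothetical stand-ins):

    def verify_unique(tasks, key_of):
        seen = {}
        for label, task in tasks.items():
            key = key_of(task)
            if key in seen:
                raise Exception('conflict between {} and {} for {}'
                                .format(label, seen[key], key))
            seen[key] = label

    verify_unique({'t1': {'symbol': 'M1'}, 't2': {'symbol': 'M2'}},
                  key_of=lambda t: t['symbol'])  # passes; duplicate symbols raise
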
-diff --git a/taskcluster/taskgraph/util/workertypes.py b/taskcluster/taskgraph/util/workertypes.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/util/workertypes.py
-+++ /dev/null
-@@ -1,51 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--WORKER_TYPES = {
--    'aws-provisioner-v1/gecko-images': ('docker-worker', 'linux'),
--    'aws-provisioner-v1/gecko-1-b-android': ('docker-worker', 'linux'),
--    'aws-provisioner-v1/gecko-1-b-linux': ('docker-worker', 'linux'),
--    'aws-provisioner-v1/gecko-1-b-macosx64': ('docker-worker', 'linux'),
--    'aws-provisioner-v1/gecko-1-b-win2012': ('generic-worker', 'windows'),
--    'aws-provisioner-v1/gecko-2-b-android': ('docker-worker', 'linux'),
--    'aws-provisioner-v1/gecko-2-b-linux': ('docker-worker', 'linux'),
--    'aws-provisioner-v1/gecko-2-b-macosx64': ('docker-worker', 'linux'),
--    'aws-provisioner-v1/gecko-2-b-win2012': ('generic-worker', 'windows'),
--    'aws-provisioner-v1/gecko-3-b-android': ('docker-worker', 'linux'),
--    'aws-provisioner-v1/gecko-3-b-linux': ('docker-worker', 'linux'),
--    'aws-provisioner-v1/gecko-3-b-macosx64': ('docker-worker', 'linux'),
--    'aws-provisioner-v1/gecko-3-b-win2012': ('generic-worker', 'windows'),
--    'aws-provisioner-v1/gecko-symbol-upload': ('docker-worker', 'linux'),
--    'aws-provisioner-v1/gecko-t-linux-large': ('docker-worker', 'linux'),
--    'aws-provisioner-v1/gecko-t-linux-medium': ('docker-worker', 'linux'),
--    'aws-provisioner-v1/gecko-t-linux-xlarge': ('docker-worker', 'linux'),
--    'aws-provisioner-v1/gecko-t-win10-64': ('generic-worker', 'windows'),
--    'aws-provisioner-v1/gecko-t-win10-64-gpu': ('generic-worker', 'windows'),
--    'releng-hardware/gecko-t-win10-64-hw': ('generic-worker', 'windows'),
--    'aws-provisioner-v1/gecko-t-win7-32': ('generic-worker', 'windows'),
--    'aws-provisioner-v1/gecko-t-win7-32-gpu': ('generic-worker', 'windows'),
--    'releng-hardware/gecko-t-win7-32-hw': ('generic-worker', 'windows'),
--    'aws-provisioner-v1/taskcluster-generic': ('docker-worker', 'linux'),
--    'buildbot-bridge/buildbot-bridge': ('buildbot-bridge', None),
--    'invalid/invalid': ('invalid', None),
--    'null-provisioner/human-breakpoint': ('push-apk-breakpoint', None),
--    'releng-hardware/gecko-t-linux-talos': ('native-engine', 'linux'),
--    'scriptworker-prov-v1/balrogworker-v1': ('balrog', None),
--    'scriptworker-prov-v1/beetmoverworker-v1': ('beetmover', None),
--    'scriptworker-prov-v1/pushapk-v1': ('push-apk', None),
--    "scriptworker-prov-v1/signing-linux-v1": ('scriptworker-signing', None),
--    'releng-hardware/gecko-t-osx-1010': ('generic-worker', 'macosx'),
--}
--
--
--def worker_type_implementation(worker_type):
--    """Get the worker implementation and OS for the given workerType, where the
--    OS represents the host system, not the target OS, in the case of
--    cross-compiles."""
--    # assume that worker types for all levels are the same implementation
--    worker_type = worker_type.replace('{level}', '1')
--    return WORKER_TYPES[worker_type]
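
Callers look workers up by workerType string; the only normalization is replacing a '{level}' placeholder with '1', since the implementation is assumed identical across levels. For example (values from the table above):

    impl, os_name = worker_type_implementation(
        'aws-provisioner-v1/gecko-{level}-b-linux')
    assert (impl, os_name) == ('docker-worker', 'linux')
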
-diff --git a/taskcluster/taskgraph/util/yaml.py b/taskcluster/taskgraph/util/yaml.py
-deleted file mode 100644
---- a/taskcluster/taskgraph/util/yaml.py
-+++ /dev/null
-@@ -1,29 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import os
--import re
--import yaml
--
--
--def load_yaml(path, name, enforce_order=False):
--    """Convenience function to load a YAML file in the given path.  This is
--    useful for loading kind configuration files from the kind path.  If
--    `enforce_order` is given, then the top-level keys in the file must
--    appear in sorted order."""
--    filename = os.path.join(path, name)
--    if enforce_order:
--        keys = []
--        key_re = re.compile('^([^ #:]+):')
--        with open(filename, "rb") as f:
--            for line in f:
--                mo = key_re.match(line)
--                if mo:
--                    keys.append(mo.group(1))
--            if keys != list(sorted(keys)):
--                raise Exception("keys in {} are not sorted".format(filename))
--    with open(filename, "rb") as f:
--        return yaml.load(f)
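
The enforce_order check is a plain line scan over the raw file, independent of the YAML parser. A minimal sketch of the same check on an in-memory string (note that modern PyYAML would want yaml.safe_load or an explicit Loader where this old code calls bare yaml.load):

    import re

    def keys_sorted(text):
        # Collect top-level keys (column 0, before ':') and check ordering.
        key_re = re.compile(r'^([^ #:]+):')
        keys = [m.group(1) for m in map(key_re.match, text.splitlines()) if m]
        return keys == sorted(keys)

    assert keys_sorted("alpha: 1\nbeta: 2\n")
    assert not keys_sorted("beta: 2\nalpha: 1\n")
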
-diff --git a/tools/moz.build b/tools/moz.build
---- a/tools/moz.build
-+++ b/tools/moz.build
-@@ -35,16 +35,13 @@ with Files("quitter/**"):
-     BUG_COMPONENT = ("Testing", "General")
- 
- with Files("rb/**"):
-     BUG_COMPONENT = ("Core", "XPCOM")
- 
- with Files("rewriting/**"):
-     BUG_COMPONENT = ("Core", "Rewriting and Analysis")
- 
--with Files("tryselect/**"):
--    BUG_COMPONENT = ("Testing", "General")
--
- with Files("update-packaging/**"):
-     BUG_COMPONENT = ("Release Engineering", "Other")
- 
- SPHINX_TREES['lint'] = 'lint/docs'
- SPHINX_TREES['compare-locales'] = 'compare-locales/docs'
-diff --git a/tools/tryselect/__init__.py b/tools/tryselect/__init__.py
-deleted file mode 100644
-diff --git a/tools/tryselect/mach_commands.py b/tools/tryselect/mach_commands.py
-deleted file mode 100644
---- a/tools/tryselect/mach_commands.py
-+++ /dev/null
-@@ -1,184 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import argparse
--import os
--import sys
--
--from mach.decorators import (
--    CommandArgument,
--    CommandProvider,
--    Command,
--    SubCommand,
--)
--
--from mozbuild.base import BuildEnvironmentNotFoundException, MachCommandBase
--
--CONFIG_ENVIRONMENT_NOT_FOUND = '''
--No config environment detected. This means we are unable to properly
--detect test files in the specified paths or tags. Please run:
--
--    $ mach configure
--
--and try again.
--'''.lstrip()
--
--
--def syntax_parser():
--    from tryselect.selectors.syntax import arg_parser
--    parser = arg_parser()
--    # The --no-artifact flag is only interpreted locally by |mach try|; it's not
--    # like the --artifact flag, which is interpreted remotely by the try server.
--    #
--    # We need a tri-state where set is different than the default value, so we
--    # use a different variable than --artifact.
--    parser.add_argument('--no-artifact',
--                        dest='no_artifact',
--                        action='store_true',
--                        help='Force compiled (non-artifact) builds even when '
--                             '--enable-artifact-builds is set.')
--    return parser
--
--
--@CommandProvider
--class TrySelect(MachCommandBase):
--
--    @Command('try',
--             category='ci',
--             description='Push selected tasks to the try server')
--    @CommandArgument('args', nargs=argparse.REMAINDER)
--    def try_default(self, args):
--        """Push selected tests to the try server.
--
--        The |mach try| command is a frontend for scheduling tasks to
--        run on the try server using selectors. A selector is a subcommand
--        that provides its own set of command line arguments; the available
--        selectors are listed below.
--
--        If no subcommand is specified, the `syntax` selector is run by
--        default. Run |mach try syntax --help| for more information on
--        scheduling with the `syntax` selector.
--        """
--        parser = syntax_parser()
--        kwargs = vars(parser.parse_args(args))
--        return self._mach_context.commands.dispatch(
--            'try', subcommand='syntax', context=self._mach_context, **kwargs)
--
--    @SubCommand('try',
--                'fuzzy',
--                description='Select tasks on try using a fuzzy finder')
--    @CommandArgument('-u', '--update', action='store_true', default=False,
--                     help="Update fzf before running")
--    def try_fuzzy(self, update):
--        """Select which tasks to use with fzf.
--
--        This selector runs all task labels through a fuzzy finding interface.
--        All selected task labels and their dependencies will be scheduled on
--        try.
--
--        Keyboard Shortcuts
--        ------------------
--
--        When in the fuzzy finder interface, start typing to filter down the
--        task list. Then use the following keyboard shortcuts to select tasks:
--
--          accept: <enter>
--          cancel: <ctrl-c> or <esc>
--          cursor-up: <ctrl-k> or <up>
--          cursor-down: <ctrl-j> or <down>
--          toggle-select-down: <tab>
--          toggle-select-up: <shift-tab>
--          select-all: <ctrl-a>
--          deselect-all: <ctrl-d>
--          toggle-all: <ctrl-t>
--          clear-input: <alt-bspace>
--
--        There are many more shortcuts enabled by default; you can also define
--        your own shortcuts by setting `--bind` in the $FZF_DEFAULT_OPTS
--        environment variable. See `man fzf` for more info.
--
--        Extended Search
--        ---------------
--
--        When typing in search terms, the following modifiers can be applied:
--
--          'word: exact match (line must contain the literal string "word")
--          ^word: exact prefix match (line must start with literal "word")
--          word$: exact suffix match (line must end with literal "word")
--          !word: exact negation match (line must not contain literal "word")
--          'a | 'b: OR operator (joins two exact match operators together)
--
--        For example:
--
--          ^start 'exact | !ignore fuzzy end$
--        """
--        from tryselect.selectors.fuzzy import run_fuzzy_try
--        return run_fuzzy_try(update)
--
--    @SubCommand('try',
--                'syntax',
--                description='Select tasks on try using try syntax',
--                parser=syntax_parser)
--    def try_syntax(self, **kwargs):
--        """Push the current tree to try, with the specified syntax.
--
--        Build options, platforms and regression tests may be selected
--        using the usual try options (-b, -p and -u respectively). In
--        addition, tests in a given directory may be automatically
--        selected by passing that directory as a positional argument to the
--        command. For example:
--
--        mach try -b d -p linux64 dom testing/web-platform/tests/dom
--
--        would schedule a try run for linux64 debug consisting of all
--        tests under dom/ and testing/web-platform/tests/dom.
--
--        Test selection using positional arguments is available for
--        mochitests, reftests, xpcshell tests and web-platform-tests.
--
--        Tests may also be filtered by passing --tag to the command,
--        which will run only tests marked as having the specified
--        tags, e.g.
--
--        mach try -b d -p win64 --tag media
--
--        would run all tests tagged 'media' on Windows 64.
--
--        If both positional arguments or tags and -u are supplied, the
--        suites in -u will be run in full. Where tests are selected by
--        positional argument they will be run in a single chunk.
--
--        If no build option is selected, both debug and opt will be
--        scheduled. If no platform is selected a default is taken from
--        the AUTOTRY_PLATFORM_HINT environment variable, if set.
--
--        The command requires either its own mercurial extension ("push-to-try",
--        installable from mach mercurial-setup) or a git repo using git-cinnabar
--        (available at https://github.com/glandium/git-cinnabar).
--
--        """
--        from mozbuild.testing import TestResolver
--        from tryselect.selectors.syntax import AutoTry
--
--        try:
--            if self.substs.get("MOZ_ARTIFACT_BUILDS"):
--                kwargs['local_artifact_build'] = True
--        except BuildEnvironmentNotFoundException:
--            # If we don't have a build locally, we can't tell whether
--            # an artifact build is desired, but we still want the
--            # command to succeed, if possible.
--            pass
--
--        config_status = os.path.join(self.topobjdir, 'config.status')
--        if (kwargs['paths'] or kwargs['tags']) and not config_status:
--            print(CONFIG_ENVIRONMENT_NOT_FOUND)
--            sys.exit(1)
--
--        def resolver_func():
--            return self._spawn(TestResolver)
--
--        at = AutoTry(self.topsrcdir, resolver_func, self._mach_context)
--        return at.run(**kwargs)
-diff --git a/tools/tryselect/selectors/__init__.py b/tools/tryselect/selectors/__init__.py
-deleted file mode 100644
-diff --git a/tools/tryselect/selectors/fuzzy.py b/tools/tryselect/selectors/fuzzy.py
-deleted file mode 100644
---- a/tools/tryselect/selectors/fuzzy.py
-+++ /dev/null
-@@ -1,198 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import os
--import platform
--import subprocess
--import sys
--from distutils.spawn import find_executable
--
--from mozboot.util import get_state_dir
--
--from ..tasks import generate_target
--from ..vcs import VCSHelper
--
--try:
--    import blessings
--    terminal = blessings.Terminal()
--except ImportError:
--    from mozlint.formatters.stylish import NullTerminal
--    terminal = NullTerminal()
--
--FZF_NOT_FOUND = """
--Could not find the `fzf` binary.
--
--The `mach try fuzzy` command depends on fzf. Please install it following the
--appropriate instructions for your platform:
--
--    https://github.com/junegunn/fzf#installation
--
--Only the binary is required; if you do not wish to install the shell and
--editor integrations, download the appropriate binary and put it on your $PATH:
--
--    https://github.com/junegunn/fzf-bin/releases
--""".lstrip()
--
--FZF_INSTALL_FAILED = """
--Failed to install fzf.
--
--Please install fzf manually following the appropriate instructions for your
--platform:
--
--    https://github.com/junegunn/fzf#installation
--
--Only the binary is required; if you do not wish to install the shell and
--editor integrations, download the appropriate binary and put it on your $PATH:
--
--    https://github.com/junegunn/fzf-bin/releases
--""".lstrip()
--
--FZF_RUN_INSTALL_WIZARD = """
--{t.bold}Running the fzf installation wizard.{t.normal}
--
--Only the fzf binary is required; if you do not wish to install the shell
--integrations, {t.bold}feel free to press 'n' at each of the prompts.{t.normal}
--""".format(t=terminal)
--
--FZF_HEADER = """
--For more shortcuts, see {t.italic_white}mach help try fuzzy{t.normal} and {t.italic_white}man fzf
--{shortcuts}
--""".strip()
--
--fzf_shortcuts = {
--    'ctrl-a': 'select-all',
--    'ctrl-d': 'deselect-all',
--    'ctrl-t': 'toggle-all',
--    'alt-bspace': 'beginning-of-line+kill-line',
--    '?': 'toggle-preview',
--}
--
--fzf_header_shortcuts = {
--    'cursor-up': 'ctrl-k',
--    'cursor-down': 'ctrl-j',
--    'toggle-select': 'tab',
--    'select-all': 'ctrl-a',
--    'accept': 'enter',
--    'cancel': 'ctrl-c',
--}
--
--
--def run(cmd, cwd=None):
--    is_win = platform.system() == 'Windows'
--    return subprocess.call(cmd, cwd=cwd, shell=True if is_win else False)
--
--
--def run_fzf_install_script(fzf_path, bin_only=False):
--    # We could run this without installing the shell integrations on all
--    # platforms, but those integrations are actually really useful, so give the
--    # user the choice.
--    if platform.system() == 'Windows':
--        cmd = ['bash', '-c', './install --bin']
--    else:
--        cmd = ['./install']
--        if bin_only:
--            cmd.append('--bin')
--        else:
--            print(FZF_RUN_INSTALL_WIZARD)
--
--    if run(cmd, cwd=fzf_path):
--        print(FZF_INSTALL_FAILED)
--        sys.exit(1)
--
--
--def fzf_bootstrap(update=False):
--    """Bootstrap fzf if necessary and return path to the executable.
--
--    The bootstrap works by cloning the fzf repository and running the included
--    `install` script. If update is True, we will pull the repository and re-run
--    the install script.
--    """
--    fzf_bin = find_executable('fzf')
--    if fzf_bin and not update:
--        return fzf_bin
--
--    fzf_path = os.path.join(get_state_dir()[0], 'fzf')
--    if update and not os.path.isdir(fzf_path):
--        print("fzf installed somewhere other than {}, please update manually".format(fzf_path))
--        sys.exit(1)
--
--    def get_fzf():
--        return find_executable('fzf', os.path.join(fzf_path, 'bin'))
--
--    if update:
--        ret = run(['git', 'pull'], cwd=fzf_path)
--        if ret:
--            print("Update fzf failed.")
--            sys.exit(1)
--
--        run_fzf_install_script(fzf_path, bin_only=True)
--        return get_fzf()
--
--    if os.path.isdir(fzf_path):
--        fzf_bin = get_fzf()
--        if fzf_bin:
--            return fzf_bin
--        # fzf is cloned, but the binary doesn't exist. Try running the install script.
--        return fzf_bootstrap(update=True)
--
--    install = raw_input("Could not detect fzf, install it now? [y/n]: ")
--    if install.lower() != 'y':
--        return
--
--    if not find_executable('git'):
--        print("Git not found.")
--        print(FZF_INSTALL_FAILED)
--        sys.exit(1)
--
--    cmd = ['git', 'clone', '--depth', '1', 'https://github.com/junegunn/fzf.git']
--    if subprocess.call(cmd, cwd=os.path.dirname(fzf_path)):
--        print(FZF_INSTALL_FAILED)
--        sys.exit(1)
--
--    run_fzf_install_script(fzf_path)
--
--    print("Installed fzf to {}".format(fzf_path))
--    return get_fzf()
--
--
--def format_header():
--    shortcuts = []
--    for action, key in sorted(fzf_header_shortcuts.iteritems()):
--        shortcuts.append('{t.white}{action}{t.normal}: {t.yellow}<{key}>{t.normal}'.format(
--                         t=terminal, action=action, key=key))
--    return FZF_HEADER.format(shortcuts=', '.join(shortcuts), t=terminal)
--
--
--def run_fuzzy_try(update):
--    fzf = fzf_bootstrap(update)
--
--    if not fzf:
--        print(FZF_NOT_FOUND)
--        return
--
--    vcs = VCSHelper.create()
--    vcs.check_working_directory()
--
--    all_tasks = generate_target()
--
--    key_shortcuts = [k + ':' + v for k, v in fzf_shortcuts.iteritems()]
--    cmd = [
--        fzf, '-m',
--        '--bind', ','.join(key_shortcuts),
--        '--header', format_header(),
--        # Using python to split the preview string is a bit convoluted,
--        # but is guaranteed to be available on all platforms.
--        '--preview', 'python -c "print(\\"\\n\\".join(sorted([s.strip(\\"\'\\") for s in \\"{+}\\".split()])))"',  # noqa
--        '--preview-window=right:20%',
--    ]
--    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
--    selected = proc.communicate('\n'.join(all_tasks))[0].splitlines()
--
--    if not selected:
--        print("no tasks selected")
--        return
--
--    return vcs.push_to_try("Pushed via 'mach try fuzzy', see diff for scheduled tasks", selected)
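
Stripped of bootstrap and VCS plumbing, the heart of run_fuzzy_try is feeding candidate labels to the picker over a pipe and reading the selection back. A reduced Python 3 sketch of that interaction (the original is Python 2, so it passes str to communicate; error handling omitted):

    import subprocess

    def pick(candidates, fzf='fzf'):
        proc = subprocess.Popen([fzf, '-m'],
                                stdin=subprocess.PIPE, stdout=subprocess.PIPE)
        out, _ = proc.communicate('\n'.join(candidates).encode())
        return out.decode().splitlines()

    # selected = pick(['task-a', 'task-b', 'task-c'])
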
-diff --git a/tools/tryselect/selectors/syntax.py b/tools/tryselect/selectors/syntax.py
-deleted file mode 100644
---- a/tools/tryselect/selectors/syntax.py
-+++ /dev/null
-@@ -1,668 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import ConfigParser
--import argparse
--import os
--import re
--import sys
--from collections import defaultdict
--
--import mozpack.path as mozpath
--from ..vcs import VCSHelper
--
--
--def arg_parser():
--    parser = argparse.ArgumentParser()
--    parser.add_argument('paths', nargs='*', help='Paths to search for tests to run on try.')
--    parser.add_argument('-b', '--build', dest='builds', default='do',
--                        help='Build types to run (d for debug, o for optimized).')
--    parser.add_argument('-p', '--platform', dest='platforms', action='append',
--                        help='Platforms to run (required if not found in the environment as '
--                             'AUTOTRY_PLATFORM_HINT).')
--    parser.add_argument('-u', '--unittests', dest='tests', action='append',
--                        help='Test suites to run in their entirety.')
--    parser.add_argument('-t', '--talos', dest='talos', action='append',
--                        help='Talos suites to run.')
--    parser.add_argument('-j', '--jobs', dest='jobs', action='append',
--                        help='Job tasks to run.')
--    parser.add_argument('--tag', dest='tags', action='append',
--                        help='Restrict tests to the given tag (may be specified multiple times).')
--    parser.add_argument('--and', action='store_true', dest='intersection',
--                        help='When -u and paths are supplied run only the intersection of the '
--                             'tests specified by the two arguments.')
--    parser.add_argument('--no-push', dest='push', action='store_false',
--                        help='Do not push to try as a result of running this command (if '
--                        'specified this command will only print calculated try '
--                        'syntax and selection info).')
--    parser.add_argument('--save', dest='save', action='store',
--                        help='Save the command line arguments for future use with --preset.')
--    parser.add_argument('--preset', dest='load', action='store',
--                        help='Load a saved set of arguments. Additional arguments will override '
--                             'saved ones.')
--    parser.add_argument('--list', action='store_true',
--                        help='List all saved try strings')
--    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', default=False,
--                        help='Print detailed information about the resulting test selection '
--                        'and commands performed.')
--    for arg, opts in AutoTry.pass_through_arguments.items():
--        parser.add_argument(arg, **opts)
--    return parser
--
--
--class TryArgumentTokenizer(object):
--    symbols = [("seperator", ","),
--               ("list_start", "\["),
--               ("list_end", "\]"),
--               ("item", "([^,\[\]\s][^,\[\]]+)"),
--               ("space", "\s+")]
--    token_re = re.compile("|".join("(?P<%s>%s)" % item for item in symbols))
--
--    def tokenize(self, data):
--        for match in self.token_re.finditer(data):
--            symbol = match.lastgroup
--            data = match.group(symbol)
--            if symbol == "space":
--                pass
--            else:
--                yield symbol, data
--
--
--class TryArgumentParser(object):
--    """Simple three-state parser for handling expressions
--    of the from "foo[sub item, another], bar,baz". This takes
--    input from the TryArgumentTokenizer and runs through a small
--    state machine, returning a dictionary of {top-level-item:[sub_items]}
--    i.e. the above would result in
--    {"foo":["sub item", "another"], "bar": [], "baz": []}
--    In the case of invalid input a ValueError is raised."""
--
--    EOF = object()
--
--    def __init__(self):
--        self.reset()
--
--    def reset(self):
--        self.tokens = None
--        self.current_item = None
--        self.data = {}
--        self.token = None
--        self.state = None
--
--    def parse(self, tokens):
--        self.reset()
--        self.tokens = tokens
--        self.consume()
--        self.state = self.item_state
--        while self.token[0] != self.EOF:
--            self.state()
--        return self.data
--
--    def consume(self):
--        try:
--            self.token = self.tokens.next()
--        except StopIteration:
--            self.token = (self.EOF, None)
--
--    def expect(self, *types):
--        if self.token[0] not in types:
--            raise ValueError("Error parsing try string, unexpected %s" % (self.token[0]))
--
--    def item_state(self):
--        self.expect("item")
--        value = self.token[1].strip()
--        if value not in self.data:
--            self.data[value] = []
--        self.current_item = value
--        self.consume()
--        if self.token[0] == "seperator":
--            self.consume()
--        elif self.token[0] == "list_start":
--            self.consume()
--            self.state = self.subitem_state
--        elif self.token[0] == self.EOF:
--            pass
--        else:
--            raise ValueError
--
--    def subitem_state(self):
--        self.expect("item")
--        value = self.token[1].strip()
--        self.data[self.current_item].append(value)
--        self.consume()
--        if self.token[0] == "seperator":
--            self.consume()
--        elif self.token[0] == "list_end":
--            self.consume()
--            self.state = self.after_list_end_state
--        else:
--            raise ValueError
--
--    def after_list_end_state(self):
--        self.expect("seperator")
--        self.consume()
--        self.state = self.item_state
--
--
--def parse_arg(arg):
--    tokenizer = TryArgumentTokenizer()
--    parser = TryArgumentParser()
--    return parser.parse(tokenizer.tokenize(arg))
--
--
--class AutoTry(object):
--
--    # Maps from flavors to the job names needed to run that flavor
--    flavor_jobs = {
--        'mochitest': ['mochitest-1', 'mochitest-e10s-1'],
--        'xpcshell': ['xpcshell'],
--        'chrome': ['mochitest-o'],
--        'browser-chrome': ['mochitest-browser-chrome-1',
--                           'mochitest-e10s-browser-chrome-1',
--                           'mochitest-browser-chrome-e10s-1'],
--        'devtools-chrome': ['mochitest-devtools-chrome-1',
--                            'mochitest-e10s-devtools-chrome-1',
--                            'mochitest-devtools-chrome-e10s-1'],
--        'crashtest': ['crashtest', 'crashtest-e10s'],
--        'reftest': ['reftest', 'reftest-e10s'],
--        'web-platform-tests': ['web-platform-tests-1'],
--    }
--
--    flavor_suites = {
--        "mochitest": "mochitests",
--        "xpcshell": "xpcshell",
--        "chrome": "mochitest-o",
--        "browser-chrome": "mochitest-bc",
--        "devtools-chrome": "mochitest-dt",
--        "crashtest": "crashtest",
--        "reftest": "reftest",
--        "web-platform-tests": "web-platform-tests",
--    }
--
--    compiled_suites = [
--        "cppunit",
--        "gtest",
--        "jittest",
--    ]
--
--    common_suites = [
--        "cppunit",
--        "crashtest",
--        "firefox-ui-functional",
--        "gtest",
--        "jittest",
--        "jsreftest",
--        "marionette",
--        "marionette-e10s",
--        "mochitests",
--        "reftest",
--        "web-platform-tests",
--        "xpcshell",
--    ]
--
--    # Arguments we will accept on the command line and pass through to try
--    # syntax with no further intervention. The set is taken from
--    # http://trychooser.pub.build.mozilla.org with a few additions.
--    #
--    # Note that the meaning of store_false and store_true arguments is
--    # not preserved here, as we're only using these to echo the literal
--    # arguments to another consumer. Specifying either store_false or
--    # store_true here will have an equivalent effect.
--    pass_through_arguments = {
--        '--rebuild': {
--            'action': 'store',
--            'dest': 'rebuild',
--            'help': 'Re-trigger all test jobs (up to 20 times)',
--        },
--        '--rebuild-talos': {
--            'action': 'store',
--            'dest': 'rebuild_talos',
--            'help': 'Re-trigger all talos jobs',
--        },
--        '--interactive': {
--            'action': 'store_true',
--            'dest': 'interactive',
--            'help': 'Allow ssh-like access to running test containers',
--        },
--        '--no-retry': {
--            'action': 'store_true',
--            'dest': 'no_retry',
--            'help': 'Do not retrigger failed tests',
--        },
--        '--setenv': {
--            'action': 'append',
--            'dest': 'setenv',
--            'help': 'Set the corresponding variable in the test environment for '
--                    'applicable harnesses.',
--        },
--        '-f': {
--            'action': 'store_true',
--            'dest': 'failure_emails',
--            'help': 'Request failure emails only',
--        },
--        '--failure-emails': {
--            'action': 'store_true',
--            'dest': 'failure_emails',
--            'help': 'Request failure emails only',
--        },
--        '-e': {
--            'action': 'store_true',
--            'dest': 'all_emails',
--            'help': 'Request all emails',
--        },
--        '--all-emails': {
--            'action': 'store_true',
--            'dest': 'all_emails',
--            'help': 'Request all emails',
--        },
--        '--artifact': {
--            'action': 'store_true',
--            'dest': 'artifact',
--            'help': 'Force artifact builds where possible.',
--        },
--        '--upload-xdbs': {
--            'action': 'store_true',
--            'dest': 'upload_xdbs',
--            'help': 'Upload XDB compilation db files generated by hazard build',
--        },
--    }
--
--    def __init__(self, topsrcdir, resolver_func, mach_context):
--        self.topsrcdir = topsrcdir
--        self._resolver_func = resolver_func
--        self._resolver = None
--        self.mach_context = mach_context
--        self.vcs = VCSHelper.create()
--
--    @property
--    def resolver(self):
--        if self._resolver is None:
--            self._resolver = self._resolver_func()
--        return self._resolver
--
--    @property
--    def config_path(self):
--        return os.path.join(self.mach_context.state_dir, "autotry.ini")
--
--    def load_config(self, name):
--        config = ConfigParser.RawConfigParser()
--        success = config.read([self.config_path])
--        if not success:
--            return None
--
--        try:
--            data = config.get("try", name)
--        except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
--            return None
--
--        kwargs = vars(arg_parser().parse_args(self.split_try_string(data)))
--
--        return kwargs
--
--    def list_presets(self):
--        config = ConfigParser.RawConfigParser()
--        success = config.read([self.config_path])
--
--        data = []
--        if success:
--            try:
--                data = config.items("try")
--            except (ConfigParser.NoSectionError, ConfigParser.NoOptionError):
--                pass
--
--        if not data:
--            print("No presets found")
--
--        for name, try_string in data:
--            print("%s: %s" % (name, try_string))
--
--    def split_try_string(self, data):
--        return re.findall(r'(?:\[.*?\]|\S)+', data)
--
--    def save_config(self, name, data):
--        assert data.startswith("try: ")
--        data = data[len("try: "):]
--
--        parser = ConfigParser.RawConfigParser()
--        parser.read([self.config_path])
--
--        if not parser.has_section("try"):
--            parser.add_section("try")
--
--        parser.set("try", name, data)
--
--        with open(self.config_path, "w") as f:
--            parser.write(f)
--
--    def paths_by_flavor(self, paths=None, tags=None):
--        paths_by_flavor = defaultdict(set)
--
--        if not (paths or tags):
--            return dict(paths_by_flavor)
--
--        tests = list(self.resolver.resolve_tests(paths=paths,
--                                                 tags=tags))
--
--        for t in tests:
--            if t['flavor'] in self.flavor_suites:
--                flavor = t['flavor']
--                if 'subsuite' in t and t['subsuite'] == 'devtools':
--                    flavor = 'devtools-chrome'
--
--                if flavor in ['crashtest', 'reftest']:
--                    manifest_relpath = os.path.relpath(t['manifest'], self.topsrcdir)
--                    paths_by_flavor[flavor].add(os.path.dirname(manifest_relpath))
--                elif 'dir_relpath' in t:
--                    paths_by_flavor[flavor].add(t['dir_relpath'])
--                else:
--                    file_relpath = os.path.relpath(t['path'], self.topsrcdir)
--                    dir_relpath = os.path.dirname(file_relpath)
--                    paths_by_flavor[flavor].add(dir_relpath)
--
--        for flavor, path_set in paths_by_flavor.items():
--            paths_by_flavor[flavor] = self.deduplicate_prefixes(path_set, paths)
--
--        return dict(paths_by_flavor)
--
--    def deduplicate_prefixes(self, path_set, input_paths):
--        # Removes paths redundant to test selection in the given path set.
--        # If a path was passed on the commandline that is the prefix of a
--        # path in our set, we only need to include the specified prefix to
--        # run the intended tests (every test in "layout/base" will run if
--        # "layout" is passed to the reftest harness).
--        removals = set()
--        additions = set()
--
--        for path in path_set:
--            full_path = path
--            while path:
--                path, _ = os.path.split(path)
--                if path in input_paths:
--                    removals.add(full_path)
--                    additions.add(path)
--
--        return additions | (path_set - removals)
--
--    def remove_duplicates(self, paths_by_flavor, tests):
--        rv = {}
--        for item in paths_by_flavor:
--            if self.flavor_suites[item] not in tests:
--                rv[item] = paths_by_flavor[item].copy()
--        return rv
--
--    def calc_try_syntax(self, platforms, tests, talos, jobs, builds, paths_by_flavor, tags,
--                        extras, intersection):
--        parts = ["try:"]
--
--        if platforms:
--            parts.extend(["-b", builds, "-p", ",".join(platforms)])
--
--        suites = tests if not intersection else {}
--        paths = set()
--        for flavor, flavor_tests in paths_by_flavor.iteritems():
--            suite = self.flavor_suites[flavor]
--            if suite not in suites and (not intersection or suite in tests):
--                for job_name in self.flavor_jobs[flavor]:
--                    for test in flavor_tests:
--                        paths.add("%s:%s" % (flavor, test))
--                    suites[job_name] = tests.get(suite, [])
--
--        # intersection implies tests are expected
--        if intersection and not suites:
--            raise ValueError("No tests found matching filters")
--
--        if extras.get('artifact') and any([p.endswith("-nightly") for p in platforms]):
--            print('You asked for |--artifact| but "-nightly" platforms don\'t have artifacts. '
--                  'Running without |--artifact| instead.')
--            del extras['artifact']
--
--        if extras.get('artifact'):
--            rejected = []
--            for suite in suites.keys():
--                if any([suite.startswith(c) for c in self.compiled_suites]):
--                    rejected.append(suite)
--            if rejected:
--                raise ValueError("You can't run {} with "
--                                 "--artifact option.".format(', '.join(rejected)))
--
--        if extras.get('artifact') and 'all' in suites.keys():
--            non_compiled_suites = set(self.common_suites) - set(self.compiled_suites)
--            message = ('You asked for |-u all| with |--artifact| but compiled-code tests ({tests})'
--                       ' can\'t run against an artifact build. Running (-u {non_compiled_suites}) '
--                       'instead.')
--            string_format = {
--                'tests': ','.join(self.compiled_suites),
--                'non_compiled_suites': ','.join(non_compiled_suites),
--            }
--            print(message.format(**string_format))
--            del suites['all']
--            suites.update({suite_name: None for suite_name in non_compiled_suites})
--
--        if suites:
--            parts.append("-u")
--            parts.append(",".join("%s%s" % (k, "[%s]" % ",".join(v) if v else "")
--                                  for k, v in sorted(suites.items())))
--
--        if talos:
--            parts.append("-t")
--            parts.append(",".join("%s%s" % (k, "[%s]" % ",".join(v) if v else "")
--                                  for k, v in sorted(talos.items())))
--
--        if jobs:
--            parts.append("-j")
--            parts.append(",".join(jobs))
--
--        if tags:
--            parts.append(' '.join('--tag %s' % t for t in tags))
--
--        if paths:
--            parts.append("--try-test-paths %s" % " ".join(sorted(paths)))
--
--        args_by_dest = {v['dest']: k for k, v in AutoTry.pass_through_arguments.items()}
--        for dest, value in extras.iteritems():
--            assert dest in args_by_dest
--            arg = args_by_dest[dest]
--            action = AutoTry.pass_through_arguments[arg]['action']
--            if action == 'store':
--                parts.append(arg)
--                parts.append(value)
--            if action == 'append':
--                for e in value:
--                    parts.append(arg)
--                    parts.append(e)
--            if action in ('store_true', 'store_false'):
--                parts.append(arg)
--
--        try_syntax = " ".join(parts)
--        return try_syntax
--
--    def find_paths_and_tags(self, verbose):
--        paths, tags = set(), set()
--        changed_files = self.vcs.files_changed
--        if changed_files:
--            if verbose:
--                print("Pushing tests based on modifications to the "
--                      "following files:\n\t%s" % "\n\t".join(changed_files))
--
--            from mozbuild.frontend.reader import (
--                BuildReader,
--                EmptyConfig,
--            )
--
--            config = EmptyConfig(self.topsrcdir)
--            reader = BuildReader(config)
--            files_info = reader.files_info(changed_files)
--
--            for path, info in files_info.items():
--                paths |= info.test_files
--                tags |= info.test_tags
--
--            if verbose:
--                if paths:
--                    print("Pushing tests based on the following patterns:\n\t%s" %
--                          "\n\t".join(paths))
--                if tags:
--                    print("Pushing tests based on the following tags:\n\t%s" %
--                          "\n\t".join(tags))
--        return paths, tags
--
--    def normalise_list(self, items, allow_subitems=False):
--        rv = defaultdict(list)
--        for item in items:
--            parsed = parse_arg(item)
--            for key, values in parsed.iteritems():
--                rv[key].extend(values)
--
--        if not allow_subitems:
--            if not all(item == [] for item in rv.itervalues()):
--                raise ValueError("Unexpected subitems in argument")
--            return rv.keys()
--        else:
--            return rv
--
--    def validate_args(self, **kwargs):
--        tests_selected = kwargs["tests"] or kwargs["paths"] or kwargs["tags"]
--        if kwargs["platforms"] is None and (kwargs["jobs"] is None or tests_selected):
--            if 'AUTOTRY_PLATFORM_HINT' in os.environ:
--                kwargs["platforms"] = [os.environ['AUTOTRY_PLATFORM_HINT']]
--            elif tests_selected:
--                print("Must specify platform when selecting tests.")
--                sys.exit(1)
--            else:
--                print("Either platforms or jobs must be specified as an argument to autotry.")
--                sys.exit(1)
--
--        try:
--            platforms = (self.normalise_list(kwargs["platforms"])
--                         if kwargs["platforms"] else {})
--        except ValueError as e:
--            print("Error parsing -p argument:\n%s" % e.message)
--            sys.exit(1)
--
--        try:
--            tests = (self.normalise_list(kwargs["tests"], allow_subitems=True)
--                     if kwargs["tests"] else {})
--        except ValueError as e:
--            print("Error parsing -u argument (%s):\n%s" % (kwargs["tests"], e.message))
--            sys.exit(1)
--
--        try:
--            talos = (self.normalise_list(kwargs["talos"], allow_subitems=True)
--                     if kwargs["talos"] else [])
--        except ValueError as e:
--            print("Error parsing -t argument:\n%s" % e.message)
--            sys.exit(1)
--
--        try:
--            jobs = (self.normalise_list(kwargs["jobs"]) if kwargs["jobs"] else {})
--        except ValueError as e:
--            print("Error parsing -j argument:\n%s" % e.message)
--            sys.exit(1)
--
--        paths = []
--        for p in kwargs["paths"]:
--            p = mozpath.normpath(os.path.abspath(p))
--            if not (os.path.isdir(p) and p.startswith(self.topsrcdir)):
--                print('Specified path "%s" is not a directory under the srcdir,'
--                      ' unable to specify tests outside of the srcdir' % p)
--                sys.exit(1)
--            if len(p) <= len(self.topsrcdir):
--                print('Specified path "%s" is at the top of the srcdir and would'
--                      ' select all tests.' % p)
--                sys.exit(1)
--            paths.append(os.path.relpath(p, self.topsrcdir))
--
--        try:
--            tags = self.normalise_list(kwargs["tags"]) if kwargs["tags"] else []
--        except ValueError as e:
--            print("Error parsing --tags argument:\n%s" % e.message)
--            sys.exit(1)
--
--        extra_values = {k['dest'] for k in AutoTry.pass_through_arguments.values()}
--        extra_args = {k: v for k, v in kwargs.items()
--                      if k in extra_values and v}
--
--        return kwargs["builds"], platforms, tests, talos, jobs, paths, tags, extra_args
--
--    def run(self, **kwargs):
--        if kwargs["list"]:
--            self.list_presets()
--            sys.exit()
--
--        if kwargs["load"] is not None:
--            defaults = self.load_config(kwargs["load"])
--
--            if defaults is None:
--                print("No saved configuration called %s found in autotry.ini" % kwargs["load"],
--                      file=sys.stderr)
--
--            for key, value in kwargs.iteritems():
--                if value in (None, []) and key in defaults:
--                    kwargs[key] = defaults[key]
--
--        if not any(kwargs[item] for item in ("paths", "tests", "tags")):
--            kwargs["paths"], kwargs["tags"] = self.find_paths_and_tags(kwargs["verbose"])
--
--        builds, platforms, tests, talos, jobs, paths, tags, extra = self.validate_args(**kwargs)
--
--        if paths or tags:
--            paths = [os.path.relpath(os.path.normpath(os.path.abspath(item)), self.topsrcdir)
--                     for item in paths]
--            paths_by_flavor = self.paths_by_flavor(paths=paths, tags=tags)
--
--            if not paths_by_flavor and not tests:
--                print("No tests were found when attempting to resolve paths:\n\n\t%s" %
--                      paths)
--                sys.exit(1)
--
--            if not kwargs["intersection"]:
--                paths_by_flavor = self.remove_duplicates(paths_by_flavor, tests)
--        else:
--            paths_by_flavor = {}
--
--        # No point in dealing with artifacts if we aren't running any builds
--        local_artifact_build = False
--        if platforms:
--            local_artifact_build = kwargs.get('local_artifact_build', False)
--
--            # Add --artifact if --enable-artifact-builds is set ...
--            if local_artifact_build:
--                extra["artifact"] = True
--            # ... unless --no-artifact is explicitly given.
--            if kwargs["no_artifact"]:
--                if "artifact" in extra:
--                    del extra["artifact"]
--
--        try:
--            msg = self.calc_try_syntax(platforms, tests, talos, jobs, builds,
--                                       paths_by_flavor, tags, extra, kwargs["intersection"])
--        except ValueError as e:
--            print(e.message)
--            sys.exit(1)
--
--        if local_artifact_build:
--            if kwargs["no_artifact"]:
--                print('mozconfig has --enable-artifact-builds but '
--                      '--no-artifact specified, not including --artifact '
--                      'flag in try syntax')
--            else:
--                print('mozconfig has --enable-artifact-builds; including '
--                      '--artifact flag in try syntax (use --no-artifact '
--                      'to override)')
--
--        if kwargs["verbose"] and paths_by_flavor:
--            print('The following tests will be selected: ')
--            for flavor, paths in paths_by_flavor.iteritems():
--                print("%s: %s" % (flavor, ",".join(paths)))
--
--        if kwargs["verbose"] or not kwargs["push"]:
--            print('The following try syntax was calculated:\n%s' % msg)
--
--        if kwargs["push"]:
--            self.vcs.push_to_try(msg)
--
--        if kwargs["save"] is not None:
--            self.save_config(kwargs["save"], msg)
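
For orientation, the removed autotry.py above culminates in calc_try_syntax(),
which assembles a classic "try:" push string. A minimal Python sketch of that
shape (illustrative only, under simplifying assumptions -- the removed method
also handled talos, jobs, tags, paths and the pass-through arguments):

    def calc_try_syntax(builds, platforms, suites):
        # Assemble "try: -b <builds> -p <platforms> -u <suites>".
        parts = ["try:", "-b", builds, "-p", ",".join(platforms)]
        if suites:
            parts += ["-u", ",".join(sorted(suites))]
        return " ".join(parts)

    # calc_try_syntax("do", ["linux64"], {"xpcshell", "mochitests"})
    # -> "try: -b do -p linux64 -u mochitests,xpcshell"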
-diff --git a/tools/tryselect/tasks.py b/tools/tryselect/tasks.py
-deleted file mode 100644
---- a/tools/tryselect/tasks.py
-+++ /dev/null
-@@ -1,54 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--from __future__ import absolute_import, print_function, unicode_literals
--
--import os
--
--from mozboot.util import get_state_dir
--from mozbuild.base import MozbuildObject
--from mozpack.files import FileFinder
--
--from taskgraph.generator import TaskGraphGenerator
--from taskgraph.parameters import load_parameters_file
--
--here = os.path.abspath(os.path.dirname(__file__))
--build = MozbuildObject.from_environment(cwd=here)
--
--
--def invalidate(cache):
--    if not os.path.isfile(cache):
--        return
--
--    tc_dir = os.path.join(build.topsrcdir, 'taskcluster')
--    tmod = max(os.path.getmtime(os.path.join(tc_dir, p)) for p, _ in FileFinder(tc_dir))
--    cmod = os.path.getmtime(cache)
--
--    if tmod > cmod:
--        os.remove(cache)
--
--
--def generate_target(params='project=mozilla-central'):
--    cache_dir = os.path.join(get_state_dir()[0], 'cache', 'taskgraph')
--    cache = os.path.join(cache_dir, 'target_task_set')
--
--    invalidate(cache)
--    if os.path.isfile(cache):
--        with open(cache, 'r') as fh:
--            return fh.read().splitlines()
--
--    if not os.path.isdir(cache_dir):
--        os.makedirs(cache_dir)
--
--    print("Task configuration changed, generating target tasks")
--    params = load_parameters_file(params)
--    params.check()
--
--    root = os.path.join(build.topsrcdir, 'taskcluster', 'ci')
--    tg = TaskGraphGenerator(root_dir=root, parameters=params).target_task_set
--    labels = [label for label in tg.graph.visit_postorder()]
--
--    with open(cache, 'w') as fh:
--        fh.write('\n'.join(labels))
--    return labels
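
The removed invalidate() above is a straightforward mtime check. A standalone
Python sketch of the same idea (an illustration, not the removed code; it
walks the whole config directory rather than using FileFinder):

    import os

    def invalidate_cache(cache_path, source_dir):
        # Drop the cached task list whenever any file under the taskgraph
        # config directory is newer than the cache file itself.
        if not os.path.isfile(cache_path):
            return
        newest = max(
            (os.path.getmtime(os.path.join(root, name))
             for root, _, names in os.walk(source_dir) for name in names),
            default=0.0)
        if newest > os.path.getmtime(cache_path):
            os.remove(cache_path)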
-diff --git a/tools/tryselect/vcs.py b/tools/tryselect/vcs.py
-deleted file mode 100644
---- a/tools/tryselect/vcs.py
-+++ /dev/null
-@@ -1,179 +0,0 @@
--# This Source Code Form is subject to the terms of the Mozilla Public
--# License, v. 2.0. If a copy of the MPL was not distributed with this
--# file, You can obtain one at http://mozilla.org/MPL/2.0/.
--
--import json
--import os
--import subprocess
--import sys
--from abc import ABCMeta, abstractmethod, abstractproperty
--from distutils.spawn import find_executable
--
--GIT_CINNABAR_NOT_FOUND = """
--Could not detect `git-cinnabar`.
--
--The `mach try` command requires git-cinnabar to be installed when
--pushing from git. For more information and installation instructions,
--please see:
--
--    https://github.com/glandium/git-cinnabar
--""".lstrip()
--
--HG_PUSH_TO_TRY_NOT_FOUND = """
--Could not detect `push-to-try`.
--
--The `mach try` command requires the push-to-try extension enabled
--when pushing from hg. Please install it by running:
--
--    $ ./mach mercurial-setup
--""".lstrip()
--
--VCS_NOT_FOUND = """
--Could not detect version control. Only `hg` or `git` are supported.
--""".strip()
--
--UNCOMMITTED_CHANGES = """
--ERROR please commit changes before continuing
--""".strip()
--
--
--class VCSHelper(object):
--    """A abstract base VCS helper that detects hg or git"""
--    __metaclass__ = ABCMeta
--
--    def __init__(self, root):
--        self.root = root
--
--    @classmethod
--    def find_vcs(cls):
--        # First check if we're in an hg repo, if not try git
--        commands = (
--            ['hg', 'root'],
--            ['git', 'rev-parse', '--show-toplevel'],
--        )
--
--        for cmd in commands:
--            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
--            output = proc.communicate()[0].strip()
--
--            if proc.returncode == 0:
--                return cmd[0], output
--        return None, ''
--
--    @classmethod
--    def create(cls):
--        vcs, root = cls.find_vcs()
--        if not vcs:
--            print(VCS_NOT_FOUND)
--            sys.exit(1)
--        return vcs_class[vcs](root)
--
--    def run(self, cmd):
--        try:
--            return subprocess.check_output(cmd, stderr=subprocess.STDOUT)
--        except subprocess.CalledProcessError as e:
--            print("Error running `{}`:".format(' '.join(cmd)))
--            print(e.output)
--            raise
--
--    def write_task_config(self, labels):
--        config = os.path.join(self.root, 'try_task_config.json')
--        with open(config, 'w') as fh:
--            json.dump(sorted(labels), fh, indent=2)
--        return config
--
--    def check_working_directory(self):
--        if self.has_uncommitted_changes:
--            print(UNCOMMITTED_CHANGES)
--            sys.exit(1)
--
--    @abstractmethod
--    def push_to_try(self, msg, labels=None):
--        pass
--
--    @abstractproperty
--    def files_changed(self):
--        pass
--
--    @abstractproperty
--    def has_uncommitted_changes(self):
--        pass
--
--
--class HgHelper(VCSHelper):
--
--    def push_to_try(self, msg, labels=None):
--        self.check_working_directory()
--
--        if labels:
--            config = self.write_task_config(labels)
--            self.run(['hg', 'add', config])
--
--        try:
--            return subprocess.check_call(['hg', 'push-to-try', '-m', msg])
--        except subprocess.CalledProcessError:
--            try:
--                self.run(['hg', 'showconfig', 'extensions.push-to-try'])
--            except subprocess.CalledProcessError:
--                print(HG_PUSH_TO_TRY_NOT_FOUND)
--            return 1
--        finally:
--            self.run(['hg', 'revert', '-a'])
--
--            if labels and os.path.isfile(config):
--                os.remove(config)
--
--    @property
--    def files_changed(self):
--        return self.run(['hg', 'log', '-r', '::. and not public()',
--                         '--template', '{join(files, "\n")}\n'])
--
--    @property
--    def has_uncommitted_changes(self):
--        stat = [s for s in self.run(['hg', 'status', '-amrn']).split() if s]
--        return len(stat) > 0
--
--
--class GitHelper(VCSHelper):
--
--    def push_to_try(self, msg, labels=None):
--        self.check_working_directory()
--
--        if not find_executable('git-cinnabar'):
--            print(GIT_CINNABAR_NOT_FOUND)
--            return 1
--
--        if labels:
--            config = self.write_task_config(labels)
--            self.run(['git', 'add', config])
--
--        subprocess.check_call(['git', 'commit', '--allow-empty', '-m', msg])
--        try:
--            return subprocess.call(['git', 'push', 'hg::ssh://hg.mozilla.org/try',
--                                    '+HEAD:refs/heads/branches/default/tip'])
--        finally:
--            self.run(['git', 'reset', 'HEAD~'])
--
--    @property
--    def files_changed(self):
--        # This finds the files changed on the current branch based on the
--        # diff between the current branch and its merge-base with other
--        # branches.
--        current_branch = self.run(['git', 'rev-parse', 'HEAD']).strip()
--        all_branches = self.run(['git', 'for-each-ref', 'refs/heads', 'refs/remotes',
--                                 '--format=%(objectname)']).splitlines()
--        other_branches = set(all_branches) - set([current_branch])
--        base_commit = self.run(['git', 'merge-base', 'HEAD'] + list(other_branches)).strip()
--        return self.run(['git', 'diff', '--name-only', '-z', 'HEAD',
--                         base_commit]).strip('\0').split('\0')
--
--    @property
--    def has_uncommitted_changes(self):
--        stat = [s for s in self.run(['git', 'diff', '--cached', '--name-only',
--                                     '--diff-filter=AMD']).split() if s]
--        return len(stat) > 0
--
--
--vcs_class = {
--    'git': GitHelper,
--    'hg': HgHelper,
--}
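
Taken together, the deleted tryselect code parsed arguments such as
"mochitest[dom,editor],xpcshell" into a dict of suites and subitems via the
TryArgumentTokenizer/TryArgumentParser pair above. A compact regex-based
Python sketch of that behaviour (an illustration assuming well-formed input,
not the removed implementation):

    import re

    def parse_try_arg(arg):
        # "mochitest[dom,editor],xpcshell"
        #   -> {"mochitest": ["dom", "editor"], "xpcshell": []}
        tokens = re.findall(r"[^\[\],]+|[\[\],]", arg)
        data, current, in_list = {}, None, False
        for tok in tokens:
            if tok == "[":
                in_list = True
            elif tok == "]":
                in_list = False
            elif tok == ",":
                continue
            elif in_list:
                data[current].append(tok.strip())
            else:
                current = tok.strip()
                data.setdefault(current, [])
        return data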

+ 0 - 18
comm-esr60/mozilla-esr60/patches/series

@@ -217,24 +217,6 @@ NOBUG-BACKOUT-1439860-60.patch
 TOP-9999999-rust133-257.patch
 TOP-9999999-fixlangpack-257.patch
 mozilla-esr60-top-nonexisting.patch
-1602262-safebrowsing-pref.patch
-1583041-xlib_logging_hack.patch
-1372458-63a1.patch
-1472672-63a1.patch
-1502090-65a1.patch
-1445671-61a1.patch
-1602261-retsulcksat.patch
-1491467-64a1.patch
-1469790-63a1.patch
-1509867-65a1.patch
-1516605-66a1.patch
-1542829-68a1.patch
-1513236-68a1.patch
-1362858-backout-60-TBonly.patch
-1562176-70a1.patch
-1566465-70a1.patch
-1387428-Backout-56.patch
-1578303_enable_loginmanagercontextmenu-71a1.patch
 1578075-70.patch
 1574573-70a1.patch
 1576463-71a1.patch

+ 49 - 0
rel-257/mozilla-esr60/patches/1233768-61a1.patch

@@ -0,0 +1,49 @@
+# HG changeset patch
+# User Evelyn Hung <jj.evelyn@gmail.com>
+# Date 1497258988 -28800
+# Node ID eaf99ba3813aa82632262ba4fc0438a7e2574af6
+# Parent  2cba736ec8d443921b3bb16ceb36dbd1b3efe8ac
+Bug 1362858 - Part 1: make word boundary check more consistent. r=Ehsan
+
+We use ClassifyCharacter to detect all possibilities of word
+boundaries when building mRealWords, but not when building soft text.
+This inconsistency leads us to repeatedly check the same set of words
+in some cases.
+
+diff --git a/extensions/spellcheck/src/mozInlineSpellWordUtil.cpp b/extensions/spellcheck/src/mozInlineSpellWordUtil.cpp
+--- a/extensions/spellcheck/src/mozInlineSpellWordUtil.cpp
++++ b/extensions/spellcheck/src/mozInlineSpellWordUtil.cpp
+@@ -580,21 +580,30 @@ static inline bool IsBRElement(nsINode* 
+  */
+ static bool TextNodeContainsDOMWordSeparator(nsINode* aNode,
+                                              int32_t aBeforeOffset,
+                                              int32_t* aSeparatorOffset) {
+   // aNode is actually an nsIContent, since it's eTEXT
+   nsIContent* content = static_cast<nsIContent*>(aNode);
+   const nsTextFragment* textFragment = content->GetText();
+   NS_ASSERTION(textFragment, "Where is our text?");
+-  for (int32_t i = std::min(aBeforeOffset, int32_t(textFragment->GetLength())) - 1; i >= 0; --i) {
+-    if (IsDOMWordSeparator(textFragment->CharAt(i))) {
++  nsString text;
++  int32_t end = std::min(aBeforeOffset, int32_t(textFragment->GetLength()));
++  bool ok = textFragment->AppendTo(text, 0, end, mozilla::fallible);
++  if(!ok)
++    return false;
++
++  WordSplitState state(nullptr, text, 0, end);
++  for (int32_t i = end - 1; i >= 0; --i) {
++    if (IsDOMWordSeparator(textFragment->CharAt(i)) ||
++        state.ClassifyCharacter(i, true) == CHAR_CLASS_SEPARATOR) {
+       // Be greedy, find as many separators as we can
+       for (int32_t j = i - 1; j >= 0; --j) {
+-        if (IsDOMWordSeparator(textFragment->CharAt(j))) {
++        if (IsDOMWordSeparator(textFragment->CharAt(j)) ||
++            state.ClassifyCharacter(j, true) == CHAR_CLASS_SEPARATOR) {
+           i = j;
+         } else {
+           break;
+         }
+       }
+       *aSeparatorOffset = i;
+       return true;
+     }
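
The behaviour this hunk adds, sketched in Python for orientation (illustrative
only, not the C++ code): scan backwards for a character that is either a
DOM-word separator or classified as CHAR_CLASS_SEPARATOR, then greedily extend
the separator run leftwards and report where it starts.

    def find_last_separator(text, before_offset, is_separator):
        # Mirrors TextNodeContainsDOMWordSeparator: find the last
        # separator before before_offset, then widen the run leftwards.
        i = min(before_offset, len(text)) - 1
        while i >= 0:
            if is_separator(text[i]):
                j = i - 1
                while j >= 0 and is_separator(text[j]):
                    i, j = j, j - 1
                return i
            i -= 1
        return None

    # find_last_separator("foo  bar", 8, str.isspace) -> 3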

+ 70 - 68
comm-esr60/mozilla-esr60/patches/1372458-63a1.patch → rel-257/mozilla-esr60/patches/1372458-63a1.patch

@@ -2,7 +2,7 @@
 # User Matt Woodrow <mwoodrow@mozilla.com>
 # Date 1531538749 -43200
 # Node ID d48e40cba0b40df512ba0bf0a35f5f0fea9d0b9c
-# Parent  163263864ca057632504cdcd0b32505019b83594
+# Parent  32a9e6442c87cd05bd3dffe0343a64f2eb230846
 Bug 1372458 - Fold opacity into filter drawing rather than using a temporary surface. r=bas,mstange
 
 MozReview-Commit-ID: GOBTUhN7fcC
@@ -10,19 +10,20 @@ MozReview-Commit-ID: GOBTUhN7fcC
 diff --git a/gfx/2d/DrawTargetD2D1.cpp b/gfx/2d/DrawTargetD2D1.cpp
 --- a/gfx/2d/DrawTargetD2D1.cpp
 +++ b/gfx/2d/DrawTargetD2D1.cpp
-@@ -245,17 +245,34 @@ DrawTargetD2D1::DrawFilter(FilterNode *a
+@@ -230,18 +230,34 @@ void DrawTargetD2D1::DrawFilter(FilterNo
  
    PrepareForDrawing(aOptions.mCompositionOp, ColorPattern(Color()));
  
    mDC->SetAntialiasMode(D2DAAMode(aOptions.mAntialiasMode));
  
-   FilterNodeD2D1* node = static_cast<FilterNodeD2D1*>(aNode);
+   FilterNodeD2D1 *node = static_cast<FilterNodeD2D1 *>(aNode);
    node->WillDraw(this);
  
--  mDC->DrawImage(node->OutputEffect(), D2DPoint(aDestPoint), D2DRect(aSourceRect));
+-  mDC->DrawImage(node->OutputEffect(), D2DPoint(aDestPoint),
+-                 D2DRect(aSourceRect));
 +  if (aOptions.mAlpha == 1.0f) {
-+    mDC->DrawImage(node->OutputEffect(), D2DPoint(aDestPoint), D2DRect(aSourceRect));
-+  } else {
++    mDC->DrawImage(node->OutputEffect(), D2DPoint(aDestPoint),
++                   D2DRect(aSourceRect));
 +    RefPtr<ID2D1Image> image;
 +    node->OutputEffect()->GetOutput(getter_AddRefs(image));
 +
@@ -42,64 +43,63 @@ diff --git a/gfx/2d/DrawTargetD2D1.cpp b/gfx/2d/DrawTargetD2D1.cpp
    FinalizeDrawing(aOptions.mCompositionOp, ColorPattern(Color()));
  }
  
- void
- DrawTargetD2D1::DrawSurfaceWithShadow(SourceSurface *aSurface,
-                                       const Point &aDest,
-                                       const Color &aColor,
+ void DrawTargetD2D1::DrawSurfaceWithShadow(SourceSurface *aSurface,
+                                            const Point &aDest,
+                                            const Color &aColor,
+                                            const Point &aOffset, Float aSigma,
 diff --git a/layout/svg/nsFilterInstance.cpp b/layout/svg/nsFilterInstance.cpp
 --- a/layout/svg/nsFilterInstance.cpp
 +++ b/layout/svg/nsFilterInstance.cpp
-@@ -59,17 +59,18 @@ UserSpaceMetricsForFrame(nsIFrame* aFram
+@@ -53,17 +53,17 @@ static UniquePtr<UserSpaceMetrics> UserS
+     return MakeUnique<SVGElementMetrics>(element);
+   }
    return MakeUnique<NonSVGFrameUserSpaceMetrics>(aFrame);
  }
  
- void
- nsFilterInstance::PaintFilteredFrame(nsIFrame *aFilteredFrame,
-                                      gfxContext* aCtx,
-                                      nsSVGFilterPaintCallback *aPaintCallback,
-                                      const nsRegion *aDirtyArea,
--                                     imgDrawingParams& aImgParams)
-+                                     imgDrawingParams& aImgParams,
-+                                     float aOpacity)
- {
+ void nsFilterInstance::PaintFilteredFrame(
+     nsIFrame* aFilteredFrame, gfxContext* aCtx,
+     nsSVGFilterPaintCallback* aPaintCallback, const nsRegion* aDirtyArea,
+-    imgDrawingParams& aImgParams) {
++    imgDrawingParams& aImgParams, float aOpacity) {
    auto& filterChain = aFilteredFrame->StyleEffects()->mFilters;
-   UniquePtr<UserSpaceMetrics> metrics = UserSpaceMetricsForFrame(aFilteredFrame);
+   UniquePtr<UserSpaceMetrics> metrics =
+       UserSpaceMetricsForFrame(aFilteredFrame);
  
    gfxContextMatrixAutoSaveRestore autoSR(aCtx);
-   gfxSize scaleFactors = aCtx->CurrentMatrix().ScaleFactors(true);
-   gfxMatrix scaleMatrix(scaleFactors.width, 0.0f,
-                         0.0f, scaleFactors.height,
-@@ -87,17 +88,17 @@ nsFilterInstance::PaintFilteredFrame(nsI
+   gfxSize scaleFactors = aCtx->CurrentMatrixDouble().ScaleFactors(true);
+   if (scaleFactors.IsEmpty()) {
+     return;
+@@ -84,17 +84,17 @@ void nsFilterInstance::PaintFilteredFram
  
    // Hardcode InputIsTainted to true because we don't want JS to be able to
    // read the rendered contents of aFilteredFrame.
    nsFilterInstance instance(aFilteredFrame, aFilteredFrame->GetContent(),
                              *metrics, filterChain, /* InputIsTainted */ true,
-                             aPaintCallback, scaleMatrixInDevUnits,
-                             aDirtyArea, nullptr, nullptr, nullptr);
+                             aPaintCallback, scaleMatrixInDevUnits, aDirtyArea,
+                             nullptr, nullptr, nullptr);
    if (instance.IsInitialized()) {
 -    instance.Render(aCtx, aImgParams);
 +    instance.Render(aCtx, aImgParams, aOpacity);
    }
  }
  
- nsRegion
- nsFilterInstance::GetPostFilterDirtyArea(nsIFrame *aFilteredFrame,
-                                          const nsRegion& aPreFilterDirtyRegion)
- {
+ nsRegion nsFilterInstance::GetPostFilterDirtyArea(
+     nsIFrame* aFilteredFrame, const nsRegion& aPreFilterDirtyRegion) {
    if (aPreFilterDirtyRegion.IsEmpty()) {
-@@ -484,17 +485,17 @@ nsFilterInstance::BuildSourceImage(imgDr
+     return nsRegion();
+   }
+@@ -456,17 +456,18 @@ void nsFilterInstance::BuildSourceImage(
  
-   mPaintCallback->Paint(*ctx, mTargetFrame, mPaintTransform, &dirty, aImgParams);
+   mPaintCallback->Paint(*ctx, mTargetFrame, mPaintTransform, &dirty,
+                         aImgParams);
  
    mSourceGraphic.mSourceSurface = offscreenDT->Snapshot();
    mSourceGraphic.mSurfaceRect = neededRect;
  }
  
- void
--nsFilterInstance::Render(gfxContext* aCtx, imgDrawingParams& aImgParams)
-+nsFilterInstance::Render(gfxContext* aCtx, imgDrawingParams& aImgParams, float aOpacity)
- {
+-void nsFilterInstance::Render(gfxContext* aCtx, imgDrawingParams& aImgParams) {
++void nsFilterInstance::Render(gfxContext* aCtx, imgDrawingParams& aImgParams,
++                              float aOpacity) {
    MOZ_ASSERT(mTargetFrame, "Need a frame for rendering");
  
    if (mPrimitiveDescriptions.IsEmpty()) {
@@ -107,36 +107,37 @@ diff --git a/layout/svg/nsFilterInstance.cpp b/layout/svg/nsFilterInstance.cpp
      return;
    }
  
-@@ -512,17 +513,17 @@ nsFilterInstance::Render(gfxContext* aCt
-   BuildSourceImage(aImgParams);
+   nsIntRect filterRect =
+@@ -484,17 +485,17 @@ void nsFilterInstance::Render(gfxContext
+   BuildSourceImage(aCtx->GetDrawTarget(), aImgParams);
    BuildSourcePaints(aImgParams);
  
    FilterSupport::RenderFilterDescription(
-     aCtx->GetDrawTarget(), mFilterDescription, IntRectToRect(filterRect),
-     mSourceGraphic.mSourceSurface, mSourceGraphic.mSurfaceRect,
-     mFillPaint.mSourceSurface, mFillPaint.mSurfaceRect,
-     mStrokePaint.mSourceSurface, mStrokePaint.mSurfaceRect,
--    mInputImages, Point(0, 0));
-+    mInputImages, Point(0, 0), DrawOptions(aOpacity));
+       aCtx->GetDrawTarget(), mFilterDescription, IntRectToRect(filterRect),
+       mSourceGraphic.mSourceSurface, mSourceGraphic.mSurfaceRect,
+       mFillPaint.mSourceSurface, mFillPaint.mSurfaceRect,
+       mStrokePaint.mSourceSurface, mStrokePaint.mSurfaceRect, mInputImages,
+-      Point(0, 0));
++      Point(0, 0), DrawOptions(aOpacity));
  }
  
- nsRegion
- nsFilterInstance::ComputePostFilterDirtyRegion()
- {
+ nsRegion nsFilterInstance::ComputePostFilterDirtyRegion() {
    if (mPreFilterDirtyRegion.IsEmpty() || mPrimitiveDescriptions.IsEmpty()) {
      return nsRegion();
    }
+ 
+   nsIntRegion resultChangeRegion = FilterSupport::ComputeResultChangeRegion(
 diff --git a/layout/svg/nsFilterInstance.h b/layout/svg/nsFilterInstance.h
 --- a/layout/svg/nsFilterInstance.h
 +++ b/layout/svg/nsFilterInstance.h
-@@ -82,17 +82,18 @@ public:
+@@ -81,17 +81,18 @@ class nsFilterInstance {
+    * Paint the given filtered frame.
    * @param aDirtyArea The area that needs to be painted, in aFilteredFrame's
     *   frame space (i.e. relative to its origin, the top-left corner of its
     *   border box).
     */
-   static void PaintFilteredFrame(nsIFrame *aFilteredFrame,
-                                  gfxContext* aCtx,
-                                  nsSVGFilterPaintCallback *aPaintCallback,
+   static void PaintFilteredFrame(nsIFrame* aFilteredFrame, gfxContext* aCtx,
+                                  nsSVGFilterPaintCallback* aPaintCallback,
                                   const nsRegion* aDirtyArea,
 -                                 imgDrawingParams& aImgParams);
 +                                 imgDrawingParams& aImgParams,
@@ -148,8 +149,8 @@ diff --git a/layout/svg/nsFilterInstance.h b/layout/svg/nsFilterInstance.h
     * @param aPreFilterDirtyRegion The pre-filter area of aFilteredFrame that has
     *   changed, relative to aFilteredFrame, in app units.
     */
-   static nsRegion GetPostFilterDirtyArea(nsIFrame *aFilteredFrame,
-@@ -162,17 +163,17 @@ private:
+   static nsRegion GetPostFilterDirtyArea(nsIFrame* aFilteredFrame,
+@@ -160,17 +161,18 @@ class nsFilterInstance {
    bool IsInitialized() const { return mInitialized; }
  
    /**
@@ -159,10 +160,11 @@ diff --git a/layout/svg/nsFilterInstance.h b/layout/svg/nsFilterInstance.h
     * nsFilterInstance constructor.
     */
 -  void Render(gfxContext* aCtx, imgDrawingParams& aImgParams);
-+  void Render(gfxContext* aCtx, imgDrawingParams& aImgParams, float aOpacity = 1.0f);
++  void Render(gfxContext* aCtx, imgDrawingParams& aImgParams,
++              float aOpacity = 1.0f);
  
-   const FilterDescription& ExtractDescriptionAndAdditionalImages(nsTArray<RefPtr<SourceSurface>>& aOutAdditionalImages)
-   {
+   const FilterDescription& ExtractDescriptionAndAdditionalImages(
+       nsTArray<RefPtr<SourceSurface>>& aOutAdditionalImages) {
      mInputImages.SwapElements(aOutAdditionalImages);
      return mFilterDescription;
    }
@@ -171,7 +173,7 @@ diff --git a/layout/svg/nsFilterInstance.h b/layout/svg/nsFilterInstance.h
 diff --git a/layout/svg/nsSVGIntegrationUtils.cpp b/layout/svg/nsSVGIntegrationUtils.cpp
 --- a/layout/svg/nsSVGIntegrationUtils.cpp
 +++ b/layout/svg/nsSVGIntegrationUtils.cpp
-@@ -1081,32 +1081,23 @@ nsSVGIntegrationUtils::PaintFilter(const
+@@ -1024,32 +1024,23 @@ void nsSVGIntegrationUtils::PaintFilter(
      return;
    }
  
@@ -181,8 +183,8 @@ diff --git a/layout/svg/nsSVGIntegrationUtils.cpp b/layout/svg/nsSVGIntegrationU
    EffectOffsets offsets = MoveContextOriginToUserSpace(firstFrame, aParams);
  
 -  if (opacity != 1.0f) {
--    context.PushGroupForBlendBack(gfxContentType::COLOR_ALPHA, opacity,
--                                  nullptr, Matrix());
+-    context.PushGroupForBlendBack(gfxContentType::COLOR_ALPHA, opacity, nullptr,
+-                                  Matrix());
 -  }
 -
    /* Paint the child and apply filters */
@@ -190,18 +192,18 @@ diff --git a/layout/svg/nsSVGIntegrationUtils.cpp b/layout/svg/nsSVGIntegrationU
                                       offsets.offsetToUserSpaceInDevPx);
    nsRegion dirtyRegion = aParams.dirtyRect - offsets.offsetToBoundingBox;
  
-   nsFilterInstance::PaintFilteredFrame(frame, &context, &callback,
--                                       &dirtyRegion, aParams.imgParams);
+   nsFilterInstance::PaintFilteredFrame(frame, &context, &callback, &dirtyRegion,
+-                                       aParams.imgParams);
 -
 -  if (opacity != 1.0f) {
 -    context.PopGroupAndBlend();
 -  }
-+                                       &dirtyRegion, aParams.imgParams, opacity);
++                                       aParams.imgParams, opacity);
  }
  
  class PaintFrameCallback : public gfxDrawingCallback {
- public:
-   PaintFrameCallback(nsIFrame* aFrame,
-                      const nsSize aPaintServerSize,
-                      const IntSize aRenderSize,
-                      uint32_t aFlags)
+  public:
+   PaintFrameCallback(nsIFrame* aFrame, const nsSize aPaintServerSize,
+                      const IntSize aRenderSize, uint32_t aFlags)
+       : mFrame(aFrame),
+         mPaintServerSize(aPaintServerSize),
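
The net effect of the patch, as a hedged Python-style sketch (the names here
are illustrative, not the real gfx API): instead of rendering the filter into
a temporary group and blending it back at the given opacity, the opacity rides
along as an alpha draw option on the single filter draw.

    def draw_filter_output(dt, image, dest, source_rect, alpha=1.0):
        # Sketch of the new DrawTargetD2D1::DrawFilter branch: full-opacity
        # draws go straight through; otherwise alpha is folded into the
        # draw options rather than a push-group/pop-group round trip.
        if alpha == 1.0:
            dt.draw_image(image, dest, source_rect)
        else:
            dt.draw_image(image, dest, source_rect, options={"alpha": alpha})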

+ 179 - 0
rel-257/mozilla-esr60/patches/1418629-68a1.patch

@@ -0,0 +1,179 @@
+# HG changeset patch
+# User Makoto Kato <m_kato@ga2.so-net.ne.jp>
+# Date 1557308165 0
+# Node ID 5822c9d23ff717f637b5cd9c2c24a8e2d223fcb8
+# Parent  c68cb2a8cdd17d72bd37a0e1499f26f78ca00700
+Bug 1418629 - Single quotation mark shouldn't always be a separator. r=Ehsan
+
+This seems to be a regression from bug 1362858.
+
+Actually, a single quotation mark always acts as a separator for the
+spellchecker after landing bug 1362858. When the user tries to input
+"doesn't", "'" becomes a separator for the spellchecker, and "doesn" is then
+flagged as a misspelt word.
+
+So we shouldn't mark a single quotation mark as a separator while the user is
+still typing the word.
+
+Differential Revision: https://phabricator.services.mozilla.com/D29153
+
+diff --git a/editor/spellchecker/tests/mochitest.ini b/editor/spellchecker/tests/mochitest.ini
+--- a/editor/spellchecker/tests/mochitest.ini
++++ b/editor/spellchecker/tests/mochitest.ini
+@@ -7,20 +7,22 @@ support-files =
+   bug1204147_subframe.html
+   bug1204147_subframe2.html
+   en-GB/en_GB.dic
+   en-GB/en_GB.aff
+   en-AU/en_AU.dic
+   en-AU/en_AU.aff
+   de-DE/de_DE.dic
+   de-DE/de_DE.aff
++  !/editor/libeditor/tests/spellcheck.js
+ 
+ [test_async_UpdateCurrentDictionary.html]
+ [test_bug678842.html]
+ [test_bug697981.html]
+ [test_bug717433.html]
+ [test_bug1200533.html]
+ [test_bug1204147.html]
+ [test_bug1205983.html]
+ [test_bug1209414.html]
+ [test_bug1219928.html]
+ skip-if = e10s
+ [test_bug1365383.html]
++[test_bug1418629.html]
+diff --git a/editor/spellchecker/tests/test_bug1418629.html b/editor/spellchecker/tests/test_bug1418629.html
+new file mode 100644
+--- /dev/null
++++ b/editor/spellchecker/tests/test_bug1418629.html
+@@ -0,0 +1,96 @@
++<!DOCTYPE html>
++<html>
++<head>
++  <title>Mozilla bug 1418629</title>
++  <link rel=stylesheet href="/tests/SimpleTest/test.css">
++  <script src="/tests/SimpleTest/EventUtils.js"></script>
++  <script src="/tests/SimpleTest/SimpleTest.js"></script>
++  <script src="/tests/SimpleTest/AddTask.js"></script>
++  <script src="/tests/editor/libeditor/tests/spellcheck.js"></script>
++</head>
++<body>
++<a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=1418629">Mozilla Bug 1418629</a>
++<p id="display"></p>
++<div id="content" style="display: none;">
++
++</div>
++
++<input id="input1" autofocus spellcheck="true">
++
++<script>
++const {onSpellCheck} = SpecialPowers.Cu.import("resource://testing-common/AsyncSpellCheckTestHelper.jsm", {});
++
++SimpleTest.waitForExplicitFinish();
++
++add_task(async function() {
++  await new Promise((resolve) => {
++    SimpleTest.waitForFocus(() => {
++      SimpleTest.executeSoon(resolve);
++    }, window);
++  });
++
++  let misspeltWords = [];
++  let input = document.getElementById("input1");
++
++  input.focus();
++  input.value = "";
++  synthesizeKey("d");
++  synthesizeKey("o");
++  synthesizeKey("e");
++  synthesizeKey("s");
++
++  await new Promise((resolve) => { onSpellCheck(input, resolve); });
++  // isSpellingCheckOk is defined in spellcheck.js
++  // eslint-disable-next-line no-undef
++  ok(isSpellingCheckOk(SpecialPowers.wrap(input).editor, misspeltWords),
++     "no misspelt words");
++
++  synthesizeKey("n");
++  synthesizeKey("\'");
++  is(input.value, "doesn\'", "");
++
++  await new Promise((resolve) => { onSpellCheck(input, resolve); });
++  // isSpellingCheckOk is defined in spellcheck.js
++  // eslint-disable-next-line no-undef
++  ok(isSpellingCheckOk(SpecialPowers.wrap(input).editor, misspeltWords),
++     "don't run spellchecker during inputting word");
++
++  synthesizeKey(" ");
++  is(input.value, "doesn\' ", "");
++
++  await new Promise((resolve) => { onSpellCheck(input, resolve); });
++  misspeltWords.push("doesn\'");
++  // isSpellingCheckOk is defined in spellcheck.js
++  // eslint-disable-next-line no-undef
++  ok(isSpellingCheckOk(SpecialPowers.wrap(input).editor, misspeltWords),
++     "should run spellchecker");
++});
++
++async function test_with_twice_characters(ch) {
++  let misspeltWords = [];
++  let input = document.getElementById("input1");
++
++  input.focus();
++  input.value = "";
++  synthesizeKey("d");
++  synthesizeKey("o");
++  synthesizeKey("e");
++  synthesizeKey("s");
++  synthesizeKey("n");
++  synthesizeKey(ch);
++  synthesizeKey(ch);
++  is(input.value, "doesn" + ch + ch, "");
++
++  await new Promise((resolve) => { onSpellCheck(input, resolve); });
++  misspeltWords.push("doesn");
++  // isSpellingCheckOk is defined in spellcheck.js
++  // eslint-disable-next-line no-undef
++  ok(isSpellingCheckOk(SpecialPowers.wrap(input).editor, misspeltWords),
++     "should run spellchecker");
++}
++
++add_task(test_with_twice_characters.bind(null, "\'"));
++add_task(test_with_twice_characters.bind(null, String.fromCharCode(0x2019)));
++</script>
++</body>
++</html>
+diff --git a/extensions/spellcheck/src/mozInlineSpellWordUtil.cpp b/extensions/spellcheck/src/mozInlineSpellWordUtil.cpp
+--- a/extensions/spellcheck/src/mozInlineSpellWordUtil.cpp
++++ b/extensions/spellcheck/src/mozInlineSpellWordUtil.cpp
+@@ -385,18 +385,29 @@ CharClass WordSplitState::ClassifyCharac
+     if (aIndex == 0) return CHAR_CLASS_SEPARATOR;
+     if (ClassifyCharacter(aIndex - 1, false) != CHAR_CLASS_WORD)
+       return CHAR_CLASS_SEPARATOR;
+     // If the previous character is a word-char, make sure that it's not a
+     // special dot character.
+     if (mDOMWordText[aIndex - 1] == '.') return CHAR_CLASS_SEPARATOR;
+ 
+     // now we know left char is a word-char, check the right-hand character
+-    if (aIndex == int32_t(mDOMWordText.Length()) - 1)
++    if (aIndex == int32_t(mDOMWordText.Length() - 1)) {
++      if (mDOMWordText[aIndex] == '\'' || mDOMWordText[aIndex] == 0x2019) {
++        nsUGenCategory prevCategory =
++            mozilla::unicode::GetGenCategory(mDOMWordText[aIndex - 1]);
++        if (prevCategory == nsUGenCategory::kLetter ||
++            prevCategory == nsUGenCategory::kNumber) {
++          // If single quotation mark is last, we don't return separator yet.
++          return CHAR_CLASS_WORD;
++        }
++      }
+       return CHAR_CLASS_SEPARATOR;
++    }
++
+     if (ClassifyCharacter(aIndex + 1, false) != CHAR_CLASS_WORD)
+       return CHAR_CLASS_SEPARATOR;
+     // If the next character is a word-char, make sure that it's not a
+     // special dot character.
+     if (mDOMWordText[aIndex + 1] == '.') return CHAR_CLASS_SEPARATOR;
+ 
+     // char on either side is a word, this counts as a word
+     return CHAR_CLASS_WORD;
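
The classification rule this hunk adds, sketched in Python (illustrative, with
unicodedata standing in for nsUGenCategory): a word-final ' or U+2019 that
directly follows a letter or digit stays part of the word, since the user may
still be mid-word, as in "doesn'".

    import unicodedata

    APOSTROPHES = {"'", "\u2019"}

    def trailing_quote_is_word_char(word, i):
        # True when word[i] is a trailing apostrophe preceded by a letter
        # or number, so "doesn'" is not yet split at the quote.
        if i == len(word) - 1 and word[i] in APOSTROPHES and i > 0:
            return unicodedata.category(word[i - 1])[0] in ("L", "N")
        return False

    # trailing_quote_is_word_char("doesn'", 5) -> True
    # trailing_quote_is_word_char("foo '", 4)  -> False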

+ 16 - 25
rel-257/mozilla-esr60/patches/1437128-61a1.patch → rel-257/mozilla-esr60/patches/1437128-61a1.patch

@@ -2,7 +2,7 @@
 # User Dão Gottwald <dao@mozilla.com>
 # Date 1524567548 -7200
 # Node ID 7b8e832071f1c1b64d05dd597d692f4d4eb0866e
-# Parent  a49cc82c6f2920b693f0c47250e8e9292ce8b217
+# Parent  b5a845e976b6cf54fbfc88abdfb45d8da3cf628e
 Bug 1445671 - Stop using -moz-font-smoothing-background-color for the selected tab when using a lightweight theme. r=mstange
 
 MozReview-Commit-ID: 32ZrF86Xeon
@@ -10,14 +10,14 @@ MozReview-Commit-ID: 32ZrF86Xeon
 diff --git a/browser/themes/osx/browser.css b/browser/themes/osx/browser.css
 --- a/browser/themes/osx/browser.css
 +++ b/browser/themes/osx/browser.css
-@@ -1586,17 +1586,17 @@ toolbarbutton.chevron > .toolbarbutton-m
- %endif
- 
- %ifndef MOZ_PHOTON_THEME
- #navigator-toolbox[inFullscreen] > #TabsToolbar {
-   padding-top: var(--space-above-tabbar);
+@@ -754,17 +754,17 @@ html|input.urlbar-input {
+ :root:-moz-any([inFullscreen], [tabsintitlebar]) #TabsToolbar:not(:-moz-lwtheme) {
+   -moz-appearance: -moz-mac-vibrant-titlebar-dark;
+   -moz-font-smoothing-background-color: -moz-mac-vibrant-titlebar-dark;
+   background-color: #232323;
+   color: hsl(240, 9%, 98%);
+   text-shadow: none;
  }
- %endif
  
 -.tabbrowser-tab[visuallyselected=true] {
 +.tabbrowser-tab[visuallyselected=true]:not(:-moz-lwtheme) {
@@ -28,29 +28,20 @@ diff --git a/browser/themes/osx/browser.css b/browser/themes/osx/browser.css
    -moz-box-align: stretch;
  }
  
- .tabs-newtab-button > .toolbarbutton-icon {
+ /**
 diff --git a/browser/themes/osx/compacttheme.css b/browser/themes/osx/compacttheme.css
 --- a/browser/themes/osx/compacttheme.css
 +++ b/browser/themes/osx/compacttheme.css
-@@ -30,16 +30,21 @@
+@@ -8,8 +8,12 @@
+ #main-window[tabsintitlebar] #titlebar-content {
+   background: var(--chrome-background-color);
+ }
  
- %ifndef MOZ_PHOTON_THEME
- /* Resize things so that the native titlebar is in line with the tabs */
- #main-window[tabsintitlebar] > #titlebar > #titlebar-content > #titlebar-buttonbox-container,
- #main-window[tabsintitlebar] > #titlebar > #titlebar-content > #titlebar-secondary-buttonbox > #titlebar-fullscreen-button {
-   margin-top: 6px;
+ #TabsToolbar:-moz-lwtheme-darktext {
+   -moz-appearance: -moz-mac-vibrant-titlebar-light;
+   -moz-font-smoothing-background-color: -moz-mac-vibrant-titlebar-light;
  }
- %endif
 +
 +.tabbrowser-tab[visuallyselected=true] {
 +  -moz-font-smoothing-background-color: var(--toolbar-bgcolor);
 +}
-+
- /* Prevent the hover styling from on the identity icon from overlapping the
-    urlbar border. */
- #identity-box {
-   margin-top: -1px !important;
-   margin-bottom: -1px !important;
-   padding-top: 3px !important;
-   padding-bottom: 3px !important;
- }

+ 4 - 3
rel-257/mozilla-esr60/patches/1445969-61a1.patch → rel-257/mozilla-esr60/patches/1445969-61a1.patch

@@ -2,7 +2,7 @@
 # User Mike Hommey <mh+mozilla@glandium.org>
 # Date 1530054447 -32400
 # Node ID 9d09683ea3d8997c9f4405102f7a97e3236e58a4
-# Parent  37b765db84edbca72889b5e8fce38a3f256f0ba7
+# Parent  e0497e118632e0e347a73cecf3d2c07a9b003d62
 Bug 1469790 - Build for NEON by default when targeting Android arm. r=nalexander
 
 The media/libpng/moz.build file overrides the C standard used via
@@ -23,7 +23,7 @@ diff --git a/build/autoconf/arch.m4 b/build/autoconf/arch.m4
      arm-Android)
          MOZ_THUMB=yes
          MOZ_ARCH=armv7-a
--        MOZ_FPU=vfp
+-        MOZ_FPU=vfpv3-d16
 +        MOZ_FPU=neon
          MOZ_FLOAT_ABI=softfp
          MOZ_ALIGN=no
@@ -46,6 +46,7 @@ diff --git a/media/libpng/moz.build b/media/libpng/moz.build
  
  if CONFIG['CC_TYPE'] in ('clang', 'gcc'):
 -    if CONFIG['OS_ARCH'] == 'WINNT':
-         CFLAGS += ['-std=gnu89']
+-        CFLAGS += ['-std=gnu89']
 -    else:
 -        CFLAGS += ['-std=c89']
++    CFLAGS += ['-std=gnu89']

+ 7 - 7
comm-esr60/mozilla-esr60/patches/1472672-63a1.patch → rel-257/mozilla-esr60/patches/1472672-63a1.patch

@@ -2,7 +2,7 @@
 # User Robert Bartlensky <rbartlensky@mozilla.com>
 # Date 1530709772 -3600
 # Node ID c463161da7b4a687f3e96d862246c0322dcd1c1f
-# Parent  10b9f1a22b36a22fae408ff72b457b0b48c47bc2
+# Parent  df9218c6d2cbb3ff688aa5819e83d0c52ff8af71
 Bug 1472672 - Add null check for textAcc. r=davidb
 
 MozReview-Commit-ID: AL7R1fdlcvN
@@ -10,17 +10,17 @@ MozReview-Commit-ID: AL7R1fdlcvN
 diff --git a/accessible/atk/nsMaiInterfaceText.cpp b/accessible/atk/nsMaiInterfaceText.cpp
 --- a/accessible/atk/nsMaiInterfaceText.cpp
 +++ b/accessible/atk/nsMaiInterfaceText.cpp
-@@ -417,18 +417,18 @@ getRangeExtentsCB(AtkText *aText, gint a
+@@ -388,18 +388,18 @@ static void getRangeExtentsCB(AtkText* a
+   aRect->width = rect.width;
+   aRect->height = rect.height;
  }
  
- static gint
- getCharacterCountCB(AtkText *aText)
- {
+ static gint getCharacterCountCB(AtkText* aText) {
    AccessibleWrap* accWrap = GetAccessibleWrap(ATK_OBJECT(aText));
    if (accWrap) {
      HyperTextAccessible* textAcc = accWrap->AsHyperText();
--    return
--      textAcc->IsDefunct() ? 0 : static_cast<gint>(textAcc->CharacterCount());
+-    return textAcc->IsDefunct() ? 0
+-                                : static_cast<gint>(textAcc->CharacterCount());
 +    return !textAcc || textAcc->IsDefunct() ?
 +        0 : static_cast<gint>(textAcc->CharacterCount());
    }

+ 45 - 0
rel-257/mozilla-esr60/patches/1473833-63a1.patch

@@ -0,0 +1,45 @@
+# HG changeset patch
+# User Ryan Hunt <rhunt@eqrion.net>
+# Date 1533145832 18000
+# Node ID f07a1e9cbab30755c76e3c1334b7d78aee1fb4e3
+# Parent  15579bf8eab673b19266166a3c9479ff8dbe1fd3
+Bug 1478815 part 7 - Add a buffer unrotate operation to DrawTarget. r=bas
+
+This commit adds a buffer unrotate operation to DrawTarget. It's
+initially implemented with LockBits in DrawTarget. DrawTargetDual
+overrides the implementation to pass on the operation to its
+DrawTargets.
+
+No override is given for DrawTargetCapture, as we intentionally
+avoid this code path when async painting because it can fail.
+
+This is needed so that RotatedBuffer can expose a single DrawTarget,
+which can be a DrawTarget (for normal alpha), DrawTargetDual (for
+component alpha), or DrawTargetCapture (when async painting).
+
+MozReview-Commit-ID: csjjZ733hl
+
+diff --git a/gfx/layers/LayerScope.cpp b/gfx/layers/LayerScope.cpp
+--- a/gfx/layers/LayerScope.cpp
++++ b/gfx/layers/LayerScope.cpp
+@@ -46,16 +46,20 @@
+ #include "nsIEventTarget.h"
+ #include "nsProxyRelease.h"
+ #include <list>
+ 
+ // Undo the damage done by mozzconf.h
+ #undef compress
+ #include "mozilla/Compression.h"
+ 
++// Undo the damage done by X11
++#ifdef Status
++#  undef Status
++#endif
+ // Protocol buffer (generated automatically)
+ #include "protobuf/LayerScopePacket.pb.h"
+ 
+ namespace mozilla {
+ namespace layers {
+ 
+ using namespace mozilla::Compression;
+ using namespace mozilla::gfx;

+ 2 - 3
rel-257/mozilla-esr60/patches/1488401-64a1.patch → rel-257/mozilla-esr60/patches/1488401-64a1.patch

@@ -2,7 +2,7 @@
 # User Ryan VanderMeulen <ryanvm@gmail.com>
 # Date 1537295591 0
 # Node ID 4e5643db5a23372cc1095c5ef60eca76b958c493
-# Parent  1d61cb1fb3d24fc0032936190e75d0a44b8d05c0
+# Parent  85b4ccdc6ef24d4e0c1041af69112aea953845f0
 Bug 1491467 - Update libpng to 1.6.35. r=aosmond
 
 Differential Revision: https://phabricator.services.mozilla.com/D5913
@@ -9178,11 +9178,10 @@ diff --git a/old-configure.in b/old-configure.in
 -MOZPNG=10634
 +MOZPNG=10635
  NSPR_VERSION=4
- NSPR_MINVER=4.19
+ NSPR_MINVER=4.23
  NSS_VERSION=3
  
  dnl Set the minimum version of toolkit libs used by mozilla
  dnl ========================================================
  GLIB_VERSION=2.22
  # 2_26 is the earliest version we can set GLIB_VERSION_MIN_REQUIRED.
-

+ 1 - 2
rel-257/mozilla-esr60/patches/1500637-65a1.patch → rel-257/mozilla-esr60/patches/1500637-65a1.patch

@@ -2,7 +2,7 @@
 # User byron jones <glob@mozilla.com>
 # Date 1543242758 0
 # Node ID 689677e786f3ed6c9c54094bc9e55ee482ba797e
-# Parent  209fcb29ed4fdceddaa9060ecc41f38dbc055ce4
+# Parent  a01dfd031addc694ece01c4f46a2bf9083881436
 Bug 1509867 - add moz.yaml to libpng, r=aosmond
 
 Differential Revision: https://phabricator.services.mozilla.com/D12901
@@ -41,4 +41,3 @@ new file mode 100644
 +  license: "libpng"
 +
 +  release: "1.6.35"
-

+ 23 - 2
rel-257/mozilla-esr60/patches/1510276-67a1.patch → rel-257/mozilla-esr60/patches/1510276-67a1.patch

@@ -2,7 +2,7 @@
 # User Ryan VanderMeulen <ryanvm@gmail.com>
 # Date 1556057075 0
 # Node ID 179c0370dfa12b59d2214509d41a8356962f5df7
-# Parent  30ed3b129f01e715bb12f91a7c11d6794d89601c
+# Parent  98c4e99bc09f8827b127015649f2143fa5efa4c5
 Bug 1513236 - Update libpng to version 1.6.37. r=aosmond
 
 Differential Revision: https://phabricator.services.mozilla.com/D15239
@@ -5534,4 +5534,25 @@ diff --git a/media/libpng/powerpc/powerpc_init.c b/media/libpng/powerpc/powerpc_
  #include <stdio.h>
  #include "../pngpriv.h"
  
-
+diff --git a/old-configure.in b/old-configure.in
+--- a/old-configure.in
++++ b/old-configure.in
+@@ -41,17 +41,17 @@ dnl ====================================
+ _SUBDIR_HOST_CFLAGS="$HOST_CFLAGS"
+ _SUBDIR_HOST_CXXFLAGS="$HOST_CXXFLAGS"
+ _SUBDIR_HOST_LDFLAGS="$HOST_LDFLAGS"
+ _SUBDIR_CONFIG_ARGS="$ac_configure_args"
+ 
+ dnl Set the version number of the libs included with mozilla
+ dnl ========================================================
+ MOZJPEG=62
+-MOZPNG=10635
++MOZPNG=10637
+ NSPR_VERSION=4
+ NSPR_MINVER=4.23
+ NSS_VERSION=3
+ 
+ dnl Set the minimum version of toolkit libs used by mozilla
+ dnl ========================================================
+ GLIB_VERSION=2.22
+ # 2_26 is the earliest version we can set GLIB_VERSION_MIN_REQUIRED.

+ 1 - 2
comm-esr60/mozilla-esr60/patches/1516605-66a1.patch → rel-257/mozilla-esr60/patches/1516605-66a1.patch

@@ -2,7 +2,7 @@
 # User Makoto Kato <m_kato@ga2.so-net.ne.jp>
 # Date 1546438291 0
 # Node ID 129f8c60783c4daba0c04106c3082c2f94890b66
-# Parent  7be73b4e5299792d47667c8587d56a5b2e36c71e
+# Parent  89493fd7e26ba523fa086ebe339d1614dfb7e1b9
 Bug 1516605 - Turn on NEON on aarch64 when using gcc or clang. r=aosmond
 
 libpng has NEON support using intrinsics, not assembler, so we should use it
@@ -40,4 +40,3 @@ diff --git a/media/libpng/moz.build b/media/libpng/moz.build
      ]
  
  if CONFIG['HAVE_ALTIVEC']:
-

+ 73 - 0
rel-257/mozilla-esr60/patches/1520909-65a1.patch

@@ -0,0 +1,73 @@
+# HG changeset patch
+# User Ted Campbell <tcampbell@mozilla.com>
+# Date 1540497002 0
+# Node ID 1c4bf766a99a657e2f88183afbef240e9e8e38ac
+# Parent  0e2815647e05a339b3b88f8d72346cc9c84293f6
+Bug 1502090 - Fix bailout tracking with fun.call. r=nbp
+
+NOTE: Multi-arg array.push is still disabled in Ion.
+
+Differential Revision: https://phabricator.services.mozilla.com/D9803
+
+diff --git a/js/src/jit-test/tests/ion/bug1502090.js b/js/src/jit-test/tests/ion/bug1502090.js
+new file mode 100644
+--- /dev/null
++++ b/js/src/jit-test/tests/ion/bug1502090.js
+@@ -0,0 +1,13 @@
++function f(o) {
++   var a = [o];
++   a.length = a[0];
++   var useless = function() {}
++   var sz = Array.prototype.push.call(a, 42, 43);
++   (function(){
++       sz;
++   })(new Boolean(false));
++}
++for (var i = 0; i < 2; i++) {
++   f(1);
++}
++f(2);
+diff --git a/js/src/jit/IonBuilder.cpp b/js/src/jit/IonBuilder.cpp
+--- a/js/src/jit/IonBuilder.cpp
++++ b/js/src/jit/IonBuilder.cpp
+@@ -4854,32 +4854,38 @@ AbortReasonOr<Ok> IonBuilder::jsop_funca
+     return makeCall(native, callInfo);
+   }
+   current->peek(calleeDepth)->setImplicitlyUsedUnchecked();
+ 
+   // Extract call target.
+   TemporaryTypeSet* funTypes = current->peek(funcDepth)->resultTypeSet();
+   JSFunction* target = getSingleCallTarget(funTypes);
+ 
++  CallInfo callInfo(alloc(), pc, /* constructing = */ false,
++                    /* ignoresReturnValue = */ BytecodeIsPopped(pc));
++
++  // Save prior call stack in case we need to resolve during bailout
++  // recovery of inner inlined function. This includes the JSFunction and the
++  // 'call' native function.
++  MOZ_TRY(callInfo.savePriorCallStack(this, current, argc + 2));
++
+   // Shimmy the slots down to remove the native 'call' function.
+   current->shimmySlots(funcDepth - 1);
+ 
+   bool zeroArguments = (argc == 0);
+ 
+   // If no |this| argument was provided, explicitly pass Undefined.
+   // Pushing is safe here, since one stack slot has been removed.
+   if (zeroArguments) {
+     pushConstant(UndefinedValue());
+   } else {
+     // |this| becomes implicit in the call.
+     argc -= 1;
+   }
+ 
+-  CallInfo callInfo(alloc(), pc, /* constructing = */ false,
+-                    /* ignoresReturnValue = */ BytecodeIsPopped(pc));
+   if (!callInfo.init(current, argc)) return abort(AbortReason::Alloc);
+ 
+   // Try to inline the call.
+   if (!zeroArguments) {
+     InliningDecision decision = makeInliningDecision(target, callInfo);
+     switch (decision) {
+       case InliningDecision_Error:
+         return abort(AbortReason::Error);
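
The functional change in this hunk is pure ordering: CallInfo is now built, and the prior call stack captured via savePriorCallStack, before shimmySlots shifts the native 'call' frame away, because bailout recovery of an inner inlined function needs the pre-mutation layout. A schematic sketch of that rule (generic C++, not SpiderMonkey internals):

    #include <vector>

    // Schematic stand-ins: "Stack" is the abstract interpreter stack and
    // the snapshot is whatever recovery data a later bailout would consume.
    using Stack = std::vector<int>;

    Stack SavePriorCallStack(const Stack& stack) {
      return stack;  // capture while the 'call' slot is still present
    }

    void ShimmySlots(Stack& stack) {
      stack.erase(stack.begin());  // drop the native 'call' function slot
    }

    int main() {
      Stack stack = {/*call*/ 1, /*callee*/ 2, /*this*/ 3, /*arg*/ 4};
      Stack snapshot = SavePriorCallStack(stack);  // must happen first
      ShimmySlots(stack);
      // The snapshot still sees the slot the live stack no longer has.
      return static_cast<int>(snapshot.size() - stack.size());  // 1
    }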

+ 17 - 17
rel-257/mozilla-esr60/patches/1523665-67a1.patch

@@ -2,18 +2,18 @@
 # User Gijs Kruitbosch <gijskruitbosch@gmail.com>
 # Date 1562701510 0
 # Node ID b4f105259931fe50fd1313de876ec69f23cbee3e
-# Parent  94641aa87335f47edccb1e44de86183109eada2c
+# Parent  ac43e24a9918f8576b43649598834257cd3772b9
 Bug 1562176 - fix colons in 'save file as' filenames, r=mak
 
 Differential Revision: https://phabricator.services.mozilla.com/D37400
 
-diff --git a/toolkit/components/jsdownloads/src/DownloadPaths.jsm b/toolkit/components/jsdownloads/src/DownloadPaths.jsm
---- a/toolkit/components/jsdownloads/src/DownloadPaths.jsm
-+++ b/toolkit/components/jsdownloads/src/DownloadPaths.jsm
-@@ -18,28 +18,28 @@ Cu.import("resource://gre/modules/XPCOMU
+diff --git a/toolkit/components/downloads/DownloadPaths.jsm b/toolkit/components/downloads/DownloadPaths.jsm
+--- a/toolkit/components/downloads/DownloadPaths.jsm
++++ b/toolkit/components/downloads/DownloadPaths.jsm
+@@ -16,28 +16,28 @@ ChromeUtils.import("resource://gre/modul
  
- XPCOMUtils.defineLazyModuleGetter(this, "AppConstants",
-                                   "resource://gre/modules/AppConstants.jsm");
+ ChromeUtils.defineModuleGetter(this, "AppConstants",
+                                "resource://gre/modules/AppConstants.jsm");
  
  /**
   * Platform-dependent regular expression used by the "sanitize" method.
@@ -38,14 +38,14 @@ diff --git a/toolkit/components/jsdownloads/src/DownloadPaths.jsm b/toolkit/comp
    /* eslint-enable no-control-regex */
  });
  
- this.DownloadPaths = {
+ var DownloadPaths = {
    /**
     * Sanitizes an arbitrary string for use as the local file name of a download.
     * The input is often a document title or a manually edited name. The output
-diff --git a/toolkit/components/jsdownloads/test/unit/test_DownloadPaths.js b/toolkit/components/jsdownloads/test/unit/test_DownloadPaths.js
---- a/toolkit/components/jsdownloads/test/unit/test_DownloadPaths.js
-+++ b/toolkit/components/jsdownloads/test/unit/test_DownloadPaths.js
-@@ -64,21 +64,21 @@ add_task(async function test_sanitize() 
+diff --git a/toolkit/components/downloads/test/unit/test_DownloadPaths.js b/toolkit/components/downloads/test/unit/test_DownloadPaths.js
+--- a/toolkit/components/downloads/test/unit/test_DownloadPaths.js
++++ b/toolkit/components/downloads/test/unit/test_DownloadPaths.js
+@@ -49,21 +49,21 @@ add_task(async function test_sanitize() 
      testSanitize("Directory Listing: /a/b/", "Directory Listing  _a_b_");
    } else if (AppConstants.platform == "macosx") {
      testSanitize(kSpecialChars, "A *?|\"\"<<>>;,+=[]B][=+,;>><<\"\"|?* C");
@@ -70,15 +70,15 @@ diff --git a/toolkit/components/jsdownloads/test/unit/test_DownloadPaths.js b/to
    // Removal of leading and trailing whitespace and dots after conversion.
    testSanitize("  Website  ", "Website");
    testSanitize(". . Website . Page . .", "Website . Page");
-@@ -87,16 +87,19 @@ add_task(async function test_sanitize() 
-   testSanitize("\u1680\u180e\u2000\u2008\u200a . txt", "txt");
-   testSanitize("\u2028\u2029\u202f\u205f\u3000\ufeff . txt", "txt");
- 
-   // Strings with whitespace and dots only.
+@@ -76,16 +76,19 @@ add_task(async function test_sanitize() 
    testSanitize(".", "");
    testSanitize("..", "");
    testSanitize(" ", "");
    testSanitize(" . ", "");
+ 
+   // Stripping of BIDI formatting characters.
+   testSanitize("\u200e \u202b\u202c\u202d\u202etest\x7f\u200f", "test");
+   testSanitize("AB\x7f\u202a\x7f\u202a\x7fCD", "AB CD");
 +
 +  // Stripping of colons:
 +  testSanitize("foo:bar", "foo bar");
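
The new assertions pin down the behavior the fix introduces: the colon joins the set of characters that DownloadPaths.sanitize converts to spaces (with whitespace runs collapsed separately). A plain C++ sketch of just the colon-replacement step, for illustration only; the real implementation is the JS regular expression patched above:

    #include <iostream>
    #include <string>

    // Replace each colon with a space, as the updated regexp does for
    // this character class; whitespace collapsing happens elsewhere.
    std::string ReplaceColons(std::string name) {
      for (char& c : name) {
        if (c == ':') c = ' ';
      }
      return name;
    }

    int main() {
      std::cout << ReplaceColons("foo:bar") << '\n';  // foo bar
      return 0;
    }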

+ 383 - 0
rel-257/mozilla-esr60/patches/1565919-70a1.patch

@@ -0,0 +1,383 @@
+# HG changeset patch
+# User Makoto Kato <m_kato@ga2.so-net.ne.jp>
+# Date 1564839896 0
+# Node ID 15fbd2a9c32d7504bb24fdc153781e14ee82e090
+# Parent  f8bf6887ca0be008e334fd16e5f993fdc6b0ba98
+Bug 1565919 - Don't separate words by contextual-based character. r=Ehsan
+
+The original regression came from bug 1362858, and bug 1418629 wasn't enough to fix it.
+
+Since bug 1362858 we use `CHAR_CLASS_SEPARATOR` in addition to the DOM word separators. But some characters, such as the single quote, `@`, etc., are `CHAR_CLASS_SEPARATOR`, so we may spell-check an incomplete word.
+
+We shouldn't split a word on characters that can be part of an email address, part of a URL, or conditional punctuation.
+
+I also update the test cases for this situation. `<textarea>` is better for spell checking since it can have multiple anonymous text nodes.
+
+Differential Revision: https://phabricator.services.mozilla.com/D39829
+
+diff --git a/editor/libeditor/tests/spellcheck.js b/editor/libeditor/tests/spellcheck.js
+--- a/editor/libeditor/tests/spellcheck.js
++++ b/editor/libeditor/tests/spellcheck.js
+@@ -1,20 +1,36 @@
+-function isSpellingCheckOk(aEditor, aMisspelledWords) {
++function isSpellingCheckOk(aEditor, aMisspelledWords, aTodo = false) {
+   var selcon = aEditor.selectionController;
+   var sel = selcon.getSelection(selcon.SELECTION_SPELLCHECK);
+   var numWords = sel.rangeCount;
+ 
+-  is(numWords, aMisspelledWords.length, "Correct number of misspellings and words.");
++  if (aTodo) {
++    todo_is(
++      numWords,
++      aMisspelledWords.length,
++      "Correct number of misspellings and words."
++    );
++  } else {
++    is(
++      numWords,
++      aMisspelledWords.length,
++      "Correct number of misspellings and words."
++    );
++  }
+ 
+   if (numWords !== aMisspelledWords.length) {
+     return false;
+   }
+ 
+   for (var i = 0; i < numWords; ++i) {
+     var word = String(sel.getRangeAt(i));
+-    is(word, aMisspelledWords[i], "Misspelling is what we think it is.");
++    if (aTodo) {
++      todo_is(word, aMisspelledWords[i], "Misspelling is what we think it is.");
++    } else {
++      is(word, aMisspelledWords[i], "Misspelling is what we think it is.");
++    }
+     if (word !== aMisspelledWords[i]) {
+       return false;
+     }
+   }
+   return true;
+ }
+diff --git a/editor/spellchecker/tests/test_bug1418629.html b/editor/spellchecker/tests/test_bug1418629.html
+--- a/editor/spellchecker/tests/test_bug1418629.html
++++ b/editor/spellchecker/tests/test_bug1418629.html
+@@ -10,87 +10,209 @@
+ </head>
+ <body>
+ <a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=1418629">Mozilla Bug 1418629</a>
+ <p id="display"></p>
+ <div id="content" style="display: none;">
+ 
+ </div>
+ 
+-<input id="input1" autofocus spellcheck="true">
++<input id="input1" spellcheck="true">
++<textarea id="textarea1"></textarea>
++<div id="edit1" contenteditable=true></div>
+ 
+ <script>
+ const {onSpellCheck} = SpecialPowers.Cu.import("resource://testing-common/AsyncSpellCheckTestHelper.jsm", {});
+ 
+ SimpleTest.waitForExplicitFinish();
+ 
+-add_task(async function() {
+-  await new Promise((resolve) => {
+-    SimpleTest.waitForFocus(() => {
+-      SimpleTest.executeSoon(resolve);
+-    }, window);
+-  });
++function getEditor(input) {
++  if (input instanceof HTMLInputElement ||
++      input instanceof HTMLTextAreaElement) {
++    return SpecialPowers.wrap(input).editor;
++  }
++
++  return SpecialPowers.wrap(window).docShell.editor;
++}
+ 
++function resetEditableContent(input) {
++  if (input instanceof HTMLInputElement ||
++      input instanceof HTMLTextAreaElement) {
++    input.value = "";
++    return;
++  }
++  input.innerHTML = "";
++}
++
++async function test_with_single_quote(input) {
+   let misspeltWords = [];
+-  let input = document.getElementById("input1");
+ 
+   input.focus();
+-  input.value = "";
++  resetEditableContent(input);
++
+   synthesizeKey("d");
+   synthesizeKey("o");
+   synthesizeKey("e");
+   synthesizeKey("s");
+ 
+   await new Promise((resolve) => { onSpellCheck(input, resolve); });
++  let editor = getEditor(input);
+   // isSpellingCheckOk is defined in spellcheck.js
+   // eslint-disable-next-line no-undef
+-  ok(isSpellingCheckOk(SpecialPowers.wrap(input).editor, misspeltWords),
+-     "no misspelt words");
++  ok(isSpellingCheckOk(editor, misspeltWords), "no misspelt words");
+ 
+   synthesizeKey("n");
+   synthesizeKey("\'");
+-  is(input.value, "doesn\'", "");
+-
+-  await new Promise((resolve) => { onSpellCheck(input, resolve); });
+-  // isSpellingCheckOk is defined in spellcheck.js
+-  // eslint-disable-next-line no-undef
+-  ok(isSpellingCheckOk(SpecialPowers.wrap(input).editor, misspeltWords),
+-     "don't run spellchecker during inputting word");
+-
+-  synthesizeKey(" ");
+-  is(input.value, "doesn\' ", "");
++  is(input.value || input.textContent, "doesn\'", "");
+ 
+   await new Promise((resolve) => { onSpellCheck(input, resolve); });
+-  misspeltWords.push("doesn\'");
++  // XXX This won't work since mozInlineSpellWordUtil::SplitDOM removes
++  // last single quote unfortunately that is during inputting.
+   // isSpellingCheckOk is defined in spellcheck.js
+   // eslint-disable-next-line no-undef
+-  ok(isSpellingCheckOk(SpecialPowers.wrap(input).editor, misspeltWords),
+-     "should run spellchecker");
+-});
++  todo_is(isSpellingCheckOk(editor, misspeltWords, true), true,
++          "don't run spellchecker during inputting word");
++
++  synthesizeKey(" ");
++  is(input.value || input.textContent, "doesn\' ", "");
+ 
+-async function test_with_twice_characters(ch) {
++  await new Promise((resolve) => { onSpellCheck(input, resolve); });
++  misspeltWords.push("doesn");
++  // isSpellingCheckOk is defined in spellcheck.js
++  // eslint-disable-next-line no-undef
++  ok(isSpellingCheckOk(editor, misspeltWords), "should run spellchecker");
++}
++
++async function test_with_twice_characters(input, ch) {
+   let misspeltWords = [];
+-  let input = document.getElementById("input1");
+ 
+   input.focus();
+-  input.value = "";
++  resetEditableContent(input);
++
+   synthesizeKey("d");
+   synthesizeKey("o");
+   synthesizeKey("e");
+   synthesizeKey("s");
+   synthesizeKey("n");
+   synthesizeKey(ch);
+   synthesizeKey(ch);
+-  is(input.value, "doesn" + ch + ch, "");
++  is(input.value || input.textContent, "doesn" + ch + ch, "");
++
++  // trigger spellchecker
++  synthesizeKey(" ");
+ 
+   await new Promise((resolve) => { onSpellCheck(input, resolve); });
+   misspeltWords.push("doesn");
++  let editor = getEditor(input);
+   // isSpellingCheckOk is defined in spellcheck.js
+   // eslint-disable-next-line no-undef
+-  ok(isSpellingCheckOk(SpecialPowers.wrap(input).editor, misspeltWords),
+-     "should run spellchecker");
++  ok(isSpellingCheckOk(editor, misspeltWords), "should run spellchecker");
++}
++
++async function test_between_single_quote(input) {
++  let misspeltWords = [];
++
++  input.focus();
++  resetEditableContent(input);
++
++  synthesizeKey("\'");
++  synthesizeKey("t");
++  synthesizeKey("e");
++  synthesizeKey("s");
++  synthesizeKey("t");
++  synthesizeKey("\'");
++
++  await new Promise((resolve) => { onSpellCheck(input, resolve); });
++  let editor = getEditor(input);
++  // eslint-disable-next-line no-undef
++  ok(isSpellingCheckOk(editor, misspeltWords),
++     "don't run spellchecker between single qoute");
+ }
+ 
+-add_task(test_with_twice_characters.bind(null, "\'"));
+-add_task(test_with_twice_characters.bind(null, String.fromCharCode(0x2019)));
++async function test_with_email(input) {
++  let misspeltWords = [];
++
++  input.focus();
++  resetEditableContent(input);
++
++  synthesizeKey("t");
++  synthesizeKey("t");
++  synthesizeKey("t");
++  synthesizeKey("t");
++  synthesizeKey("@");
++  synthesizeKey("t");
++  synthesizeKey("t");
++  synthesizeKey("t");
++  synthesizeKey("t");
++  synthesizeKey(".");
++  synthesizeKey("c");
++  synthesizeKey("o");
++  synthesizeKey("m");
++
++  await new Promise((resolve) => { onSpellCheck(input, resolve); });
++  let editor = getEditor(input);
++  // eslint-disable-next-line no-undef
++  ok(isSpellingCheckOk(editor, misspeltWords),
++     "don't run spellchecker for email address");
++
++  synthesizeKey(" ");
++
++  await new Promise((resolve) => { onSpellCheck(input, resolve); });
++  // eslint-disable-next-line no-undef
++  ok(isSpellingCheckOk(editor, misspeltWords),
++     "no misspelt words due to email address");
++}
++
++async function test_with_url(input) {
++  let misspeltWords = [];
++
++  input.focus();
++  resetEditableContent(input);
++
++  synthesizeKey("h");
++  synthesizeKey("t");
++  synthesizeKey("t");
++  synthesizeKey("p");
++  synthesizeKey(":");
++  synthesizeKey("/");
++  synthesizeKey("/");
++  synthesizeKey("t");
++  synthesizeKey("t");
++  synthesizeKey("t");
++  synthesizeKey("t");
++  synthesizeKey(".");
++  synthesizeKey("c");
++  synthesizeKey("o");
++  synthesizeKey("m");
++
++  await new Promise((resolve) => { onSpellCheck(input, resolve); });
++  let editor = getEditor(input);
++  // eslint-disable-next-line no-undef
++  ok(isSpellingCheckOk(editor, misspeltWords),
++     "don't run spellchecker for URL");
++
++  synthesizeKey(" ");
++
++  await new Promise((resolve) => { onSpellCheck(input, resolve); });
++  // eslint-disable-next-line no-undef
++  ok(isSpellingCheckOk(editor, misspeltWords),
++     "no misspelt words due to URL");
++}
++
++SimpleTest.waitForFocus(() => {
++  for (let n of ["input1", "textarea1", "edit1"])  {
++    add_task(test_with_single_quote.bind(null,
++                                         document.getElementById(n)));
++    add_task(test_with_twice_characters.bind(null,
++                                             document.getElementById(n),
++                                             "\'"));
++    add_task(test_with_twice_characters.bind(null,
++                                             document.getElementById(n),
++                                             String.fromCharCode(0x2019)));
++    add_task(test_between_single_quote.bind(null,
++                                            document.getElementById(n)));
++    add_task(test_with_email.bind(null, document.getElementById(n)));
++    add_task(test_with_url.bind(null, document.getElementById(n)));
++  }
++});
+ </script>
+ </body>
+ </html>
+diff --git a/extensions/spellcheck/src/mozInlineSpellWordUtil.cpp b/extensions/spellcheck/src/mozInlineSpellWordUtil.cpp
+--- a/extensions/spellcheck/src/mozInlineSpellWordUtil.cpp
++++ b/extensions/spellcheck/src/mozInlineSpellWordUtil.cpp
+@@ -41,16 +41,22 @@ inline bool IsIgnorableCharacter(char16_
+ //    Some characters (like apostrophes) require characters on each side to be
+ //    part of a word, and are otherwise punctuation.
+ 
+ inline bool IsConditionalPunctuation(char16_t ch) {
+   return (ch == '\'' || ch == 0x2019 ||  // RIGHT SINGLE QUOTATION MARK
+           ch == 0x00B7);                 // MIDDLE DOT
+ }
+ 
++static bool IsAmbiguousDOMWordSeprator(char16_t ch) {
++  // This class may be CHAR_CLASS_SEPARATOR, but it depends on context.
++  return (ch == '@' || ch == ':' || ch == '.' || ch == '/' || ch == '-' ||
++          IsConditionalPunctuation(ch));
++}
++
+ // mozInlineSpellWordUtil::Init
+ 
+ nsresult mozInlineSpellWordUtil::Init(TextEditor* aTextEditor) {
+   if (NS_WARN_IF(!aTextEditor)) {
+     return NS_ERROR_FAILURE;
+   }
+ 
+   mDocument = aTextEditor->GetDocument();
+@@ -386,25 +392,16 @@ CharClass WordSplitState::ClassifyCharac
+     if (ClassifyCharacter(aIndex - 1, false) != CHAR_CLASS_WORD)
+       return CHAR_CLASS_SEPARATOR;
+     // If the previous charatcer is a word-char, make sure that it's not a
+     // special dot character.
+     if (mDOMWordText[aIndex - 1] == '.') return CHAR_CLASS_SEPARATOR;
+ 
+     // now we know left char is a word-char, check the right-hand character
+     if (aIndex == int32_t(mDOMWordText.Length() - 1)) {
+-      if (mDOMWordText[aIndex] == '\'' || mDOMWordText[aIndex] == 0x2019) {
+-        nsUGenCategory prevCategory =
+-            mozilla::unicode::GetGenCategory(mDOMWordText[aIndex - 1]);
+-        if (prevCategory == nsUGenCategory::kLetter ||
+-            prevCategory == nsUGenCategory::kNumber) {
+-          // If single quotation mark is last, we don't return separator yet.
+-          return CHAR_CLASS_WORD;
+-        }
+-      }
+       return CHAR_CLASS_SEPARATOR;
+     }
+ 
+     if (ClassifyCharacter(aIndex + 1, false) != CHAR_CLASS_WORD)
+       return CHAR_CLASS_SEPARATOR;
+     // If the next charatcer is a word-char, make sure that it's not a
+     // special dot character.
+     if (mDOMWordText[aIndex + 1] == '.') return CHAR_CLASS_SEPARATOR;
+@@ -600,21 +597,23 @@ static bool TextNodeContainsDOMWordSepar
+   int32_t end = std::min(aBeforeOffset, int32_t(textFragment->GetLength()));
+   bool ok = textFragment->AppendTo(text, 0, end, mozilla::fallible);
+   if(!ok)
+     return false;
+ 
+   WordSplitState state(nullptr, text, 0, end);
+   for (int32_t i = end - 1; i >= 0; --i) {
+     if (IsDOMWordSeparator(textFragment->CharAt(i)) ||
+-        state.ClassifyCharacter(i, true) == CHAR_CLASS_SEPARATOR) {
++        (!IsAmbiguousDOMWordSeprator(textFragment->CharAt(i)) &&
++         state.ClassifyCharacter(i, true) == CHAR_CLASS_SEPARATOR)) {
+       // Be greedy, find as many separators as we can
+       for (int32_t j = i - 1; j >= 0; --j) {
+         if (IsDOMWordSeparator(textFragment->CharAt(j)) ||
+-            state.ClassifyCharacter(j, true) == CHAR_CLASS_SEPARATOR) {
++            (!IsAmbiguousDOMWordSeprator(textFragment->CharAt(j)) &&
++             state.ClassifyCharacter(j, true) == CHAR_CLASS_SEPARATOR)) {
+           i = j;
+         } else {
+           break;
+         }
+       }
+       *aSeparatorOffset = i;
+       return true;
+     }
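
The core of the fix is visible in the last hunk: a character that merely classifies as CHAR_CLASS_SEPARATOR no longer breaks a DOM word when it is one of the context-dependent characters (@, :, ., /, -, apostrophes) that may sit inside an email address, a URL, or a contraction. A standalone sketch of that two-stage test, with a deliberately crude classifier and hypothetical helper names:

    #include <string>

    static bool IsHardSeparator(char16_t ch) {
      return ch == u' ' || ch == u'\t' || ch == u'\n';
    }

    // The context-dependent set from IsAmbiguousDOMWordSeprator() above.
    static bool IsAmbiguous(char16_t ch) {
      return ch == u'@' || ch == u':' || ch == u'.' || ch == u'/' ||
             ch == u'-' || ch == u'\'' || ch == u'\u2019';
    }

    // Crude stand-in for WordSplitState::ClassifyCharacter(): anything
    // that is not an ASCII letter or digit counts as a separator.
    static bool ClassifiesAsSeparator(char16_t ch) {
      bool alnum = (ch >= u'a' && ch <= u'z') ||
                   (ch >= u'A' && ch <= u'Z') ||
                   (ch >= u'0' && ch <= u'9');
      return !alnum;
    }

    static bool BreaksWord(char16_t ch) {
      return IsHardSeparator(ch) ||
             (!IsAmbiguous(ch) && ClassifiesAsSeparator(ch));
    }

    int main() {
      // "ttt@ttt.com ": '@' and '.' are ambiguous, so nothing inside the
      // address breaks the word; only the trailing space does.
      const std::u16string text = u"ttt@ttt.com ";
      int breaks = 0;
      for (char16_t ch : text) {
        if (BreaksWord(ch)) ++breaks;
      }
      return breaks;  // 1
    }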

+ 7 - 7
comm-esr60/mozilla-esr60/patches/1566465-70a1.patch → rel-257/mozilla-esr60/patches/1566465-70a1.patch

@@ -2,7 +2,7 @@
 # User Christian Holler <choller@mozilla.com>
 # Date 1564559465 0
 # Node ID 3c6c96c10ba41a91b8ea9130c05ebfbe9730e803
-# Parent  b6f1acb81dd0010c8b7465be4c475a1273a41244
+# Parent  20004f472050540a6d225db12e83bcc3a28103b4
 Bug 1566465 - Fix crash in Http2Session::RecvAltSvc. r=dragana
 
 Differential Revision: https://phabricator.services.mozilla.com/D38197
@@ -10,10 +10,10 @@ Differential Revision: https://phabricator.services.mozilla.com/D38197
 diff --git a/netwerk/protocol/http/Http2Session.cpp b/netwerk/protocol/http/Http2Session.cpp
 --- a/netwerk/protocol/http/Http2Session.cpp
 +++ b/netwerk/protocol/http/Http2Session.cpp
-@@ -2422,16 +2422,17 @@ Http2Session::RecvAltSvc(Http2Session *s
-     // If the origin field is filled in the frame, the frame should be ignored
+@@ -2500,16 +2500,17 @@ nsresult Http2Session::RecvAltSvc(Http2S
      if (!origin.IsEmpty()) {
-       LOG(("Http2Session %p Alt-Svc pulled stream has non empty origin\n", self));
+       LOG(("Http2Session %p Alt-Svc pulled stream has non empty origin\n",
+            self));
        self->ResetDownstreamState();
        return NS_OK;
      }
@@ -22,9 +22,9 @@ diff --git a/netwerk/protocol/http/Http2Session.cpp b/netwerk/protocol/http/Http
 +        !self->mInputFrameDataStream ||
          !self->mInputFrameDataStream->Transaction() ||
          !self->mInputFrameDataStream->Transaction()->RequestHead()) {
-       LOG3(("Http2Session::RecvAltSvc %p got frame w/o origin on invalid stream", self));
+       LOG3(
+           ("Http2Session::RecvAltSvc %p got frame w/o origin on invalid stream",
+            self));
        self->ResetDownstreamState();
        return NS_OK;
      }
- 
-     self->mInputFrameDataStream->Transaction()->RequestHead()->Origin(origin);
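
The one-line fix adds !self->mInputFrameDataStream ahead of the existing tests: for a pulled ALTSVC frame the stream pointer itself may be null, and short-circuit evaluation is what keeps the later Transaction()/RequestHead() calls from dereferencing it. A minimal sketch with stand-in types (not the Gecko classes):

    // Stand-in types; every getter returns null to model the crash case.
    struct RequestHead {};
    struct Transaction {
      RequestHead* Head() { return nullptr; }
    };
    struct Stream {
      Transaction* Txn() { return nullptr; }
    };

    // Each test guards the dereference to its right; dropping or
    // reordering the first one reintroduces the crash the patch fixes.
    bool ShouldIgnoreAltSvcFrame(Stream* stream) {
      return !stream || !stream->Txn() || !stream->Txn()->Head();
    }

    int main() {
      return ShouldIgnoreAltSvcFrame(nullptr) ? 0 : 1;  // null: ignored
    }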

+ 1 - 1
comm-esr60/mozilla-esr60/patches/1578303_enable_loginmanagercontextmenu-71a1.patch → rel-257/mozilla-esr60/patches/1578303_enable_loginmanagercontextmenu-71a1.patch

@@ -2,7 +2,7 @@
 # User Ian Neal <iann_cvs@blueyonder.co.uk>
 # Date 1567531902 0
 # Node ID 7532820fe4b9e8605dc75edf5731336e99ad78fe
-# Parent  25a3f55a0c461f85d230304da09cca9bbab0ee5e
+# Parent  f9b58406c73be12a080c33ae03fd1bb017f10caa
 Bug 1578303 - Enable LoginManagerContextMenu.jsm for SeaMonkey r=MattN
 
 Differential Revision: https://phabricator.services.mozilla.com/D44408

File diff suppressed because it is too large
+ 23820 - 0
rel-257/mozilla-esr60/patches/1602261-retsulcksat.patch


+ 3 - 21
comm-esr60/mozilla-esr60/patches/1602262-safebrowsing-pref.patch → rel-257/mozilla-esr60/patches/1602262-safebrowsing-pref.patch

@@ -1,13 +1,13 @@
 # HG changeset patch
 # User Frank-Rainer Grahl <frgrahl@gmx.net>
 # Date 1560180720 -7200
-# Parent  49e03697e7d456f0bd0a9a6608c999e022459c52
+# Parent  7597a60149ae3d25be5f12cdb1452470f32d61e0
 Bug 1602262 - Changes to Safe Browsing ESR60 lost in the patch trenches of the trees. r=IanN a=IanN
 
 diff --git a/modules/libpref/init/all.js b/modules/libpref/init/all.js
 --- a/modules/libpref/init/all.js
 +++ b/modules/libpref/init/all.js
-@@ -5331,16 +5331,17 @@ pref("urlclassifier.downloadBlockTable",
+@@ -5525,16 +5525,17 @@ pref("urlclassifier.downloadBlockTable",
  pref("urlclassifier.passwordAllowTable", "goog-passwordwhite-proto");
  
  // Tables for tracking protection
@@ -15,7 +15,7 @@ diff --git a/modules/libpref/init/all.js b/modules/libpref/init/all.js
  pref("urlclassifier.trackingWhitelistTable", "test-trackwhite-simple,mozstd-trackwhite-digest256");
  
  // These tables will never trigger a gethash call.
- pref("urlclassifier.disallow_completions", "test-malware-simple,test-harmful-simple,test-phish-simple,test-unwanted-simple,test-track-simple,test-trackwhite-simple,test-block-simple,goog-downloadwhite-digest256,base-track-digest256,mozstd-trackwhite-digest256,content-track-digest256,mozplugin-block-digest256,mozplugin2-block-digest256,block-flash-digest256,except-flash-digest256,allow-flashallow-digest256,except-flashallow-digest256,block-flashsubdoc-digest256,except-flashsubdoc-digest256,except-flashinfobar-digest256");
+ pref("urlclassifier.disallow_completions", "test-malware-simple,test-harmful-simple,test-phish-simple,test-unwanted-simple,test-track-simple,test-trackwhite-simple,test-block-simple,goog-downloadwhite-digest256,base-track-digest256,mozstd-trackwhite-digest256,content-track-digest256,mozplugin-block-digest256,mozplugin2-block-digest256,block-flash-digest256,except-flash-digest256,allow-flashallow-digest256,except-flashallow-digest256,block-flashsubdoc-digest256,except-flashsubdoc-digest256,except-flashinfobar-digest256,goog-passwordwhite-proto,ads-track-digest256,social-track-digest256,analytics-track-digest256");
 +//pref("urlclassifier.disallow_completions", "test-malware-simple,test-harmful-simple,test-phish-simple,test-unwanted-simple,test-track-simple,test-trackwhite-simple,test-block-simple,goog-downloadwhite-digest256,base-track-digest256,mozstd-trackwhite-digest256,content-track-digest256,mozplugin-block-digest256,mozplugin2-block-digest256,block-flash-digest256,except-flash-digest256,allow-flashallow-digest256,except-flashallow-digest256,block-flashsubdoc-digest256,except-flashsubdoc-digest256,except-flashinfobar-digest256,goog-passwordwhite-proto");
  
  // Number of random entries to send with a gethash request
@@ -25,21 +25,3 @@ diff --git a/modules/libpref/init/all.js b/modules/libpref/init/all.js
  pref("urlclassifier.gethash.timeout_ms", 5000);
  // Update server response timeout for Safe Browsing
  pref("urlclassifier.update.response_timeout_ms", 30000);
-@@ -5362,16 +5363,17 @@ pref("browser.safebrowsing.allowOverride
- // Any changes must be coordinated with them.
- #ifdef MOZILLA_OFFICIAL
- pref("browser.safebrowsing.id", "navclient-auto-ffox");
- #else
- pref("browser.safebrowsing.id", "Firefox");
- #endif
- 
- // Download protection
-+pref("browser.safebrowsing.downloads.enabled", true);
- pref("browser.safebrowsing.downloads.remote.enabled", true);
- pref("browser.safebrowsing.downloads.remote.timeout_ms", 10000);
- pref("browser.safebrowsing.downloads.remote.url", "https://sb-ssl.google.com/safebrowsing/clientreport/download?key=%GOOGLE_API_KEY%");
- pref("browser.safebrowsing.downloads.remote.block_dangerous",            true);
- pref("browser.safebrowsing.downloads.remote.block_dangerous_host",       true);
- pref("browser.safebrowsing.downloads.remote.block_potentially_unwanted", true);
- pref("browser.safebrowsing.downloads.remote.block_uncommon",             true);
- 

+ 18 - 0
rel-257/mozilla-esr60/patches/series

@@ -34,3 +34,21 @@ NOBUG-0c5f5c2e2a86-64a1.patch
 1506027-1-65a1.patch
 1464834-1-62a1.patch
 1464834-2-62a1.patch
+1602262-safebrowsing-pref.patch
+1478815-7partial-63a1.patch
+1372458-63a1.patch
+1472672-63a1.patch
+1520909-65a1.patch
+1445671-61a1.patch
+1602261-retsulcksat.patch
+1469790-63a1.patch
+1491467-64a1.patch
+1509867-65a1.patch
+1516605-66a1.patch
+1513236-68a1.patch
+1362858-1partial-56a1.patch
+1418629-68a1.patch
+1565919-70a1.patch
+1562176-70a1.patch
+1566465-70a1.patch
+1578303_enable_loginmanagercontextmenu-71a1.patch

Some files were not shown because too many files changed in this diff