
Some mach vendor backports

Ian Neal, 4 months ago (commit fd23bbf28d)
28 changed files with 6502 additions and 0 deletions
  1. mozilla-release/patches/1519358-66a1.patch (+31, -0)
  2. mozilla-release/patches/1540655-01-70a1.patch (+43, -0)
  3. mozilla-release/patches/1561102-1-69a1.patch (+30, -0)
  4. mozilla-release/patches/1561102-2-69a1.patch (+30, -0)
  5. mozilla-release/patches/1582114-71a1.patch (+43, -0)
  6. mozilla-release/patches/1630047-77a1.patch (+192, -0)
  7. mozilla-release/patches/1630668-77a1.patch (+54, -0)
  8. mozilla-release/patches/1632688-77a1.patch (+65, -0)
  9. mozilla-release/patches/1634391-2-78a1.patch (+155, -0)
  10. mozilla-release/patches/1637845-01-79a1.patch (+300, -0)
  11. mozilla-release/patches/1637845-02-79a1.patch (+2038, -0)
  12. mozilla-release/patches/1637845-03-79a1.patch (+187, -0)
  13. mozilla-release/patches/1637845-04-79a1.patch (+121, -0)
  14. mozilla-release/patches/1637845-05-79a1.patch (+117, -0)
  15. mozilla-release/patches/1637845-06-79a1.patch (+228, -0)
  16. mozilla-release/patches/1637845-07-79a1.patch (+80, -0)
  17. mozilla-release/patches/1637845-08-79a1.patch (+261, -0)
  18. mozilla-release/patches/1637845-09-79a1.patch (+421, -0)
  19. mozilla-release/patches/1637845-10-79a1.patch (+486, -0)
  20. mozilla-release/patches/1637845-11-79a1.patch (+260, -0)
  21. mozilla-release/patches/1637845-12-79a1.patch (+145, -0)
  22. mozilla-release/patches/1637845-13-79a1.patch (+133, -0)
  23. mozilla-release/patches/1637845-14-79a1.patch (+95, -0)
  24. mozilla-release/patches/1637845-15-79a1.patch (+110, -0)
  25. mozilla-release/patches/1637845-16-79a1.patch (+617, -0)
  26. mozilla-release/patches/1637845-17-79a1.patch (+142, -0)
  27. mozilla-release/patches/1637845-18-79a1.patch (+91, -0)
  28. mozilla-release/patches/series (+27, -0)

mozilla-release/patches/1519358-66a1.patch (+31, -0)

@@ -0,0 +1,31 @@
+# HG changeset patch
+# User Alex Chronopoulos <achronop@gmail.com>
+# Date 1547502392 0
+# Node ID 8377d1696ac403b0e9d1b2fb9a855b7717f576a8
+# Parent  ceb0fab694ea733ec67ca50456e4997bf0023c7f
+Bug 1519358 - Update upstream commint hash when import libdav1d. r=TD-Linux
+
+Differential Revision: https://phabricator.services.mozilla.com/D16298
+
+diff --git a/python/mozbuild/mozbuild/vendor_dav1d.py b/python/mozbuild/mozbuild/vendor_dav1d.py
+--- a/python/mozbuild/mozbuild/vendor_dav1d.py
++++ b/python/mozbuild/mozbuild/vendor_dav1d.py
+@@ -91,17 +91,17 @@ Please set a repository url with --repo 
+         filename = mozpath.join(target, 'moz.yaml')
+         with open(filename) as f:
+             yaml = f.read()
+ 
+         prefix = '  release: commit'
+         if prefix in yaml:
+             new_yaml = re.sub(prefix + ' [v\.a-f0-9]+.*$',
+                               prefix + ' %s (%s).' % (revision, timestamp),
+-                              yaml)
++                              yaml, flags=re.MULTILINE)
+         else:
+             new_yaml = '%s\n\n%s %s.' % (yaml, prefix, revision)
+ 
+         if yaml != new_yaml:
+             with open(filename, 'w') as f:
+                 f.write(new_yaml)
+ 
+     def update_vcs_version(self, revision, vendor_dir, glue_dir):
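
For context on the one-line fix above: without re.MULTILINE, the '$' anchor only matches at the end of the whole string, so the 'release: commit ...' line in the middle of a multi-line moz.yaml never matches and the file is silently left unchanged. A minimal self-contained sketch (the moz.yaml snippet and commit hashes below are made up for illustration):

    import re

    yaml_text = ('origin:\n'
                 '  release: commit deadbeef (2019-01-01T00:00:00.000).\n'
                 'vendoring:\n'
                 '  url: https://code.videolan.org/videolan/dav1d\n')
    prefix = '  release: commit'
    pattern = prefix + r' [v\.a-f0-9]+.*$'
    replacement = prefix + ' cafebabe (2019-01-14T21:46:32.000).'

    # Without MULTILINE the pattern never matches a mid-file line, so nothing changes.
    assert re.sub(pattern, replacement, yaml_text) == yaml_text

    # With MULTILINE, '$' also matches at each line end and the hash is rewritten.
    assert 'cafebabe' in re.sub(pattern, replacement, yaml_text, flags=re.MULTILINE)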

mozilla-release/patches/1540655-01-70a1.patch (+43, -0)

@@ -0,0 +1,43 @@
+# HG changeset patch
+# User Andreas Tolfsen <ato@sny.no>
+# Date 1565960285 0
+# Node ID f2b9955b0822a27b92a52b846bb8b7b894453800
+# Parent  d1b7ccd2f6dfb9dd8b51fa1fb7957eab05644a58
+bug 1540655: build: sort MACH_MODULES; r=firefox-build-system-reviewers,mshal
+
+Differential Revision: https://phabricator.services.mozilla.com/D37006
+
+diff --git a/build/mach_bootstrap.py b/build/mach_bootstrap.py
+--- a/build/mach_bootstrap.py
++++ b/build/mach_bootstrap.py
+@@ -36,27 +36,27 @@ Press ENTER/RETURN to continue or CTRL+c
+ 
+ # Individual files providing mach commands.
+ MACH_MODULES = [
+     'build/valgrind/mach_commands.py',
+     'devtools/shared/css/generated/mach_commands.py',
+     'dom/bindings/mach_commands.py',
+     'js/src/devtools/rootAnalysis/mach_commands.py',
+     'layout/tools/reftest/mach_commands.py',
+-    'python/mach_commands.py',
+     'python/mach/mach/commands/commandinfo.py',
+     'python/mach/mach/commands/settings.py',
++    'python/mach_commands.py',
+     'python/mozboot/mozboot/mach_commands.py',
+-    'python/mozbuild/mozbuild/mach_commands.py',
+     'python/mozbuild/mozbuild/artifact_commands.py',
++    'python/mozbuild/mozbuild/backend/mach_commands.py',
+     'python/mozbuild/mozbuild/build_commands.py',
+-    'python/mozbuild/mozbuild/backend/mach_commands.py',
+     'python/mozbuild/mozbuild/code-analysis/mach_commands.py',
+     'python/mozbuild/mozbuild/compilation/codecomplete.py',
+     'python/mozbuild/mozbuild/frontend/mach_commands.py',
++    'python/mozbuild/mozbuild/mach_commands.py',
+     'python/mozperftest/mozperftest/mach_commands.py',
+     'testing/awsy/mach_commands.py',
+     'testing/firefox-ui/mach_commands.py',
+     'testing/geckodriver/mach_commands.py',
+     'testing/mach_commands.py',
+     'testing/marionette/mach_commands.py',
+     'testing/mochitest/mach_commands.py',
+     'testing/mozharness/mach_commands.py',

mozilla-release/patches/1561102-1-69a1.patch (+30, -0)

@@ -0,0 +1,30 @@
+# HG changeset patch
+# User Mike Hommey <mh+mozilla@glandium.org>
+# Date 1561494344 0
+# Node ID 021c05e47e6f4932a63fc1eeabc3c965fae6e882
+# Parent  ceb0fab694ea733ec67ca50456e4997bf0023c7f
+Bug 1561102 - Add license exception for the mach crate. r=chmanchester
+
+Differential Revision: https://phabricator.services.mozilla.com/D35743
+
+diff --git a/python/mozbuild/mozbuild/vendor_rust.py b/python/mozbuild/mozbuild/vendor_rust.py
+--- a/python/mozbuild/mozbuild/vendor_rust.py
++++ b/python/mozbuild/mozbuild/vendor_rust.py
+@@ -155,16 +155,17 @@ Please commit or stash these changes bef
+ 
+     # This whitelist should only be used for packages that use an acceptable
+     # license, but that also need to explicitly mentioned in about:license.
+     RUNTIME_LICENSE_PACKAGE_WHITELIST = {
+         'BSD-2-Clause': [
+             'arrayref',
+             'cloudabi',
+             'Inflector',
++            'mach',
+         ],
+         'BSD-3-Clause': [
+             'sha1',
+         ]
+     }
+ 
+     # This whitelist should only be used for packages that use a
+     # license-file and for which the license-file entry has been

mozilla-release/patches/1561102-2-69a1.patch (+30, -0)

@@ -0,0 +1,30 @@
+# HG changeset patch
+# User Mike Hommey <mh+mozilla@glandium.org>
+# Date 1561668830 0
+# Node ID 50b60c0b65c53d08657018328a85298a7ac43cf8
+# Parent  75dac74822ae7eb5547ad7b08cdea03731845f26
+Bug 1561102 - Add mach crate to license.html as well. r=froydnj
+
+Differential Revision: https://phabricator.services.mozilla.com/D35871
+
+diff --git a/toolkit/content/license.html b/toolkit/content/license.html
+--- a/toolkit/content/license.html
++++ b/toolkit/content/license.html
+@@ -2873,16 +2873,17 @@ SOFTWARE.
+ 
+     <h1><a id="bsd2clause"></a>BSD 2-Clause License</h1>
+ 
+     <p>This license applies to files in the following directories:
+     <ul>
+         <li><code>third_party/rust/arrayref</code></li>
+         <li><code>third_party/rust/cloudabi</code></li>
+         <li><code>third_party/rust/Inflector</code></li>
++        <li><code>third_party/rust/mach</code></li>
+     </ul>
+     See the individual LICENSE files for copyright owners.</p>
+ 
+ <pre>
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+ 

mozilla-release/patches/1582114-71a1.patch (+43, -0)

@@ -0,0 +1,43 @@
+# HG changeset patch
+# User Nathan Froyd <froydnj@mozilla.com>
+# Date 1568815338 0
+# Node ID 453b6ebf4cbd85a2b2fabc6b3c49bddfd853efc7
+# Parent  a6a82d94f94f13db80f75cad61419f96e1cd48f5
+Bug 1582114 - add the Zlib license to permitted licenses for Rust crates; r=mhoye
+
+New releases of the `adler32-rs` crate have this license, and we already
+incorporate zlib into Firefox, so crates using this license should be allowed.
+
+..and since `adler32-rs` now has an obviously acceptable license, we can
+remove it from the list of build-time exceptions.
+
+Differential Revision: https://phabricator.services.mozilla.com/D46283
+
+diff --git a/python/mozbuild/mozbuild/vendor_rust.py b/python/mozbuild/mozbuild/vendor_rust.py
+--- a/python/mozbuild/mozbuild/vendor_rust.py
++++ b/python/mozbuild/mozbuild/vendor_rust.py
+@@ -134,23 +134,23 @@ Please commit or stash these changes bef
+         # To encourage people to remember to do that, we do not whitelist
+         # the licenses themselves, and we require the packages to be added
+         # to RUNTIME_LICENSE_PACKAGE_WHITELIST below.
+         'CC0-1.0',
+         'ISC',
+         'MIT',
+         'MPL-2.0',
+         'Unlicense',
++        'Zlib',
+     ]
+ 
+     # Licenses for code used at build time (e.g. code generators). Please see the above
+     # comments before adding anything to this list.
+     BUILDTIME_LICENSE_WHITELIST = {
+         'BSD-3-Clause': [
+-            'adler32',
+             'bindgen',
+             'fuchsia-zircon',
+             'fuchsia-zircon-sys',
+             'fuchsia-cprng',
+         ]
+     }
+ 
+     # This whitelist should only be used for packages that use an acceptable

mozilla-release/patches/1630047-77a1.patch (+192, -0)

@@ -0,0 +1,192 @@
+# HG changeset patch
+# User Tom Prince <mozilla@hocat.ca>
+# Date 1586989009 0
+# Node ID 3910c83dfabc63a963a465fdb89e4663aad34c45
+# Parent  4de4a42c503b09f419ea0ec3e922a7199bffb958
+Bug 1630047 - Allow mozversioncontrol to add or remove multiple files at once; r=ahal
+
+Differential Revision: https://phabricator.services.mozilla.com/D70938
+
+diff --git a/python/mozbuild/mozbuild/vendor_aom.py b/python/mozbuild/mozbuild/vendor_aom.py
+--- a/python/mozbuild/mozbuild/vendor_aom.py
++++ b/python/mozbuild/mozbuild/vendor_aom.py
+@@ -194,15 +194,14 @@ Please commit or stash these changes bef
+         self.clean_upstream(vendor_dir)
+         glue_dir = mozpath.join(self.topsrcdir, 'media/libaom')
+         self.log(logging.INFO, 'generate_sources', {},
+                  '''Generating build files...''')
+         self.generate_sources(glue_dir)
+         self.log(logging.INFO, 'update_readme', {},
+                  '''Updating README_MOZILLA.''')
+         self.update_readme(commit, timestamp, glue_dir)
+-        self.repository.add_remove_files(vendor_dir)
+         self.log(logging.INFO, 'add_remove_files', {},
+                  '''Registering changes with version control.''')
+-        self.repository.add_remove_files(vendor_dir)
++        self.repository.add_remove_files(vendor_dir, glue_dir)
+         self.repository.add_remove_files(glue_dir)
+         self.log(logging.INFO, 'done', {'revision': revision},
+                  '''Update to aom version '{revision}' ready to commit.''')
+diff --git a/python/mozbuild/mozbuild/vendor_dav1d.py b/python/mozbuild/mozbuild/vendor_dav1d.py
+--- a/python/mozbuild/mozbuild/vendor_dav1d.py
++++ b/python/mozbuild/mozbuild/vendor_dav1d.py
+@@ -162,12 +162,11 @@ Please commit or stash these changes bef
+         self.log(logging.INFO, 'update_moz.yaml', {},
+                  '''Updating moz.yaml.''')
+         self.update_yaml(commit, timestamp, glue_dir)
+         self.log(logging.INFO, 'update_vcs_version', {},
+                  '''Updating vcs_version.h.''')
+         self.update_vcs_version(commit, vendor_dir, glue_dir)
+         self.log(logging.INFO, 'add_remove_files', {},
+                  '''Registering changes with version control.''')
+-        self.repository.add_remove_files(vendor_dir)
+-        self.repository.add_remove_files(glue_dir)
++        self.repository.add_remove_files(vendor_dir, glue_dir)
+         self.log(logging.INFO, 'done', {'revision': revision},
+                  '''Update to dav1d version '{revision}' ready to commit.''')
+diff --git a/python/mozversioncontrol/mozversioncontrol/__init__.py b/python/mozversioncontrol/mozversioncontrol/__init__.py
+--- a/python/mozversioncontrol/mozversioncontrol/__init__.py
++++ b/python/mozversioncontrol/mozversioncontrol/__init__.py
+@@ -189,23 +189,23 @@ class Repository(object):
+ 
+         ``diff_filter`` works the same as `get_changed_files`.
+         ``upstream`` is a remote ref to compare against. If unspecified,
+         this will be determined automatically. If there is no remote ref,
+         a MissingUpstreamRepo exception will be raised.
+         """
+ 
+     @abc.abstractmethod
+-    def add_remove_files(self, path):
+-        '''Add and remove files under `path` in this repository's working copy.
++    def add_remove_files(self, *paths):
++        '''Add and remove files under `paths` in this repository's working copy.
+         '''
+ 
+     @abc.abstractmethod
+-    def forget_add_remove_files(self, path):
+-        '''Undo the effects of a previous add_remove_files call for `path`.
++    def forget_add_remove_files(self, *paths):
++        '''Undo the effects of a previous add_remove_files call for `paths`.
+         '''
+ 
+     @abc.abstractmethod
+     def get_files_in_working_directory(self):
+         """Obtain a list of managed files in the working directory."""
+ 
+     @abc.abstractmethod
+     def working_directory_clean(self, untracked=False, ignored=False):
+@@ -343,26 +343,26 @@ class HgRepository(Repository):
+             template = self._files_template(diff_filter)
+             return self._run('log', '-r', rev, '-T', template).splitlines()
+ 
+     def get_outgoing_files(self, diff_filter='ADM', upstream='default'):
+         template = self._files_template(diff_filter)
+         return self._run('outgoing', '-r', '.', '--quiet',
+                          '--template', template, upstream, return_codes=(1,)).split()
+ 
+-    def add_remove_files(self, path):
+-        args = ['addremove', path]
++    def add_remove_files(self, *paths):
++        args = ['addremove'] + list(paths)
+         m = re.search(r'\d+\.\d+', self.tool_version)
+         simplified_version = float(m.group(0)) if m else 0
+         if simplified_version >= 3.9:
+             args = ['--config', 'extensions.automv='] + args
+         self._run(*args)
+ 
+-    def forget_add_remove_files(self, path):
+-        self._run('forget', path)
++    def forget_add_remove_files(self, paths):
++        self._run('forget', *paths)
+ 
+     def get_files_in_working_directory(self):
+         # Can return backslashes on Windows. Normalize to forward slashes.
+         return list(p.replace('\\', '/') for p in
+                     self._run(b'files', b'-0').split('\0') if p)
+ 
+     def working_directory_clean(self, untracked=False, ignored=False):
+         args = ['status', '--modified', '--added', '--removed',
+@@ -465,21 +465,21 @@ class GitRepository(Repository):
+         if upstream == 'default':
+             upstream = self.base_ref
+ 
+         compare = '{}..HEAD'.format(upstream)
+         files = self._run('log', '--name-only', '--diff-filter={}'.format(diff_filter.upper()),
+                           '--oneline', '--pretty=format:', compare).splitlines()
+         return [f for f in files if f]
+ 
+-    def add_remove_files(self, path):
+-        self._run('add', path)
++    def add_remove_files(self, paths):
++        self._run('add', *paths)
+ 
+-    def forget_add_remove_files(self, path):
+-        self._run('reset', path)
++    def forget_add_remove_files(self, paths):
++        self._run('reset', *paths)
+ 
+     def get_files_in_working_directory(self):
+         return self._run('ls-files', '-z').split('\0')
+ 
+     def working_directory_clean(self, untracked=False, ignored=False):
+         args = ['status', '--porcelain']
+ 
+         # Even in --porcelain mode, behavior is affected by the
+diff --git a/tools/lint/hooks_clang_format.py.1630047.later b/tools/lint/hooks_clang_format.py.1630047.later
+new file mode 100644
+--- /dev/null
++++ b/tools/lint/hooks_clang_format.py.1630047.later
+@@ -0,0 +1,24 @@
++--- hooks_clang_format.py
+++++ hooks_clang_format.py
++@@ -48,20 +48,17 @@ def run_clang_format(hooktype, changedFi
++     if os.name == "nt":
++         clang_format_cmd = ["sh", "mach"] + arguments
++     else:
++         clang_format_cmd = [os.path.join(topsrcdir, "mach")] + arguments
++     if "commit" in hooktype:
++         # don't prevent commits, just display the clang-format results
++         subprocess.call(clang_format_cmd)
++ 
++-        # Add the modified files back to the repo (expect a string)
++-        # one by one (fails otherwise, see bug #1541409)
++-        for f in path_list:
++-            vcs.add_remove_files(f)
+++        vcs.add_remove_files(*path_list)
++ 
++         return False
++     print("warning: '{}' is not a valid clang-format hooktype".format(hooktype))
++     return False
++ 
++ 
++ def hg(ui, repo, node, **kwargs):
++     print(
+diff --git a/tools/lint/hooks_js_format.py.1630047.later b/tools/lint/hooks_js_format.py.1630047.later
+new file mode 100644
+--- /dev/null
++++ b/tools/lint/hooks_js_format.py.1630047.later
+@@ -0,0 +1,24 @@
++--- hooks_js_format.py
+++++ hooks_js_format.py
++@@ -43,20 +43,17 @@ def run_js_format(hooktype, changedFiles
++     if os.name == "nt":
++         js_format_cmd = ["sh", "mach"] + arguments
++     else:
++         js_format_cmd = [os.path.join(topsrcdir, "mach")] + arguments
++     if "commit" in hooktype:
++         # don't prevent commits, just display the eslint and prettier results
++         subprocess.call(js_format_cmd)
++ 
++-        # Add the modified files back to the repo (expect a string)
++-        # one by one (fails otherwise, see bug #1541409)
++-        for f in path_list:
++-            vcs.add_remove_files(f)
+++        vcs.add_remove_files(*path_list)
++ 
++         return False
++     print("warning: '{}' is not a valid js-format hooktype".format(hooktype))
++     return False
++ 
++ 
++ def git():
++     hooktype = os.path.basename(__file__)
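
To illustrate the *paths calling convention introduced above (a rough stand-in, not the real mozversioncontrol class): callers such as the dav1d and aom vendoring scripts can now register several directories in one call, and the VCS receives all paths in a single command instead of one subprocess per path.

    import subprocess

    class GitRepositorySketch:
        '''Illustrative stand-in for mozversioncontrol's GitRepository.'''

        def _run(self, *args):
            # The real class builds the command and captures output; here we just run git.
            subprocess.check_call(('git',) + args)

        def add_remove_files(self, *paths):
            # Variadic signature: one 'git add path1 path2 ...' covering every path.
            self._run('add', *paths)

        def forget_add_remove_files(self, *paths):
            self._run('reset', *paths)

    # repo = GitRepositorySketch()
    # repo.add_remove_files('third_party/dav1d', 'media/libdav1d')  # single git invocation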

mozilla-release/patches/1630668-77a1.patch (+54, -0)

@@ -0,0 +1,54 @@
+# HG changeset patch
+# User Ricky Stewart <rstewart@mozilla.com>
+# Date 1587056839 0
+# Node ID b411dd8c786a56c51c3e3c31d293358fa2038b69
+# Parent  e080901ca03098676ff86e1407669f50104c3cd2
+Bug 1630668 - Fix incompatible type signatures in subclasses of Repository r=dmajor
+
+Differential Revision: https://phabricator.services.mozilla.com/D71200
+
+diff --git a/python/mozversioncontrol/mozversioncontrol/__init__.py b/python/mozversioncontrol/mozversioncontrol/__init__.py
+--- a/python/mozversioncontrol/mozversioncontrol/__init__.py
++++ b/python/mozversioncontrol/mozversioncontrol/__init__.py
+@@ -351,17 +351,17 @@ class HgRepository(Repository):
+     def add_remove_files(self, *paths):
+         args = ['addremove'] + list(paths)
+         m = re.search(r'\d+\.\d+', self.tool_version)
+         simplified_version = float(m.group(0)) if m else 0
+         if simplified_version >= 3.9:
+             args = ['--config', 'extensions.automv='] + args
+         self._run(*args)
+ 
+-    def forget_add_remove_files(self, paths):
++    def forget_add_remove_files(self, *paths):
+         self._run('forget', *paths)
+ 
+     def get_files_in_working_directory(self):
+         # Can return backslashes on Windows. Normalize to forward slashes.
+         return list(p.replace('\\', '/') for p in
+                     self._run(b'files', b'-0').split('\0') if p)
+ 
+     def working_directory_clean(self, untracked=False, ignored=False):
+@@ -465,20 +465,20 @@ class GitRepository(Repository):
+         if upstream == 'default':
+             upstream = self.base_ref
+ 
+         compare = '{}..HEAD'.format(upstream)
+         files = self._run('log', '--name-only', '--diff-filter={}'.format(diff_filter.upper()),
+                           '--oneline', '--pretty=format:', compare).splitlines()
+         return [f for f in files if f]
+ 
+-    def add_remove_files(self, paths):
++    def add_remove_files(self, *paths):
+         self._run('add', *paths)
+ 
+-    def forget_add_remove_files(self, paths):
++    def forget_add_remove_files(self, *paths):
+         self._run('reset', *paths)
+ 
+     def get_files_in_working_directory(self):
+         return self._run('ls-files', '-z').split('\0')
+ 
+     def working_directory_clean(self, untracked=False, ignored=False):
+         args = ['status', '--porcelain']
+ 

mozilla-release/patches/1632688-77a1.patch (+65, -0)

@@ -0,0 +1,65 @@
+# HG changeset patch
+# User Tom Prince <mozilla@hocat.ca>
+# Date 1587752306 21600
+# Node ID 9442967f483a7c61b520c3f559a8db9fb29aa573
+# Parent  b9da46df253caa5bd1e7380a12c561ea9bb7e6c1
+Bug 1632688: [mozversioncontrol] Don't try to add files if none are provided; r=ahal,rstewart a=tomprince
+
+Differential Revision: https://phabricator.services.mozilla.com/D72414
+
+diff --git a/python/mozversioncontrol/mozversioncontrol/__init__.py b/python/mozversioncontrol/mozversioncontrol/__init__.py
+--- a/python/mozversioncontrol/mozversioncontrol/__init__.py
++++ b/python/mozversioncontrol/mozversioncontrol/__init__.py
+@@ -344,24 +344,28 @@ class HgRepository(Repository):
+             return self._run('log', '-r', rev, '-T', template).splitlines()
+ 
+     def get_outgoing_files(self, diff_filter='ADM', upstream='default'):
+         template = self._files_template(diff_filter)
+         return self._run('outgoing', '-r', '.', '--quiet',
+                          '--template', template, upstream, return_codes=(1,)).split()
+ 
+     def add_remove_files(self, *paths):
++        if not paths:
++            return
+         args = ['addremove'] + list(paths)
+         m = re.search(r'\d+\.\d+', self.tool_version)
+         simplified_version = float(m.group(0)) if m else 0
+         if simplified_version >= 3.9:
+             args = ['--config', 'extensions.automv='] + args
+         self._run(*args)
+ 
+     def forget_add_remove_files(self, *paths):
++        if not paths:
++            return
+         self._run('forget', *paths)
+ 
+     def get_files_in_working_directory(self):
+         # Can return backslashes on Windows. Normalize to forward slashes.
+         return list(p.replace('\\', '/') for p in
+                     self._run(b'files', b'-0').split('\0') if p)
+ 
+     def working_directory_clean(self, untracked=False, ignored=False):
+@@ -466,19 +470,23 @@ class GitRepository(Repository):
+             upstream = self.base_ref
+ 
+         compare = '{}..HEAD'.format(upstream)
+         files = self._run('log', '--name-only', '--diff-filter={}'.format(diff_filter.upper()),
+                           '--oneline', '--pretty=format:', compare).splitlines()
+         return [f for f in files if f]
+ 
+     def add_remove_files(self, *paths):
++        if not paths:
++            return
+         self._run('add', *paths)
+ 
+     def forget_add_remove_files(self, *paths):
++        if not paths:
++            return
+         self._run('reset', *paths)
+ 
+     def get_files_in_working_directory(self):
+         return self._run('ls-files', '-z').split('\0')
+ 
+     def working_directory_clean(self, untracked=False, ignored=False):
+         args = ['status', '--porcelain']
+ 
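
The early return added above matters because 'hg addremove' with no path arguments operates on the entire working copy rather than on nothing (and 'git add' with no pathspec just warns that nothing was specified), so a call expanded from an empty list must never reach the VCS. A small stand-alone sketch of the guarded Mercurial case (not the actual class method):

    import subprocess

    def add_remove_files(*paths):
        # Guard from the patch above: with no paths, 'hg addremove' would otherwise
        # walk the whole working copy instead of doing nothing.
        if not paths:
            return
        subprocess.check_call(['hg', 'addremove'] + list(paths))

    add_remove_files()                    # no-op, no subprocess is spawned
    # add_remove_files('media/libaom')    # would run: hg addremove media/libaom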

mozilla-release/patches/1634391-2-78a1.patch (+155, -0)

@@ -0,0 +1,155 @@
+# HG changeset patch
+# User Ricky Stewart <rstewart@mozilla.com>
+# Date 1589377253 0
+# Node ID c5f377e6f0af3662a3189db192e32d81496b8669
+# Parent  d1b7ccd2f6dfb9dd8b51fa1fb7957eab05644a58
+Bug 1634391 - Include more specific instructions than "consult a build peer" in Mozbuild comments/error messages r=froydnj
+
+The official decision appears to be that we want people to ask questions in the build channel on chat.mozilla.org for queries that require build peer approval, as opposed to emailing specific people directly, filing bugs, etc. Rather than the vague "consult a build peer" suggestion currently in the code, specify exactly what we expect people to do in mozbuild.
+
+Differential Revision: https://phabricator.services.mozilla.com/D74963
+
+diff --git a/python/mozbuild/mozbuild/action/node.py.1634391-2.later b/python/mozbuild/mozbuild/action/node.py.1634391-2.later
+new file mode 100644
+--- /dev/null
++++ b/python/mozbuild/mozbuild/action/node.py.1634391-2.later
+@@ -0,0 +1,22 @@
++--- node.py
+++++ node.py
++@@ -15,17 +15,18 @@ SCRIPT_ALLOWLIST = [
++     ]
++ 
++ ALLOWLIST_ERROR = '''
++ %s is not
++ in SCRIPT_ALLOWLIST in python/mozbuild/mozbuild/action/node.py.
++ Using NodeJS from moz.build is currently in beta, and node
++ scripts to be executed need to be added to the allowlist and
++ reviewed by a build peer so that we can get a better sense of
++-how support should evolve.
+++how support should evolve. (To consult a build peer, raise a
+++question in the #build channel at https://chat.mozilla.org.)
++ '''
++ 
++ 
++ def is_script_in_allowlist(script_path):
++     if script_path in SCRIPT_ALLOWLIST:
++         return True
++ 
++     return False
+diff --git a/python/mozbuild/mozbuild/frontend/context.py b/python/mozbuild/mozbuild/frontend/context.py
+--- a/python/mozbuild/mozbuild/frontend/context.py
++++ b/python/mozbuild/mozbuild/frontend/context.py
+@@ -1367,17 +1367,18 @@ VARIABLES = {
+         indicating extra files the output depends on.
+ 
+         When the ``flags`` attribute is present, the given list of flags is
+         passed as extra arguments following the inputs.
+ 
+         When the ``force`` attribute is present, the file is generated every
+         build, regardless of whether it is stale.  This is special to the
+         RecursiveMake backend and intended for special situations only (e.g.,
+-        localization).  Please consult a build peer before using ``force``.
++        localization).  Please consult a build peer (on the #build channel at
++        https://chat.mozilla.org) before using ``force``.
+         """
+         ),
+ 
+     'DEFINES': (
+         InitializedDefines,
+         dict,
+         """Dictionary of compiler defines to declare.
+ 
+@@ -1539,17 +1540,17 @@ VARIABLES = {
+         """),
+ 
+     'OBJDIR_FILES': (ContextDerivedTypedHierarchicalStringList(Path), list,
+                      """List of files to be installed anywhere in the objdir. Use sparingly.
+ 
+         ``OBJDIR_FILES`` is similar to FINAL_TARGET_FILES, but it allows copying
+         anywhere in the object directory. This is intended for various one-off
+         cases, not for general use. If you wish to add entries to OBJDIR_FILES,
+-        please consult a build peer.
++        please consult a build peer (on the #build channel at https://chat.mozilla.org).
+         """),
+ 
+     'OBJDIR_PP_FILES': (ContextDerivedTypedHierarchicalStringList(Path), list,
+                         """Like ``OBJDIR_FILES``, with preprocessing. Use sparingly.
+         """),
+ 
+     'FINAL_LIBRARY': (six.text_type, six.text_type,
+                       """Library in which the objects of the current directory will be linked.
+@@ -2356,18 +2357,18 @@ FUNCTIONS = {
+ 
+         The value used for the variable is the final value at the end of the
+         moz.build file, so it is possible (but not recommended style) to place
+         the export before the definition of the variable.
+ 
+         This function is limited to the upper-case variables that have special
+         meaning in moz.build files.
+ 
+-        NOTE: Please consult with a build peer before adding a new use of this
+-        function.
++        NOTE: Please consult with a build peer (on the #build channel at
++        https://chat.mozilla.org) before adding a new use of this function.
+ 
+         Example usage
+         ^^^^^^^^^^^^^
+ 
+         To make all children directories install as the given extension::
+ 
+           XPI_NAME = 'cool-extension'
+           export('XPI_NAME')
+diff --git a/python/mozbuild/mozbuild/mach_commands.py b/python/mozbuild/mozbuild/mach_commands.py
+--- a/python/mozbuild/mozbuild/mach_commands.py
++++ b/python/mozbuild/mozbuild/mach_commands.py
+@@ -1105,19 +1105,22 @@ class Vendor(MachCommandBase):
+         self._sub_mach(['help', 'vendor'])
+         return 1
+ 
+     @SubCommand('vendor', 'rust',
+                 description='Vendor rust crates from crates.io into third_party/rust')
+     @CommandArgument('--ignore-modified', action='store_true',
+                      help='Ignore modified files in current checkout',
+                      default=False)
+-    @CommandArgument('--build-peers-said-large-imports-were-ok', action='store_true',
+-                     help='Permit overly-large files to be added to the repository',
+-                     default=False)
++    @CommandArgument(
++        '--build-peers-said-large-imports-were-ok', action='store_true',
++        help=('Permit overly-large files to be added to the repository. '
++              'To get permission to set this, raise a question in the #build '
++              'channel at https://chat.mozilla.org.'),
++        default=False)
+     def vendor_rust(self, **kwargs):
+         from mozbuild.vendor_rust import VendorRust
+         vendor_command = self._spawn(VendorRust)
+         vendor_command.vendor(**kwargs)
+ 
+     @SubCommand('vendor', 'aom',
+                 description='Vendor av1 video codec reference implementation into the '
+                 'source repository.')
+diff --git a/python/mozbuild/mozbuild/vendor_rust.py b/python/mozbuild/mozbuild/vendor_rust.py
+--- a/python/mozbuild/mozbuild/vendor_rust.py
++++ b/python/mozbuild/mozbuild/vendor_rust.py
+@@ -365,18 +365,19 @@ license file's hash.
+         # Forcefully complain about large files being added, as history has
+         # shown that large-ish files typically are not needed.
+         if large_files and not build_peers_said_large_imports_were_ok:
+             self.log(logging.ERROR, 'filesize_check', {},
+                      '''The following files exceed the filesize limit of {size}:
+ 
+ {files}
+ 
+-Please find a way to reduce the sizes of these files or talk to a build
+-peer about the particular large files you are adding.
++If you can't reduce the size of these files, talk to a build peer (on the #build
++channel at https://chat.mozilla.org) about the particular large files you are
++adding.
+ 
+ The changes from `mach vendor rust` will NOT be added to version control.
+ 
+ {notice}'''.format(files='\n'.join(sorted(large_files)), size=FILESIZE_LIMIT,
+                    notice=CARGO_LOCK_NOTICE))
+             self.repository.forget_add_remove_files(vendor_dir)
+             self.repository.clean_directory(vendor_dir)
+             sys.exit(1)

mozilla-release/patches/1637845-01-79a1.patch (+300, -0)

@@ -0,0 +1,300 @@
+# HG changeset patch
+# User Tom Ritter <tom@mozilla.com>
+# Date 1591799937 0
+# Node ID 9c72208359b46c7457154cff2382c7a2b48e8815
+# Parent  9a014f05aa18ef1eb3ef72475742914f703ccc3b
+Bug 1637845 - Move all mach vendor files to a subdirectory r=glob
+
+Differential Revision: https://phabricator.services.mozilla.com/D75693
+
+diff --git a/build/mach_bootstrap.py b/build/mach_bootstrap.py
+--- a/build/mach_bootstrap.py
++++ b/build/mach_bootstrap.py
+@@ -46,16 +46,17 @@ MACH_MODULES = [
+     'python/mach_commands.py',
+     'python/mozboot/mozboot/mach_commands.py',
+     'python/mozbuild/mozbuild/artifact_commands.py',
+     'python/mozbuild/mozbuild/backend/mach_commands.py',
+     'python/mozbuild/mozbuild/build_commands.py',
+     'python/mozbuild/mozbuild/code-analysis/mach_commands.py',
+     'python/mozbuild/mozbuild/compilation/codecomplete.py',
+     'python/mozbuild/mozbuild/frontend/mach_commands.py',
++    'python/mozbuild/mozbuild/vendor/mach_commands.py',
+     'python/mozbuild/mozbuild/mach_commands.py',
+     'python/mozperftest/mozperftest/mach_commands.py',
+     'testing/awsy/mach_commands.py',
+     'testing/firefox-ui/mach_commands.py',
+     'testing/geckodriver/mach_commands.py',
+     'testing/mach_commands.py',
+     'testing/marionette/mach_commands.py',
+     'testing/mochitest/mach_commands.py',
+diff --git a/python/mozbuild/mozbuild/mach_commands.py b/python/mozbuild/mozbuild/mach_commands.py
+--- a/python/mozbuild/mozbuild/mach_commands.py
++++ b/python/mozbuild/mozbuild/mach_commands.py
+@@ -1091,100 +1091,16 @@ class MachDebug(MachCommandBase):
+                     return result
+                 elif isinstance(obj, set):
+                     return list(obj)
+                 return json.JSONEncoder.default(self, obj)
+         json.dump(self, cls=EnvironmentEncoder, sort_keys=True, fp=out)
+ 
+ 
+ @CommandProvider
+-class Vendor(MachCommandBase):
+-    """Vendor third-party dependencies into the source repository."""
+-
+-    @Command('vendor', category='misc',
+-             description='Vendor third-party dependencies into the source repository.')
+-    def vendor(self):
+-        self._sub_mach(['help', 'vendor'])
+-        return 1
+-
+-    @SubCommand('vendor', 'rust',
+-                description='Vendor rust crates from crates.io into third_party/rust')
+-    @CommandArgument('--ignore-modified', action='store_true',
+-                     help='Ignore modified files in current checkout',
+-                     default=False)
+-    @CommandArgument(
+-        '--build-peers-said-large-imports-were-ok', action='store_true',
+-        help=('Permit overly-large files to be added to the repository. '
+-              'To get permission to set this, raise a question in the #build '
+-              'channel at https://chat.mozilla.org.'),
+-        default=False)
+-    def vendor_rust(self, **kwargs):
+-        from mozbuild.vendor_rust import VendorRust
+-        vendor_command = self._spawn(VendorRust)
+-        vendor_command.vendor(**kwargs)
+-
+-    @SubCommand('vendor', 'aom',
+-                description='Vendor av1 video codec reference implementation into the '
+-                'source repository.')
+-    @CommandArgument('-r', '--revision',
+-                     help='Repository tag or commit to update to.')
+-    @CommandArgument('--repo',
+-                     help='Repository url to pull a snapshot from. '
+-                     'Supports github and googlesource.')
+-    @CommandArgument('--ignore-modified', action='store_true',
+-                     help='Ignore modified files in current checkout',
+-                     default=False)
+-    def vendor_aom(self, **kwargs):
+-        from mozbuild.vendor_aom import VendorAOM
+-        vendor_command = self._spawn(VendorAOM)
+-        vendor_command.vendor(**kwargs)
+-
+-    @SubCommand('vendor', 'dav1d',
+-                description='Vendor dav1d implementation of AV1 into the source repository.')
+-    @CommandArgument('-r', '--revision',
+-                     help='Repository tag or commit to update to.')
+-    @CommandArgument('--repo',
+-                     help='Repository url to pull a snapshot from. Supports gitlab.')
+-    @CommandArgument('--ignore-modified', action='store_true',
+-                     help='Ignore modified files in current checkout',
+-                     default=False)
+-    def vendor_dav1d(self, **kwargs):
+-        from mozbuild.vendor_dav1d import VendorDav1d
+-        vendor_command = self._spawn(VendorDav1d)
+-        vendor_command.vendor(**kwargs)
+-
+-    @SubCommand('vendor', 'python',
+-                description='Vendor Python packages from pypi.org into third_party/python')
+-    @CommandArgument('--with-windows-wheel', action='store_true',
+-        help='Vendor a wheel for Windows along with the source package',
+-        default=False)
+-    @CommandArgument('packages', default=None, nargs='*',
+-                     help='Packages to vendor. If omitted, packages and their dependencies '
+-                     'defined in Pipfile.lock will be vendored. If Pipfile has been modified, '
+-                     'then Pipfile.lock will be regenerated. Note that transient dependencies '
+-                     'may be updated when running this command.')
+-    def vendor_python(self, **kwargs):
+-        from mozbuild.vendor_python import VendorPython
+-        vendor_command = self._spawn(VendorPython)
+-        vendor_command.vendor(**kwargs)
+-
+-    @SubCommand('vendor', 'manifest',
+-                description='Vendor externally hosted repositories into this '
+-                            'repository.')
+-    @CommandArgument('files', nargs='+',
+-                     help='Manifest files to work on')
+-    @CommandArgumentGroup('verify')
+-    @CommandArgument('--verify', '-v', action='store_true', group='verify',
+-                     required=True, help='Verify manifest')
+-    def vendor_manifest(self, files, verify):
+-        from mozbuild.vendor_manifest import verify_manifests
+-        verify_manifests(files)
+-
+-
+-@CommandProvider
+ class Repackage(MachCommandBase):
+     '''Repackages artifacts into different formats.
+ 
+     This is generally used after packages are signed by the signing
+     scriptworkers in order to bundle things up into shippable formats, such as a
+     .dmg on OSX or an installer exe on Windows.
+     '''
+     @Command('repackage', category='misc',
+diff --git a/python/mozbuild/mozbuild/test/test_licenses.py b/python/mozbuild/mozbuild/test/test_licenses.py
+--- a/python/mozbuild/mozbuild/test/test_licenses.py
++++ b/python/mozbuild/mozbuild/test/test_licenses.py
+@@ -1,15 +1,15 @@
+ from __future__ import absolute_import, print_function
+ 
+ import unittest
+ 
+ import mozunit
+ 
+-from mozbuild.vendor_rust import VendorRust
++from mozbuild.vendor.vendor_rust import VendorRust
+ 
+ 
+ class TestLicenses(unittest.TestCase):
+     """
+     Unit tests for the Rust Vendoring stuff
+     """
+ 
+     def setUp(self):
+diff --git a/python/mozbuild/mozbuild/test/test_manifest.py b/python/mozbuild/mozbuild/test/test_manifest.py
+--- a/python/mozbuild/mozbuild/test/test_manifest.py
++++ b/python/mozbuild/mozbuild/test/test_manifest.py
+@@ -3,17 +3,17 @@
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+ from __future__ import absolute_import, print_function, unicode_literals
+ 
+ import unittest
+ 
+ import mozfile
+-from mozbuild.moz_yaml import load_moz_yaml, VerifyError
++from mozbuild.vendor.moz_yaml import load_moz_yaml, VerifyError
+ from nose.tools import raises
+ 
+ 
+ class TestManifest(unittest.TestCase):
+     def test_simple(self):
+         simple_dict = {
+             'schema': 1,
+             'origin': {
+diff --git a/python/mozbuild/mozbuild/vendor/mach_commands.py b/python/mozbuild/mozbuild/vendor/mach_commands.py
+new file mode 100644
+--- /dev/null
++++ b/python/mozbuild/mozbuild/vendor/mach_commands.py
+@@ -0,0 +1,100 @@
++# This Source Code Form is subject to the terms of the Mozilla Public
++# License, v. 2.0. If a copy of the MPL was not distributed with this
++# file, # You can obtain one at http://mozilla.org/MPL/2.0/.
++
++from __future__ import absolute_import, print_function, unicode_literals
++
++import sys
++
++from mach.decorators import (
++    CommandArgument,
++    CommandArgumentGroup,
++    CommandProvider,
++    Command,
++    SubCommand,
++)
++
++from mozbuild.base import MachCommandBase
++
++@CommandProvider
++class Vendor(MachCommandBase):
++    """Vendor third-party dependencies into the source repository."""
++
++    @Command('vendor', category='misc',
++             description='Vendor third-party dependencies into the source repository.')
++    def vendor(self):
++        self._sub_mach(['help', 'vendor'])
++        return 1
++
++    @SubCommand('vendor', 'rust',
++                description='Vendor rust crates from crates.io into third_party/rust')
++    @CommandArgument('--ignore-modified', action='store_true',
++                     help='Ignore modified files in current checkout',
++                     default=False)
++    @CommandArgument(
++        '--build-peers-said-large-imports-were-ok', action='store_true',
++        help=('Permit overly-large files to be added to the repository. '
++              'To get permission to set this, raise a question in the #build '
++              'channel at https://chat.mozilla.org.'),
++        default=False)
++    def vendor_rust(self, **kwargs):
++        from mozbuild.vendor_rust import VendorRust
++        vendor_command = self._spawn(VendorRust)
++        vendor_command.vendor(**kwargs)
++
++    @SubCommand('vendor', 'aom',
++                description='Vendor av1 video codec reference implementation into the '
++                'source repository.')
++    @CommandArgument('-r', '--revision',
++                     help='Repository tag or commit to update to.')
++    @CommandArgument('--repo',
++                     help='Repository url to pull a snapshot from. '
++                     'Supports github and googlesource.')
++    @CommandArgument('--ignore-modified', action='store_true',
++                     help='Ignore modified files in current checkout',
++                     default=False)
++    def vendor_aom(self, **kwargs):
++        from mozbuild.vendor_aom import VendorAOM
++        vendor_command = self._spawn(VendorAOM)
++        vendor_command.vendor(**kwargs)
++
++    @SubCommand('vendor', 'dav1d',
++                description='Vendor dav1d implementation of AV1 into the source repository.')
++    @CommandArgument('-r', '--revision',
++                     help='Repository tag or commit to update to.')
++    @CommandArgument('--repo',
++                     help='Repository url to pull a snapshot from. Supports gitlab.')
++    @CommandArgument('--ignore-modified', action='store_true',
++                     help='Ignore modified files in current checkout',
++                     default=False)
++    def vendor_dav1d(self, **kwargs):
++        from mozbuild.vendor_dav1d import VendorDav1d
++        vendor_command = self._spawn(VendorDav1d)
++        vendor_command.vendor(**kwargs)
++
++    @SubCommand('vendor', 'python',
++                description='Vendor Python packages from pypi.org into third_party/python')
++    @CommandArgument('--with-windows-wheel', action='store_true',
++                     help='Vendor a wheel for Windows along with the source package',
++                     default=False)
++    @CommandArgument('packages', default=None, nargs='*',
++                     help='Packages to vendor. If omitted, packages and their dependencies '
++                     'defined in Pipfile.lock will be vendored. If Pipfile has been modified, '
++                     'then Pipfile.lock will be regenerated. Note that transient dependencies '
++                     'may be updated when running this command.')
++    def vendor_python(self, **kwargs):
++        from mozbuild.vendor_python import VendorPython
++        vendor_command = self._spawn(VendorPython)
++        vendor_command.vendor(**kwargs)
++
++    @SubCommand('vendor', 'manifest',
++                description='Vendor externally hosted repositories into this '
++                            'repository.')
++    @CommandArgument('files', nargs='+',
++                     help='Manifest files to work on')
++    @CommandArgumentGroup('verify')
++    @CommandArgument('--verify', '-v', action='store_true', group='verify',
++                     required=True, help='Verify manifest')
++    def vendor_manifest(self, files, verify):
++        from mozbuild.vendor_manifest import verify_manifests
++        verify_manifests(files)
+\ No newline at end of file
+diff --git a/python/mozbuild/mozbuild/moz_yaml.py b/python/mozbuild/mozbuild/vendor/moz_yaml.py
+rename from python/mozbuild/mozbuild/moz_yaml.py
+rename to python/mozbuild/mozbuild/vendor/moz_yaml.py
+diff --git a/python/mozbuild/mozbuild/vendor_aom.py b/python/mozbuild/mozbuild/vendor/vendor_aom.py
+rename from python/mozbuild/mozbuild/vendor_aom.py
+rename to python/mozbuild/mozbuild/vendor/vendor_aom.py
+diff --git a/python/mozbuild/mozbuild/vendor_dav1d.py b/python/mozbuild/mozbuild/vendor/vendor_dav1d.py
+rename from python/mozbuild/mozbuild/vendor_dav1d.py
+rename to python/mozbuild/mozbuild/vendor/vendor_dav1d.py
+diff --git a/python/mozbuild/mozbuild/vendor_manifest.py b/python/mozbuild/mozbuild/vendor/vendor_manifest.py
+rename from python/mozbuild/mozbuild/vendor_manifest.py
+rename to python/mozbuild/mozbuild/vendor/vendor_manifest.py
+diff --git a/python/mozbuild/mozbuild/vendor_python.py b/python/mozbuild/mozbuild/vendor/vendor_python.py
+rename from python/mozbuild/mozbuild/vendor_python.py
+rename to python/mozbuild/mozbuild/vendor/vendor_python.py
+diff --git a/python/mozbuild/mozbuild/vendor_rust.py b/python/mozbuild/mozbuild/vendor/vendor_rust.py
+rename from python/mozbuild/mozbuild/vendor_rust.py
+rename to python/mozbuild/mozbuild/vendor/vendor_rust.py
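
The practical effect of the renames above on callers is an import-path change, as the test updates earlier in this patch show; for example (illustrative, rust vendoring only):

    # Old layout (before this patch):
    #   from mozbuild.vendor_rust import VendorRust
    # New layout (after the move into mozbuild/vendor/):
    from mozbuild.vendor.vendor_rust import VendorRust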

mozilla-release/patches/1637845-02-79a1.patch (+2038, -0)

@@ -0,0 +1,2038 @@
+# HG changeset patch
+# User Tom Ritter <tom@mozilla.com>
+# Date 1591800002 0
+# Node ID 5410c7af659630679cd31e2e555bc68d294a6639
+# Parent  dacc727bf9a10e4327419edd90add71f309ff30d
+Bug 1637845 - Apply 'black' to the vendor subdirectory r=glob
+
+Differential Revision: https://phabricator.services.mozilla.com/D75896
+
+Depends on D75693
+
+diff --git a/python/mozbuild/mozbuild/vendor/mach_commands.py b/python/mozbuild/mozbuild/vendor/mach_commands.py
+--- a/python/mozbuild/mozbuild/vendor/mach_commands.py
++++ b/python/mozbuild/mozbuild/vendor/mach_commands.py
+@@ -11,90 +11,139 @@ from mach.decorators import (
+     CommandArgumentGroup,
+     CommandProvider,
+     Command,
+     SubCommand,
+ )
+ 
+ from mozbuild.base import MachCommandBase
+ 
++
+ @CommandProvider
+ class Vendor(MachCommandBase):
+     """Vendor third-party dependencies into the source repository."""
+ 
+-    @Command('vendor', category='misc',
+-             description='Vendor third-party dependencies into the source repository.')
++    @Command(
++        "vendor",
++        category="misc",
++        description="Vendor third-party dependencies into the source repository.",
++    )
+     def vendor(self):
+-        self._sub_mach(['help', 'vendor'])
++        self._sub_mach(["help", "vendor"])
+         return 1
+ 
+-    @SubCommand('vendor', 'rust',
+-                description='Vendor rust crates from crates.io into third_party/rust')
+-    @CommandArgument('--ignore-modified', action='store_true',
+-                     help='Ignore modified files in current checkout',
+-                     default=False)
++    @SubCommand(
++        "vendor",
++        "rust",
++        description="Vendor rust crates from crates.io into third_party/rust",
++    )
+     @CommandArgument(
+-        '--build-peers-said-large-imports-were-ok', action='store_true',
+-        help=('Permit overly-large files to be added to the repository. '
+-              'To get permission to set this, raise a question in the #build '
+-              'channel at https://chat.mozilla.org.'),
+-        default=False)
++        "--ignore-modified",
++        action="store_true",
++        help="Ignore modified files in current checkout",
++        default=False,
++    )
++    @CommandArgument(
++        "--build-peers-said-large-imports-were-ok",
++        action="store_true",
++        help=(
++            "Permit overly-large files to be added to the repository. "
++            "To get permission to set this, raise a question in the #build "
++            "channel at https://chat.mozilla.org."
++        ),
++        default=False,
++    )
+     def vendor_rust(self, **kwargs):
+         from mozbuild.vendor_rust import VendorRust
++
+         vendor_command = self._spawn(VendorRust)
+         vendor_command.vendor(**kwargs)
+ 
+-    @SubCommand('vendor', 'aom',
+-                description='Vendor av1 video codec reference implementation into the '
+-                'source repository.')
+-    @CommandArgument('-r', '--revision',
+-                     help='Repository tag or commit to update to.')
+-    @CommandArgument('--repo',
+-                     help='Repository url to pull a snapshot from. '
+-                     'Supports github and googlesource.')
+-    @CommandArgument('--ignore-modified', action='store_true',
+-                     help='Ignore modified files in current checkout',
+-                     default=False)
++    @SubCommand(
++        "vendor",
++        "aom",
++        description="Vendor av1 video codec reference implementation into the "
++        "source repository.",
++    )
++    @CommandArgument("-r", "--revision", help="Repository tag or commit to update to.")
++    @CommandArgument(
++        "--repo",
++        help="Repository url to pull a snapshot from. "
++        "Supports github and googlesource.",
++    )
++    @CommandArgument(
++        "--ignore-modified",
++        action="store_true",
++        help="Ignore modified files in current checkout",
++        default=False,
++    )
+     def vendor_aom(self, **kwargs):
+         from mozbuild.vendor_aom import VendorAOM
++
+         vendor_command = self._spawn(VendorAOM)
+         vendor_command.vendor(**kwargs)
+ 
+-    @SubCommand('vendor', 'dav1d',
+-                description='Vendor dav1d implementation of AV1 into the source repository.')
+-    @CommandArgument('-r', '--revision',
+-                     help='Repository tag or commit to update to.')
+-    @CommandArgument('--repo',
+-                     help='Repository url to pull a snapshot from. Supports gitlab.')
+-    @CommandArgument('--ignore-modified', action='store_true',
+-                     help='Ignore modified files in current checkout',
+-                     default=False)
++    @SubCommand(
++        "vendor",
++        "dav1d",
++        description="Vendor dav1d implementation of AV1 into the source repository.",
++    )
++    @CommandArgument("-r", "--revision", help="Repository tag or commit to update to.")
++    @CommandArgument(
++        "--repo", help="Repository url to pull a snapshot from. Supports gitlab."
++    )
++    @CommandArgument(
++        "--ignore-modified",
++        action="store_true",
++        help="Ignore modified files in current checkout",
++        default=False,
++    )
+     def vendor_dav1d(self, **kwargs):
+         from mozbuild.vendor_dav1d import VendorDav1d
++
+         vendor_command = self._spawn(VendorDav1d)
+         vendor_command.vendor(**kwargs)
+ 
+-    @SubCommand('vendor', 'python',
+-                description='Vendor Python packages from pypi.org into third_party/python')
+-    @CommandArgument('--with-windows-wheel', action='store_true',
+-                     help='Vendor a wheel for Windows along with the source package',
+-                     default=False)
+-    @CommandArgument('packages', default=None, nargs='*',
+-                     help='Packages to vendor. If omitted, packages and their dependencies '
+-                     'defined in Pipfile.lock will be vendored. If Pipfile has been modified, '
+-                     'then Pipfile.lock will be regenerated. Note that transient dependencies '
+-                     'may be updated when running this command.')
++    @SubCommand(
++        "vendor",
++        "python",
++        description="Vendor Python packages from pypi.org into third_party/python",
++    )
++    @CommandArgument(
++        "--with-windows-wheel",
++        action="store_true",
++        help="Vendor a wheel for Windows along with the source package",
++        default=False,
++    )
++    @CommandArgument(
++        "packages",
++        default=None,
++        nargs="*",
++        help="Packages to vendor. If omitted, packages and their dependencies "
++        "defined in Pipfile.lock will be vendored. If Pipfile has been modified, "
++        "then Pipfile.lock will be regenerated. Note that transient dependencies "
++        "may be updated when running this command.",
++    )
+     def vendor_python(self, **kwargs):
+         from mozbuild.vendor_python import VendorPython
++
+         vendor_command = self._spawn(VendorPython)
+         vendor_command.vendor(**kwargs)
+ 
+-    @SubCommand('vendor', 'manifest',
+-                description='Vendor externally hosted repositories into this '
+-                            'repository.')
+-    @CommandArgument('files', nargs='+',
+-                     help='Manifest files to work on')
+-    @CommandArgumentGroup('verify')
+-    @CommandArgument('--verify', '-v', action='store_true', group='verify',
+-                     required=True, help='Verify manifest')
++    @SubCommand(
++        "vendor",
++        "manifest",
++        description="Vendor externally hosted repositories into this " "repository.",
++    )
++    @CommandArgument("files", nargs="+", help="Manifest files to work on")
++    @CommandArgumentGroup("verify")
++    @CommandArgument(
++        "--verify",
++        "-v",
++        action="store_true",
++        group="verify",
++        required=True,
++        help="Verify manifest",
++    )
+     def vendor_manifest(self, files, verify):
+         from mozbuild.vendor_manifest import verify_manifests
+-        verify_manifests(files)
+\ No newline at end of file
++
++        verify_manifests(files)
+diff --git a/python/mozbuild/mozbuild/vendor/moz_yaml.py b/python/mozbuild/mozbuild/vendor/moz_yaml.py
+--- a/python/mozbuild/mozbuild/vendor/moz_yaml.py
++++ b/python/mozbuild/mozbuild/vendor/moz_yaml.py
+@@ -10,46 +10,54 @@
+ from __future__ import absolute_import, print_function, unicode_literals
+ 
+ import errno
+ import os
+ import re
+ import sys
+ 
+ HERE = os.path.abspath(os.path.dirname(__file__))
+-lib_path = os.path.join(HERE, '..', '..', '..', 'third_party', 'python')
+-sys.path.append(os.path.join(lib_path, 'voluptuous'))
+-sys.path.append(os.path.join(lib_path, 'pyyaml', 'lib'))
++lib_path = os.path.join(HERE, "..", "..", "..", "third_party", "python")
++sys.path.append(os.path.join(lib_path, "voluptuous"))
++sys.path.append(os.path.join(lib_path, "pyyaml", "lib"))
+ 
+ import voluptuous
+ import yaml
+-from voluptuous import (All, FqdnUrl, Length, Match, Msg, Required, Schema,
+-                        Unique, )
++from voluptuous import (
++    All,
++    FqdnUrl,
++    Length,
++    Match,
++    Msg,
++    Required,
++    Schema,
++    Unique,
++)
+ from yaml.error import MarkedYAMLError
+ 
+ # TODO ensure this matches the approved list of licenses
+ VALID_LICENSES = [
+     # Standard Licenses (as per https://spdx.org/licenses/)
+-    'Apache-2.0',
+-    'BSD-2-Clause',
+-    'BSD-3-Clause-Clear',
+-    'GPL-3.0',
+-    'ISC',
+-    'ICU',
+-    'LGPL-2.1',
+-    'LGPL-3.0',
+-    'MIT',
+-    'MPL-1.1',
+-    'MPL-2.0',
++    "Apache-2.0",
++    "BSD-2-Clause",
++    "BSD-3-Clause-Clear",
++    "GPL-3.0",
++    "ISC",
++    "ICU",
++    "LGPL-2.1",
++    "LGPL-3.0",
++    "MIT",
++    "MPL-1.1",
++    "MPL-2.0",
+     # Unique Licenses
+-    'ACE',  # http://www.cs.wustl.edu/~schmidt/ACE-copying.html
+-    'Anti-Grain-Geometry',  # http://www.antigrain.com/license/index.html
+-    'JPNIC',  # https://www.nic.ad.jp/ja/idn/idnkit/download/index.html
+-    'Khronos',  # https://www.khronos.org/openmaxdl
+-    'Unicode',  # http://www.unicode.org/copyright.html
++    "ACE",  # http://www.cs.wustl.edu/~schmidt/ACE-copying.html
++    "Anti-Grain-Geometry",  # http://www.antigrain.com/license/index.html
++    "JPNIC",  # https://www.nic.ad.jp/ja/idn/idnkit/download/index.html
++    "Khronos",  # https://www.khronos.org/openmaxdl
++    "Unicode",  # http://www.unicode.org/copyright.html
+ ]
+ 
+ """
+ ---
+ # Third-Party Library Template
+ # All fields are mandatory unless otherwise noted
+ 
+ # Version of this schema
+@@ -146,60 +154,58 @@ vendoring:
+ 
+   # In-tree scripts to be executed after vendoring but before pushing.
+   # optional
+   run_after:
+     - script
+     - another script
+ """
+ 
+-RE_SECTION = re.compile(r'^(\S[^:]*):').search
+-RE_FIELD = re.compile(r'^\s\s([^:]+):\s+(\S+)$').search
++RE_SECTION = re.compile(r"^(\S[^:]*):").search
++RE_FIELD = re.compile(r"^\s\s([^:]+):\s+(\S+)$").search
+ 
+ 
+ class VerifyError(Exception):
+     def __init__(self, filename, error):
+         self.filename = filename
+         self.error = error
+ 
+     def __str__(self):
+-        return '%s: %s' % (self.filename, self.error)
++        return "%s: %s" % (self.filename, self.error)
+ 
+ 
+ def load_moz_yaml(filename, verify=True, require_license_file=True):
+     """Loads and verifies the specified manifest."""
+ 
+     # Load and parse YAML.
+     try:
+-        with open(filename, 'r') as f:
++        with open(filename, "r") as f:
+             manifest = yaml.safe_load(f)
+     except IOError as e:
+         if e.errno == errno.ENOENT:
+-            raise VerifyError(filename,
+-                              'Failed to find manifest: %s' % filename)
++            raise VerifyError(filename, "Failed to find manifest: %s" % filename)
+         raise
+     except MarkedYAMLError as e:
+         raise VerifyError(filename, e)
+ 
+     if not verify:
+         return manifest
+ 
+     # Verify schema.
+-    if 'schema' not in manifest:
++    if "schema" not in manifest:
+         raise VerifyError(filename, 'Missing manifest "schema"')
+-    if manifest['schema'] == 1:
++    if manifest["schema"] == 1:
+         schema = _schema_1()
+         schema_additional = _schema_1_additional
+     else:
+-        raise VerifyError(filename, 'Unsupported manifest schema')
++        raise VerifyError(filename, "Unsupported manifest schema")
+ 
+     try:
+         schema(manifest)
+-        schema_additional(filename, manifest,
+-                          require_license_file=require_license_file)
++        schema_additional(filename, manifest, require_license_file=require_license_file)
+     except (voluptuous.Error, ValueError) as e:
+         raise VerifyError(filename, e)
+ 
+     return manifest
+ 
+ 
+ def update_moz_yaml(filename, release, revision, verify=True, write=True):
+     """Update origin:release and vendoring:revision without stripping
+@@ -216,106 +222,112 @@ def update_moz_yaml(filename, release, r
+         for line in f.readlines():
+             m = RE_SECTION(line)
+             if m:
+                 section = m.group(1)
+             else:
+                 m = RE_FIELD(line)
+                 if m:
+                     (name, value) = m.groups()
+-                    if section == 'origin' and name == 'release':
+-                        line = '  release: %s\n' % release
++                    if section == "origin" and name == "release":
++                        line = "  release: %s\n" % release
+                         found_release = True
+-                    elif section == 'vendoring' and name == 'revision':
+-                        line = '  revision: %s\n' % revision
++                    elif section == "vendoring" and name == "revision":
++                        line = "  revision: %s\n" % revision
+                         found_revision = True
+             lines.append(line)
+ 
+         if not found_release and found_revision:
+-            raise ValueError('Failed to find origin:release and '
+-                             'vendoring:revision')
++            raise ValueError("Failed to find origin:release and " "vendoring:revision")
+ 
+     if write:
+-        with open(filename, 'w') as f:
++        with open(filename, "w") as f:
+             f.writelines(lines)
+ 
+ 
+ def _schema_1():
+     """Returns Voluptuous Schema object."""
+-    return Schema({
+-        Required('schema'): 1,
+-        Required('bugzilla'): {
+-            Required('product'): All(str, Length(min=1)),
+-            Required('component'): All(str, Length(min=1)),
+-        },
+-        'origin': {
+-            Required('name'): All(str, Length(min=1)),
+-            Required('description'): All(str, Length(min=1)),
+-            Required('url'): FqdnUrl(),
+-            Required('license'): Msg(License(), msg='Unsupported License'),
+-            Required('release'): All(str, Length(min=1)),
+-        },
+-        'vendoring': {
+-            Required('url'): FqdnUrl(),
+-            Required('revision'): Match(r'^[a-fA-F0-9]{12,40}$'),
+-            'patches': Unique([str]),
+-            'keep': Unique([str]),
+-            'exclude': Unique([str]),
+-            'include': Unique([str]),
+-            'run_after': Unique([str]),
+-        },
+-    })
++    return Schema(
++        {
++            Required("schema"): 1,
++            Required("bugzilla"): {
++                Required("product"): All(str, Length(min=1)),
++                Required("component"): All(str, Length(min=1)),
++            },
++            "origin": {
++                Required("name"): All(str, Length(min=1)),
++                Required("description"): All(str, Length(min=1)),
++                Required("url"): FqdnUrl(),
++                Required("license"): Msg(License(), msg="Unsupported License"),
++                Required("release"): All(str, Length(min=1)),
++            },
++            "vendoring": {
++                Required("url"): FqdnUrl(),
++                Required("revision"): Match(r"^[a-fA-F0-9]{12,40}$"),
++                "patches": Unique([str]),
++                "keep": Unique([str]),
++                "exclude": Unique([str]),
++                "include": Unique([str]),
++                "run_after": Unique([str]),
++            },
++        }
++    )
+ 
+ 
+ def _schema_1_additional(filename, manifest, require_license_file=True):
+     """Additional schema/validity checks"""
+ 
+     # LICENSE file must exist.
+-    if require_license_file and 'origin' in manifest:
+-        files = [f.lower() for f in os.listdir(os.path.dirname(filename))
+-                 if f.lower().startswith('license')]
+-        if not ('license' in files
+-                or 'license.txt' in files
+-                or 'license.rst' in files
+-                or 'license.html' in files
+-                or 'license.md' in files):
+-            license = manifest['origin']['license']
++    if require_license_file and "origin" in manifest:
++        files = [
++            f.lower()
++            for f in os.listdir(os.path.dirname(filename))
++            if f.lower().startswith("license")
++        ]
++        if not (
++            "license" in files
++            or "license.txt" in files
++            or "license.rst" in files
++            or "license.html" in files
++            or "license.md" in files
++        ):
++            license = manifest["origin"]["license"]
+             if isinstance(license, list):
+-                license = '/'.join(license)
+-            raise ValueError('Failed to find %s LICENSE file' % license)
++                license = "/".join(license)
++            raise ValueError("Failed to find %s LICENSE file" % license)
+ 
+     # Cannot vendor without an origin.
+-    if 'vendoring' in manifest and 'origin' not in manifest:
++    if "vendoring" in manifest and "origin" not in manifest:
+         raise ValueError('"vendoring" requires an "origin"')
+ 
+     # Check for a simple YAML file
+-    with open(filename, 'r') as f:
++    with open(filename, "r") as f:
+         has_schema = False
+         for line in f.readlines():
+             m = RE_SECTION(line)
+             if m:
+-                if m.group(1) == 'schema':
++                if m.group(1) == "schema":
+                     has_schema = True
+                     break
+         if not has_schema:
+-            raise ValueError('Not simple YAML')
++            raise ValueError("Not simple YAML")
+ 
+     # Verify YAML can be updated.
+-    if 'vendor' in manifest:
+-        update_moz_yaml(filename, '', '', verify=False, write=True)
++    if "vendor" in manifest:
++        update_moz_yaml(filename, "", "", verify=False, write=True)
+ 
+ 
+ class License(object):
+     """Voluptuous validator which verifies the license(s) are valid as per our
+     whitelist."""
+ 
+     def __call__(self, values):
+         if isinstance(values, str):
+             values = [values]
+         elif not isinstance(values, list):
+-            raise ValueError('Must be string or list')
++            raise ValueError("Must be string or list")
+         for v in values:
+             if v not in VALID_LICENSES:
+-                raise ValueError('Bad License')
++                raise ValueError("Bad License")
+         return values
+ 
+     def __repr__(self):
+-        return 'License'
++        return "License"
+diff --git a/python/mozbuild/mozbuild/vendor/vendor_aom.py b/python/mozbuild/mozbuild/vendor/vendor_aom.py
+--- a/python/mozbuild/mozbuild/vendor/vendor_aom.py
++++ b/python/mozbuild/mozbuild/vendor/vendor_aom.py
+@@ -1,207 +1,227 @@
+ # This Source Code Form is subject to the terms of the Mozilla Public
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, # You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+ from __future__ import absolute_import, print_function, unicode_literals
+ 
+ import logging
+-from mozbuild.base import (
+-    MozbuildObject,
+-)
++from mozbuild.base import MozbuildObject
+ import mozfile
+ import mozpack.path as mozpath
+ import os
+ import requests
+ import re
+ import sys
+ import tarfile
+ from urllib.parse import urlparse
+ 
+ 
+ class VendorAOM(MozbuildObject):
+     def upstream_snapshot(self, revision):
+-        '''Construct a url for a tarball snapshot of the given revision.'''
+-        if 'googlesource' in self.repo_url:
+-            return mozpath.join(self.repo_url, '+archive', revision + '.tar.gz')
+-        elif 'github' in self.repo_url:
+-            return mozpath.join(self.repo_url, 'archive', revision + '.tar.gz')
++        """Construct a url for a tarball snapshot of the given revision."""
++        if "googlesource" in self.repo_url:
++            return mozpath.join(self.repo_url, "+archive", revision + ".tar.gz")
++        elif "github" in self.repo_url:
++            return mozpath.join(self.repo_url, "archive", revision + ".tar.gz")
+         else:
+-            raise ValueError('Unknown git host, no snapshot lookup method')
++            raise ValueError("Unknown git host, no snapshot lookup method")
+ 
+     def upstream_commit(self, revision):
+-        '''Convert a revision to a git commit and timestamp.
++        """Convert a revision to a git commit and timestamp.
+ 
+         Ask the upstream repo to convert the requested revision to
+         a git commit id and timestamp, so we can be precise in
+-        what we're vendoring.'''
+-        if 'googlesource' in self.repo_url:
++        what we're vendoring."""
++        if "googlesource" in self.repo_url:
+             return self.upstream_googlesource_commit(revision)
+-        elif 'github' in self.repo_url:
++        elif "github" in self.repo_url:
+             return self.upstream_github_commit(revision)
+         else:
+-            raise ValueError('Unknown git host, no commit lookup method')
++            raise ValueError("Unknown git host, no commit lookup method")
+ 
+     def upstream_validate(self, url):
+-        '''Validate repository urls to make sure we can handle them.'''
++        """Validate repository urls to make sure we can handle them."""
+         host = urlparse(url).netloc
+-        valid_domains = ('googlesource.com', 'github.com')
++        valid_domains = ("googlesource.com", "github.com")
+         if not any(filter(lambda domain: domain in host, valid_domains)):
+-            self.log(logging.ERROR, 'upstream_url', {},
+-                     '''Unsupported git host %s; cannot fetch snapshots.
++            self.log(
++                logging.ERROR,
++                "upstream_url",
++                {},
++                """Unsupported git host %s; cannot fetch snapshots.
+ 
+-Please set a repository url with --repo on either googlesource or github.''' % host)
++Please set a repository url with --repo on either googlesource or github."""
++                % host,
++            )
+             sys.exit(1)
+ 
+     def upstream_googlesource_commit(self, revision):
+-        '''Query gitiles for a git commit and timestamp.'''
+-        url = mozpath.join(self.repo_url, '+', revision + '?format=JSON')
+-        self.log(logging.INFO, 'fetch', {'url': url},
+-                 'Fetching commit id from {url}')
++        """Query gitiles for a git commit and timestamp."""
++        url = mozpath.join(self.repo_url, "+", revision + "?format=JSON")
++        self.log(logging.INFO, "fetch", {"url": url}, "Fetching commit id from {url}")
+         req = requests.get(url)
+         req.raise_for_status()
+         try:
+             info = req.json()
+         except ValueError:
+             # As of 2017 May, googlesource sends 4 garbage characters
+             # at the beginning of the json response. Work around this.
+             # https://bugs.chromium.org/p/chromium/issues/detail?id=718550
+             import json
++
+             info = json.loads(req.text[4:])
+-        return (info['commit'], info['committer']['time'])
++        return (info["commit"], info["committer"]["time"])
+ 
+     def upstream_github_commit(self, revision):
+-        '''Query the github api for a git commit id and timestamp.'''
+-        github_api = 'https://api.github.com/'
++        """Query the github api for a git commit id and timestamp."""
++        github_api = "https://api.github.com/"
+         repo = urlparse(self.repo_url).path[1:]
+-        url = mozpath.join(github_api, 'repos', repo, 'commits', revision)
+-        self.log(logging.INFO, 'fetch', {'url': url},
+-                 'Fetching commit id from {url}')
++        url = mozpath.join(github_api, "repos", repo, "commits", revision)
++        self.log(logging.INFO, "fetch", {"url": url}, "Fetching commit id from {url}")
+         req = requests.get(url)
+         req.raise_for_status()
+         info = req.json()
+-        return (info['sha'], info['commit']['committer']['date'])
++        return (info["sha"], info["commit"]["committer"]["date"])
+ 
+     def fetch_and_unpack(self, revision, target):
+-        '''Fetch and unpack upstream source'''
++        """Fetch and unpack upstream source"""
+         url = self.upstream_snapshot(revision)
+-        self.log(logging.INFO, 'fetch', {'url': url}, 'Fetching {url}')
+-        prefix = 'aom-' + revision
+-        filename = prefix + '.tar.gz'
+-        with open(filename, 'wb') as f:
++        self.log(logging.INFO, "fetch", {"url": url}, "Fetching {url}")
++        prefix = "aom-" + revision
++        filename = prefix + ".tar.gz"
++        with open(filename, "wb") as f:
+             req = requests.get(url, stream=True)
+             for data in req.iter_content(4096):
+                 f.write(data)
+         tar = tarfile.open(filename)
+-        bad_paths = filter(lambda name: name.startswith('/') or '..' in name,
+-                           tar.getnames())
++        bad_paths = filter(
++            lambda name: name.startswith("/") or ".." in name, tar.getnames()
++        )
+         if any(bad_paths):
+-            raise Exception("Tar archive contains non-local paths,"
+-                            "e.g. '%s'" % bad_paths[0])
+-        self.log(logging.INFO, 'rm_vendor_dir', {}, 'rm -rf %s' % target)
++            raise Exception(
++                "Tar archive contains non-local paths," "e.g. '%s'" % bad_paths[0]
++            )
++        self.log(logging.INFO, "rm_vendor_dir", {}, "rm -rf %s" % target)
+         mozfile.remove(target)
+-        self.log(logging.INFO, 'unpack', {}, 'Unpacking upstream files.')
++        self.log(logging.INFO, "unpack", {}, "Unpacking upstream files.")
+         tar.extractall(target)
+         # Github puts everything properly down a directory; move it up.
+         if all(map(lambda name: name.startswith(prefix), tar.getnames())):
+             tardir = mozpath.join(target, prefix)
+-            os.system('mv %s/* %s/.* %s' % (tardir, tardir, target))
++            os.system("mv %s/* %s/.* %s" % (tardir, tardir, target))
+             os.rmdir(tardir)
+         # Remove the tarball.
+         mozfile.remove(filename)
+ 
+     def update_readme(self, revision, timestamp, target):
+-        filename = mozpath.join(target, 'README_MOZILLA')
++        filename = mozpath.join(target, "README_MOZILLA")
+         with open(filename) as f:
+             readme = f.read()
+ 
+-        prefix = 'The git commit ID used was'
++        prefix = "The git commit ID used was"
+         if prefix in readme:
+-            new_readme = re.sub(prefix + ' [v\.a-f0-9]+.*$',
+-                                prefix + ' %s (%s).' % (revision, timestamp),
+-                                readme)
++            new_readme = re.sub(
++                prefix + " [v\.a-f0-9]+.*$",
++                prefix + " %s (%s)." % (revision, timestamp),
++                readme,
++            )
+         else:
+-            new_readme = '%s\n\n%s %s.' % (readme, prefix, revision)
++            new_readme = "%s\n\n%s %s." % (readme, prefix, revision)
+ 
+-        prefix = 'The last update was pulled from'
+-        new_readme = re.sub(prefix + ' https*://.*',
+-                            prefix + ' %s' % self.repo_url,
+-                            new_readme)
++        prefix = "The last update was pulled from"
++        new_readme = re.sub(
++            prefix + " https*://.*", prefix + " %s" % self.repo_url, new_readme
++        )
+ 
+         if readme != new_readme:
+-            with open(filename, 'w') as f:
++            with open(filename, "w") as f:
+                 f.write(new_readme)
+ 
+     def clean_upstream(self, target):
+-        '''Remove files we don't want to import.'''
+-        mozfile.remove(mozpath.join(target, '.gitattributes'))
+-        mozfile.remove(mozpath.join(target, '.gitignore'))
+-        mozfile.remove(mozpath.join(target, 'build', '.gitattributes'))
+-        mozfile.remove(mozpath.join(target, 'build', '.gitignore'))
++        """Remove files we don't want to import."""
++        mozfile.remove(mozpath.join(target, ".gitattributes"))
++        mozfile.remove(mozpath.join(target, ".gitignore"))
++        mozfile.remove(mozpath.join(target, "build", ".gitattributes"))
++        mozfile.remove(mozpath.join(target, "build", ".gitignore"))
+ 
+     def generate_sources(self, target):
+-        '''
++        """
+         Run the library's native build system to update ours.
+ 
+         Invoke configure for each supported platform to generate
+         appropriate config and header files, then invoke the
+         makefile to obtain a list of source files, writing
+         these out in the appropriate format for our build
+         system to use.
+-        '''
+-        config_dir = mozpath.join(target, 'config')
+-        self.log(logging.INFO, 'rm_confg_dir', {}, 'rm -rf %s' % config_dir)
++        """
++        config_dir = mozpath.join(target, "config")
++        self.log(logging.INFO, "rm_confg_dir", {}, "rm -rf %s" % config_dir)
+         mozfile.remove(config_dir)
+-        self.run_process(args=['./generate_sources_mozbuild.sh'],
+-                         cwd=target, log_name='generate_sources')
++        self.run_process(
++            args=["./generate_sources_mozbuild.sh"],
++            cwd=target,
++            log_name="generate_sources",
++        )
+ 
+     def check_modified_files(self):
+-        '''
++        """
+         Ensure that there aren't any uncommitted changes to files
+         in the working copy, since we're going to change some state
+         on the user.
+-        '''
+-        modified = self.repository.get_changed_files('M')
++        """
++        modified = self.repository.get_changed_files("M")
+         if modified:
+-            self.log(logging.ERROR, 'modified_files', {},
+-                     '''You have uncommitted changes to the following files:
++            self.log(
++                logging.ERROR,
++                "modified_files",
++                {},
++                """You have uncommitted changes to the following files:
+ 
+ {files}
+ 
+ Please commit or stash these changes before vendoring, or re-run with `--ignore-modified`.
+-'''.format(files='\n'.join(sorted(modified))))
++""".format(
++                    files="\n".join(sorted(modified))
++                ),
++            )
+             sys.exit(1)
+ 
+     def vendor(self, revision, repo, ignore_modified=False):
+         self.populate_logger()
+         self.log_manager.enable_unstructured()
+ 
+         if not ignore_modified:
+             self.check_modified_files()
+         if not revision:
+-            revision = 'master'
++            revision = "master"
+         if repo:
+             self.repo_url = repo
+         else:
+-            self.repo_url = 'https://aomedia.googlesource.com/aom/'
++            self.repo_url = "https://aomedia.googlesource.com/aom/"
+         self.upstream_validate(self.repo_url)
+ 
+         commit, timestamp = self.upstream_commit(revision)
+ 
+-        vendor_dir = mozpath.join(self.topsrcdir, 'third_party/aom')
++        vendor_dir = mozpath.join(self.topsrcdir, "third_party/aom")
+         self.fetch_and_unpack(commit, vendor_dir)
+-        self.log(logging.INFO, 'clean_upstream', {},
+-                 '''Removing unnecessary files.''')
++        self.log(logging.INFO, "clean_upstream", {}, """Removing unnecessary files.""")
+         self.clean_upstream(vendor_dir)
+-        glue_dir = mozpath.join(self.topsrcdir, 'media/libaom')
+-        self.log(logging.INFO, 'generate_sources', {},
+-                 '''Generating build files...''')
++        glue_dir = mozpath.join(self.topsrcdir, "media/libaom")
++        self.log(logging.INFO, "generate_sources", {}, """Generating build files...""")
+         self.generate_sources(glue_dir)
+-        self.log(logging.INFO, 'update_readme', {},
+-                 '''Updating README_MOZILLA.''')
++        self.log(logging.INFO, "update_readme", {}, """Updating README_MOZILLA.""")
+         self.update_readme(commit, timestamp, glue_dir)
+-        self.log(logging.INFO, 'add_remove_files', {},
+-                 '''Registering changes with version control.''')
++        self.log(
++            logging.INFO,
++            "add_remove_files",
++            {},
++            """Registering changes with version control.""",
++        )
+         self.repository.add_remove_files(vendor_dir, glue_dir)
+         self.repository.add_remove_files(glue_dir)
+-        self.log(logging.INFO, 'done', {'revision': revision},
+-                 '''Update to aom version '{revision}' ready to commit.''')
++        self.log(
++            logging.INFO,
++            "done",
++            {"revision": revision},
++            """Update to aom version '{revision}' ready to commit.""",
++        )
+diff --git a/python/mozbuild/mozbuild/vendor/vendor_dav1d.py b/python/mozbuild/mozbuild/vendor/vendor_dav1d.py
+--- a/python/mozbuild/mozbuild/vendor/vendor_dav1d.py
++++ b/python/mozbuild/mozbuild/vendor/vendor_dav1d.py
+@@ -1,172 +1,190 @@
+ # This Source Code Form is subject to the terms of the Mozilla Public
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, # You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+ from __future__ import absolute_import, print_function, unicode_literals
+ 
+ import logging
+-from mozbuild.base import (
+-    MozbuildObject,
+-)
++from mozbuild.base import MozbuildObject
+ import mozfile
+ import mozpack.path as mozpath
+ import os
+ import requests
+ import re
+ import sys
+ import tarfile
+ from urllib.parse import urlparse
+ 
+ 
+ class VendorDav1d(MozbuildObject):
+     def upstream_snapshot(self, revision):
+-        '''Construct a url for a tarball snapshot of the given revision.'''
+-        if 'code.videolan.org' in self.repo_url:
+-            return mozpath.join(self.repo_url, '-', 'archive', revision + '.tar.gz')
++        """Construct a url for a tarball snapshot of the given revision."""
++        if "code.videolan.org" in self.repo_url:
++            return mozpath.join(self.repo_url, "-", "archive", revision + ".tar.gz")
+         else:
+-            raise ValueError('Unknown git host, no snapshot lookup method')
++            raise ValueError("Unknown git host, no snapshot lookup method")
+ 
+     def upstream_commit(self, revision):
+-        '''Convert a revision to a git commit and timestamp.
++        """Convert a revision to a git commit and timestamp.
+ 
+         Ask the upstream repo to convert the requested revision to
+         a git commit id and timestamp, so we can be precise in
+-        what we're vendoring.'''
+-        if 'code.videolan.org' in self.repo_url:
++        what we're vendoring."""
++        if "code.videolan.org" in self.repo_url:
+             return self.upstream_gitlab_commit(revision)
+         else:
+-            raise ValueError('Unknown git host, no commit lookup method')
++            raise ValueError("Unknown git host, no commit lookup method")
+ 
+     def upstream_validate(self, url):
+-        '''Validate repository urls to make sure we can handle them.'''
++        """Validate repository urls to make sure we can handle them."""
+         host = urlparse(url).netloc
+-        valid_domains = ('code.videolan.org')
++        valid_domains = "code.videolan.org"
+         if not any(filter(lambda domain: domain in host, valid_domains)):
+-            self.log(logging.ERROR, 'upstream_url', {},
+-                     '''Unsupported git host %s; cannot fetch snapshots.
++            self.log(
++                logging.ERROR,
++                "upstream_url",
++                {},
++                """Unsupported git host %s; cannot fetch snapshots.
+ 
+-Please set a repository url with --repo on either googlesource or github.''' % host)
++Please set a repository url with --repo on either googlesource or github."""
++                % host,
++            )
+             sys.exit(1)
+ 
+     def upstream_gitlab_commit(self, revision):
+-        '''Query the github api for a git commit id and timestamp.'''
+-        gitlab_api = 'https://code.videolan.org/api/v4/projects/videolan%2Fdav1d/repository/commits'  # noqa
++        """Query the github api for a git commit id and timestamp."""
++        gitlab_api = "https://code.videolan.org/api/v4/projects/videolan%2Fdav1d/repository/commits"  # noqa
+         url = mozpath.join(gitlab_api, revision)
+-        self.log(logging.INFO, 'fetch', {'url': url},
+-                 'Fetching commit id from {url}')
++        self.log(logging.INFO, "fetch", {"url": url}, "Fetching commit id from {url}")
+         req = requests.get(url)
+         req.raise_for_status()
+         info = req.json()
+-        return (info['id'], info['committed_date'])
++        return (info["id"], info["committed_date"])
+ 
+     def fetch_and_unpack(self, revision, target):
+-        '''Fetch and unpack upstream source'''
++        """Fetch and unpack upstream source"""
+         url = self.upstream_snapshot(revision)
+-        self.log(logging.INFO, 'fetch', {'url': url}, 'Fetching {url}')
+-        prefix = 'dav1d-' + revision
+-        filename = prefix + '.tar.gz'
+-        with open(filename, 'wb') as f:
++        self.log(logging.INFO, "fetch", {"url": url}, "Fetching {url}")
++        prefix = "dav1d-" + revision
++        filename = prefix + ".tar.gz"
++        with open(filename, "wb") as f:
+             req = requests.get(url, stream=True)
+             for data in req.iter_content(4096):
+                 f.write(data)
+         tar = tarfile.open(filename)
+-        bad_paths = filter(lambda name: name.startswith('/') or '..' in name,
+-                           tar.getnames())
++        bad_paths = filter(
++            lambda name: name.startswith("/") or ".." in name, tar.getnames()
++        )
+         if any(bad_paths):
+-            raise Exception("Tar archive contains non-local paths,"
+-                            "e.g. '%s'" % bad_paths[0])
+-        self.log(logging.INFO, 'rm_vendor_dir', {}, 'rm -rf %s' % target)
++            raise Exception(
++                "Tar archive contains non-local paths," "e.g. '%s'" % bad_paths[0]
++            )
++        self.log(logging.INFO, "rm_vendor_dir", {}, "rm -rf %s" % target)
+         mozfile.remove(target)
+-        self.log(logging.INFO, 'unpack', {}, 'Unpacking upstream files.')
++        self.log(logging.INFO, "unpack", {}, "Unpacking upstream files.")
+         tar.extractall(target)
+         # Github puts everything properly down a directory; move it up.
+         if all(map(lambda name: name.startswith(prefix), tar.getnames())):
+             tardir = mozpath.join(target, prefix)
+-            os.system('mv %s/* %s/.* %s' % (tardir, tardir, target))
++            os.system("mv %s/* %s/.* %s" % (tardir, tardir, target))
+             os.rmdir(tardir)
+         # Remove the tarball.
+         mozfile.remove(filename)
+ 
+     def update_yaml(self, revision, timestamp, target):
+-        filename = mozpath.join(target, 'moz.yaml')
++        filename = mozpath.join(target, "moz.yaml")
+         with open(filename) as f:
+             yaml = f.read()
+ 
+-        prefix = '  release: commit'
++        prefix = "  release: commit"
+         if prefix in yaml:
+-            new_yaml = re.sub(prefix + ' [v\.a-f0-9]+.*$',
+-                              prefix + ' %s (%s).' % (revision, timestamp),
+-                              yaml, flags=re.MULTILINE)
++            new_yaml = re.sub(
++                prefix + " [v\.a-f0-9]+.*$",
++                prefix + " %s (%s)." % (revision, timestamp),
++                yaml,
++                flags=re.MULTILINE,
++            )
+         else:
+-            new_yaml = '%s\n\n%s %s.' % (yaml, prefix, revision)
++            new_yaml = "%s\n\n%s %s." % (yaml, prefix, revision)
+ 
+         if yaml != new_yaml:
+-            with open(filename, 'w') as f:
++            with open(filename, "w") as f:
+                 f.write(new_yaml)
+ 
+     def update_vcs_version(self, revision, vendor_dir, glue_dir):
+-        src_filename = mozpath.join(vendor_dir, 'include/vcs_version.h.in')
+-        dst_filename = mozpath.join(glue_dir, 'vcs_version.h')
++        src_filename = mozpath.join(vendor_dir, "include/vcs_version.h.in")
++        dst_filename = mozpath.join(glue_dir, "vcs_version.h")
+         with open(src_filename) as f:
+             vcs_version_in = f.read()
+-        vcs_version = vcs_version_in.replace('@VCS_TAG@', revision)
+-        with open(dst_filename, 'w') as f:
++        vcs_version = vcs_version_in.replace("@VCS_TAG@", revision)
++        with open(dst_filename, "w") as f:
+             f.write(vcs_version)
+ 
+     def clean_upstream(self, target):
+-        '''Remove files we don't want to import.'''
+-        mozfile.remove(mozpath.join(target, '.gitattributes'))
+-        mozfile.remove(mozpath.join(target, '.gitignore'))
+-        mozfile.remove(mozpath.join(target, 'build', '.gitattributes'))
+-        mozfile.remove(mozpath.join(target, 'build', '.gitignore'))
++        """Remove files we don't want to import."""
++        mozfile.remove(mozpath.join(target, ".gitattributes"))
++        mozfile.remove(mozpath.join(target, ".gitignore"))
++        mozfile.remove(mozpath.join(target, "build", ".gitattributes"))
++        mozfile.remove(mozpath.join(target, "build", ".gitignore"))
+ 
+     def check_modified_files(self):
+-        '''
++        """
+         Ensure that there aren't any uncommitted changes to files
+         in the working copy, since we're going to change some state
+         on the user.
+-        '''
+-        modified = self.repository.get_changed_files('M')
++        """
++        modified = self.repository.get_changed_files("M")
+         if modified:
+-            self.log(logging.ERROR, 'modified_files', {},
+-                     '''You have uncommitted changes to the following files:
++            self.log(
++                logging.ERROR,
++                "modified_files",
++                {},
++                """You have uncommitted changes to the following files:
+ 
+ {files}
+ 
+ Please commit or stash these changes before vendoring, or re-run with `--ignore-modified`.
+-'''.format(files='\n'.join(sorted(modified))))
++""".format(
++                    files="\n".join(sorted(modified))
++                ),
++            )
+             sys.exit(1)
+ 
+     def vendor(self, revision, repo, ignore_modified=False):
+         self.populate_logger()
+         self.log_manager.enable_unstructured()
+ 
+         if not ignore_modified:
+             self.check_modified_files()
+         if not revision:
+-            revision = 'master'
++            revision = "master"
+         if repo:
+             self.repo_url = repo
+         else:
+-            self.repo_url = 'https://code.videolan.org/videolan/dav1d'
++            self.repo_url = "https://code.videolan.org/videolan/dav1d"
+         self.upstream_validate(self.repo_url)
+ 
+         commit, timestamp = self.upstream_commit(revision)
+ 
+-        vendor_dir = mozpath.join(self.topsrcdir, 'third_party/dav1d')
++        vendor_dir = mozpath.join(self.topsrcdir, "third_party/dav1d")
+         self.fetch_and_unpack(commit, vendor_dir)
+-        self.log(logging.INFO, 'clean_upstream', {},
+-                 '''Removing unnecessary files.''')
++        self.log(logging.INFO, "clean_upstream", {}, """Removing unnecessary files.""")
+         self.clean_upstream(vendor_dir)
+-        glue_dir = mozpath.join(self.topsrcdir, 'media/libdav1d')
+-        self.log(logging.INFO, 'update_moz.yaml', {},
+-                 '''Updating moz.yaml.''')
++        glue_dir = mozpath.join(self.topsrcdir, "media/libdav1d")
++        self.log(logging.INFO, "update_moz.yaml", {}, """Updating moz.yaml.""")
+         self.update_yaml(commit, timestamp, glue_dir)
+-        self.log(logging.INFO, 'update_vcs_version', {},
+-                 '''Updating vcs_version.h.''')
++        self.log(logging.INFO, "update_vcs_version", {}, """Updating vcs_version.h.""")
+         self.update_vcs_version(commit, vendor_dir, glue_dir)
+-        self.log(logging.INFO, 'add_remove_files', {},
+-                 '''Registering changes with version control.''')
++        self.log(
++            logging.INFO,
++            "add_remove_files",
++            {},
++            """Registering changes with version control.""",
++        )
+         self.repository.add_remove_files(vendor_dir, glue_dir)
+-        self.log(logging.INFO, 'done', {'revision': revision},
+-                 '''Update to dav1d version '{revision}' ready to commit.''')
++        self.log(
++            logging.INFO,
++            "done",
++            {"revision": revision},
++            """Update to dav1d version '{revision}' ready to commit.""",
++        )
+diff --git a/python/mozbuild/mozbuild/vendor/vendor_manifest.py b/python/mozbuild/mozbuild/vendor/vendor_manifest.py
+--- a/python/mozbuild/mozbuild/vendor/vendor_manifest.py
++++ b/python/mozbuild/mozbuild/vendor/vendor_manifest.py
+@@ -9,13 +9,13 @@ import sys
+ from . import moz_yaml
+ 
+ 
+ def verify_manifests(files):
+     success = True
+     for fn in files:
+         try:
+             moz_yaml.load_moz_yaml(fn)
+-            print('%s: OK' % fn)
++            print("%s: OK" % fn)
+         except moz_yaml.VerifyError as e:
+             success = False
+             print(e)
+     sys.exit(0 if success else 1)
+diff --git a/python/mozbuild/mozbuild/vendor/vendor_python.py b/python/mozbuild/mozbuild/vendor/vendor_python.py
+--- a/python/mozbuild/mozbuild/vendor/vendor_python.py
++++ b/python/mozbuild/mozbuild/vendor/vendor_python.py
+@@ -11,137 +11,157 @@ import subprocess
+ import mozfile
+ import mozpack.path as mozpath
+ from mozbuild.base import MozbuildObject
+ from mozfile import TemporaryDirectory
+ from mozpack.files import FileFinder
+ 
+ 
+ class VendorPython(MozbuildObject):
+-
+     def vendor(self, packages=None, with_windows_wheel=False):
+         self.populate_logger()
+         self.log_manager.enable_unstructured()
+ 
+-        vendor_dir = mozpath.join(
+-            self.topsrcdir, os.path.join('third_party', 'python'))
++        vendor_dir = mozpath.join(self.topsrcdir, os.path.join("third_party", "python"))
+ 
+         packages = packages or []
+         if with_windows_wheel and len(packages) != 1:
+-            raise Exception('--with-windows-wheel is only supported for a single package!')
++            raise Exception(
++                "--with-windows-wheel is only supported for a single package!"
++            )
+ 
+         self._activate_virtualenv()
+-        pip_compile = os.path.join(self.virtualenv_manager.bin_path, 'pip-compile')
++        pip_compile = os.path.join(self.virtualenv_manager.bin_path, "pip-compile")
+         if not os.path.exists(pip_compile):
+-            path = os.path.normpath(os.path.join(
+-                self.topsrcdir, 'third_party', 'python', 'pip-tools'))
++            path = os.path.normpath(
++                os.path.join(self.topsrcdir, "third_party", "python", "pip-tools")
++            )
+             self.virtualenv_manager.install_pip_package(path, vendored=True)
+-        spec = os.path.join(vendor_dir, 'requirements.in')
+-        requirements = os.path.join(vendor_dir, 'requirements.txt')
++        spec = os.path.join(vendor_dir, "requirements.in")
++        requirements = os.path.join(vendor_dir, "requirements.txt")
+ 
+         with TemporaryDirectory() as spec_dir:
+-            tmpspec = 'requirements-mach-vendor-python.in'
++            tmpspec = "requirements-mach-vendor-python.in"
+             tmpspec_absolute = os.path.join(spec_dir, tmpspec)
+             shutil.copyfile(spec, tmpspec_absolute)
+             self._update_packages(tmpspec_absolute, packages)
+ 
+             # resolve the dependencies and update requirements.txt
+             subprocess.check_output(
+                 [
+                     pip_compile,
+                     tmpspec,
+-                    '--no-header',
+-                    '--no-index',
+-                    '--output-file', requirements,
+-                    '--generate-hashes'
++                    "--no-header",
++                    "--no-index",
++                    "--output-file",
++                    requirements,
++                    "--generate-hashes",
+                 ],
+                 # Run pip-compile from within the temporary directory so that the "via"
+                 # annotations don't have the non-deterministic temporary path in them.
+-                cwd=spec_dir)
++                cwd=spec_dir,
++            )
+ 
+             with TemporaryDirectory() as tmp:
+                 # use requirements.txt to download archived source distributions of all packages
+-                self.virtualenv_manager._run_pip([
+-                    'download',
+-                    '-r', requirements,
+-                    '--no-deps',
+-                    '--dest', tmp,
+-                    '--no-binary', ':all:',
+-                    '--disable-pip-version-check'])
++                self.virtualenv_manager._run_pip(
++                    [
++                        "download",
++                        "-r",
++                        requirements,
++                        "--no-deps",
++                        "--dest",
++                        tmp,
++                        "--no-binary",
++                        ":all:",
++                        "--disable-pip-version-check",
++                    ]
++                )
+                 if with_windows_wheel:
+                     # This is hardcoded to CPython 2.7 for win64, which is good
+                     # enough for what we need currently. If we need psutil for Python 3
+                     # in the future that could be added here as well.
+-                    self.virtualenv_manager._run_pip([
+-                        'download',
+-                        '--dest', tmp,
+-                        '--no-deps',
+-                        '--only-binary', ':all:',
+-                        '--platform', 'win_amd64',
+-                        '--implementation', 'cp',
+-                        '--python-version', '27',
+-                        '--abi', 'none',
+-                        '--disable-pip-version-check',
+-                        packages[0]])
++                    self.virtualenv_manager._run_pip(
++                        [
++                            "download",
++                            "--dest",
++                            tmp,
++                            "--no-deps",
++                            "--only-binary",
++                            ":all:",
++                            "--platform",
++                            "win_amd64",
++                            "--implementation",
++                            "cp",
++                            "--python-version",
++                            "27",
++                            "--abi",
++                            "none",
++                            "--disable-pip-version-check",
++                            packages[0],
++                        ]
++                    )
+                 self._extract(tmp, vendor_dir)
+ 
+             shutil.copyfile(tmpspec_absolute, spec)
+             self.repository.add_remove_files(vendor_dir)
+ 
+     def _update_packages(self, spec, packages):
+         for package in packages:
+-            if not all(package.partition('==')):
+-                raise Exception('Package {} must be in the format name==version'.format(package))
++            if not all(package.partition("==")):
++                raise Exception(
++                    "Package {} must be in the format name==version".format(package)
++                )
+ 
+         requirements = {}
+-        with open(spec, 'r') as f:
++        with open(spec, "r") as f:
+             comments = []
+             for line in f.readlines():
+                 line = line.strip()
+-                if not line or line.startswith('#'):
++                if not line or line.startswith("#"):
+                     comments.append(line)
+                     continue
+-                name, version = line.split('==')
++                name, version = line.split("==")
+                 requirements[name] = version, comments
+                 comments = []
+ 
+         for package in packages:
+-            name, version = package.split('==')
++            name, version = package.split("==")
+             requirements[name] = version, []
+ 
+-        with open(spec, 'w') as f:
++        with open(spec, "w") as f:
+             for name, (version, comments) in sorted(requirements.items()):
+                 if comments:
+-                    f.write('{}\n'.format('\n'.join(comments)))
+-                f.write('{}=={}\n'.format(name, version))
++                    f.write("{}\n".format("\n".join(comments)))
++                f.write("{}=={}\n".format(name, version))
+ 
+     def _extract(self, src, dest):
+         """extract source distribution into vendor directory"""
+         finder = FileFinder(src)
+-        for path, _ in finder.find('*'):
++        for path, _ in finder.find("*"):
+             base, ext = os.path.splitext(path)
+-            if ext == '.whl':
++            if ext == ".whl":
+                 # Wheels would extract into a directory with the name of the package, but
+                 # we want the platform signifiers, minus the version number.
+                 # Wheel filenames look like:
+                 # {distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-{platform tag}
+-                bits = base.split('-')
++                bits = base.split("-")
+ 
+                 # Remove the version number.
+                 bits.pop(1)
+-                target = os.path.join(dest, '-'.join(bits))
++                target = os.path.join(dest, "-".join(bits))
+                 mozfile.remove(target)  # remove existing version of vendored package
+                 os.mkdir(target)
+                 mozfile.extract(os.path.join(finder.base, path), target)
+             else:
+                 # packages extract into package-version directory name and we strip the version
+                 tld = mozfile.extract(os.path.join(finder.base, path), dest)[0]
+-                target = os.path.join(dest, tld.rpartition('-')[0])
++                target = os.path.join(dest, tld.rpartition("-")[0])
+                 mozfile.remove(target)  # remove existing version of vendored package
+                 mozfile.move(tld, target)
+             # If any files inside the vendored package were symlinks, turn them into normal files
+             # because hg.mozilla.org forbids symlinks in the repository.
+             link_finder = FileFinder(target)
+-            for _, f in link_finder.find('**'):
++            for _, f in link_finder.find("**"):
+                 if os.path.islink(f.path):
+                     link_target = os.path.realpath(f.path)
+                     os.unlink(f.path)
+                     shutil.copyfile(link_target, f.path)
+diff --git a/python/mozbuild/mozbuild/vendor/vendor_rust.py b/python/mozbuild/mozbuild/vendor/vendor_rust.py
+--- a/python/mozbuild/mozbuild/vendor/vendor_rust.py
++++ b/python/mozbuild/mozbuild/vendor/vendor_rust.py
+@@ -16,104 +16,124 @@ from distutils.version import LooseVersi
+ 
+ import mozpack.path as mozpath
+ from mozbuild.base import (
+     BuildEnvironmentNotFoundException,
+     MozbuildObject,
+ )
+ 
+ 
+-CARGO_LOCK_NOTICE = '''
++CARGO_LOCK_NOTICE = """
+ NOTE: `cargo vendor` may have made changes to your Cargo.lock. To restore your
+ Cargo.lock to the HEAD version, run `git checkout -- Cargo.lock` or
+ `hg revert Cargo.lock`.
+-'''
++"""
+ 
+ 
+ class VendorRust(MozbuildObject):
+     def get_cargo_path(self):
+         try:
+-            return self.substs['CARGO']
++            return self.substs["CARGO"]
+         except (BuildEnvironmentNotFoundException, KeyError):
+             # Default if this tree isn't configured.
+             from mozfile import which
+-            cargo = which('cargo')
++            cargo = which("cargo")
+             if not cargo:
+                 raise OSError(errno.ENOENT, "Could not find 'cargo' on your $PATH.")
+             return cargo
+ 
+     def check_cargo_version(self, cargo):
+-        '''
++        """
+         Ensure that cargo is new enough. cargo 1.37 added support
+         for the vendor command.
+-        '''
+-        out = subprocess.check_output([cargo, '--version']).splitlines()[0].decode('UTF-8')
+-        if not out.startswith('cargo'):
++        """
++        out = (
++            subprocess.check_output([cargo, "--version"])
++            .splitlines()[0]
++            .decode("UTF-8")
++        )
++        if not out.startswith("cargo"):
+             return False
+-        return LooseVersion(out.split()[1]) >= '1.37'
++        return LooseVersion(out.split()[1]) >= "1.37"
+ 
+     def check_modified_files(self):
+-        '''
++        """
+         Ensure that there aren't any uncommitted changes to files
+         in the working copy, since we're going to change some state
+         on the user. Allow changes to Cargo.{toml,lock} since that's
+         likely to be a common use case.
+-        '''
+-        modified = [f for f in self.repository.get_changed_files(
+-            'M') if os.path.basename(f) not in ('Cargo.toml', 'Cargo.lock')]
++        """
++        modified = [
++            f
++            for f in self.repository.get_changed_files("M")
++            if os.path.basename(f) not in ("Cargo.toml", "Cargo.lock")
++        ]
+         if modified:
+-            self.log(logging.ERROR, 'modified_files', {},
+-                     '''You have uncommitted changes to the following files:
++            self.log(
++                logging.ERROR,
++                "modified_files",
++                {},
++                """You have uncommitted changes to the following files:
+ 
+ {files}
+ 
+ Please commit or stash these changes before vendoring, or re-run with `--ignore-modified`.
+-'''.format(files='\n'.join(sorted(modified))))
++""".format(
++                    files="\n".join(sorted(modified))
++                ),
++            )
+             sys.exit(1)
+ 
+     def check_openssl(self):
+-        '''
++        """
+         Set environment flags for building with openssl.
+ 
+         MacOS doesn't include openssl, but the openssl-sys crate used by
+         mach-vendor expects one of the system. It's common to have one
+         installed in /usr/local/opt/openssl by homebrew, but custom link
+         flags are necessary to build against it.
+-        '''
++        """
+ 
+-        test_paths = ['/usr/include', '/usr/local/include']
+-        if any([os.path.exists(os.path.join(path, 'openssl/ssl.h')) for path in test_paths]):
++        test_paths = ["/usr/include", "/usr/local/include"]
++        if any(
++            [os.path.exists(os.path.join(path, "openssl/ssl.h")) for path in test_paths]
++        ):
+             # Assume we can use one of these system headers.
+             return None
+ 
+-        if os.path.exists('/usr/local/opt/openssl/include/openssl/ssl.h'):
++        if os.path.exists("/usr/local/opt/openssl/include/openssl/ssl.h"):
+             # Found a likely homebrew install.
+-            self.log(logging.INFO, 'openssl', {},
+-                     'Using OpenSSL in /usr/local/opt/openssl')
++            self.log(
++                logging.INFO, "openssl", {}, "Using OpenSSL in /usr/local/opt/openssl"
++            )
+             return {
+-                'OPENSSL_INCLUDE_DIR': '/usr/local/opt/openssl/include',
+-                'OPENSSL_LIB_DIR': '/usr/local/opt/openssl/lib',
++                "OPENSSL_INCLUDE_DIR": "/usr/local/opt/openssl/include",
++                "OPENSSL_LIB_DIR": "/usr/local/opt/openssl/lib",
+             }
+ 
+-        self.log(logging.ERROR, 'openssl', {}, "OpenSSL not found!")
++        self.log(logging.ERROR, "openssl", {}, "OpenSSL not found!")
+         return None
+ 
+     def _ensure_cargo(self):
+-        '''
++        """
+         Ensures all the necessary cargo bits are installed.
+ 
+         Returns the path to cargo if successful, None otherwise.
+-        '''
++        """
+         cargo = self.get_cargo_path()
+         if not self.check_cargo_version(cargo):
+-            self.log(logging.ERROR, 'cargo_version', {},
+-                     'Cargo >= 1.37 required (install Rust 1.37 or newer)')
++            self.log(
++                logging.ERROR,
++                "cargo_version",
++                {},
++                "Cargo >= 1.37 required (install Rust 1.37 or newer)",
++            )
+             return None
+         else:
+-            self.log(logging.DEBUG, 'cargo_version', {}, 'cargo is new enough')
++            self.log(logging.DEBUG, "cargo_version", {}, "cargo is new enough")
+ 
+         return cargo
+ 
+     # A whitelist of acceptable license identifiers for the
+     # packages.license field from https://spdx.org/licenses/.  Cargo
+     # documentation claims that values are checked against the above
+     # list and that multiple entries can be separated by '/'.  We
+     # choose to list all combinations instead for the sake of
+@@ -122,39 +142,39 @@ Please commit or stash these changes bef
+     #
+     # It is insufficient to have additions to this whitelist reviewed
+     # solely by a build peer; any additions must be checked by somebody
+     # competent to review licensing minutiae.
+ 
+     # Licenses for code used at runtime. Please see the above comment before
+     # adding anything to this list.
+     RUNTIME_LICENSE_WHITELIST = [
+-        'Apache-2.0',
+-        'Apache-2.0 WITH LLVM-exception',
++        "Apache-2.0",
++        "Apache-2.0 WITH LLVM-exception",
+         # BSD-2-Clause and BSD-3-Clause are ok, but packages using them
+         # must be added to the appropriate section of about:licenses.
+         # To encourage people to remember to do that, we do not whitelist
+         # the licenses themselves, and we require the packages to be added
+         # to RUNTIME_LICENSE_PACKAGE_WHITELIST below.
+-        'CC0-1.0',
+-        'ISC',
+-        'MIT',
+-        'MPL-2.0',
+-        'Unlicense',
+-        'Zlib',
++        "CC0-1.0",
++        "ISC",
++        "MIT",
++        "MPL-2.0",
++        "Unlicense",
++        "Zlib",
+     ]
+ 
+     # Licenses for code used at build time (e.g. code generators). Please see the above
+     # comments before adding anything to this list.
+     BUILDTIME_LICENSE_WHITELIST = {
+-        'BSD-3-Clause': [
+-            'bindgen',
+-            'fuchsia-zircon',
+-            'fuchsia-zircon-sys',
+-            'fuchsia-cprng',
++        "BSD-3-Clause": [
++            "bindgen",
++            "fuchsia-zircon",
++            "fuchsia-zircon-sys",
++            "fuchsia-cprng",
+         ]
+     }
+ 
+     # This whitelist should only be used for packages that use an acceptable
+     # license, but that also need to explicitly mentioned in about:license.
+     RUNTIME_LICENSE_PACKAGE_WHITELIST = {
+         'BSD-2-Clause': [
+             'arrayref',
+@@ -172,20 +192,20 @@ Please commit or stash these changes bef
+     # reviewed.  The table is keyed by package names and maps to the
+     # sha256 hash of the license file that we reviewed.
+     #
+     # As above, it is insufficient to have additions to this whitelist
+     # reviewed solely by a build peer; any additions must be checked by
+     # somebody competent to review licensing minutiae.
+     RUNTIME_LICENSE_FILE_PACKAGE_WHITELIST = {
+         # MIT
+-        'deque': '6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb',
++        "deque": "6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb",
+         # we're whitelisting this fuchsia crate because it doesn't get built in the final
+         # product but has a license-file that needs ignoring
+-        'fuchsia-cprng': '03b114f53e6587a398931762ee11e2395bfdba252a329940e2c8c9e81813845b',
++        "fuchsia-cprng": "03b114f53e6587a398931762ee11e2395bfdba252a329940e2c8c9e81813845b",
+     }
+ 
+     @staticmethod
+     def runtime_license(package, license_string):
+         """Cargo docs say:
+         ---
+         https://doc.rust-lang.org/cargo/reference/manifest.html
+ 
+@@ -204,194 +224,269 @@ Please commit or stash these changes bef
+ 
+         # This specific AND combination has been reviewed for encoding_rs.
+         if (
+             license_string == "(Apache-2.0 OR MIT) AND BSD-3-Clause"
+             and package == "encoding_rs"
+         ):
+             return True
+ 
+-        if re.search(r'\s+AND', license_string):
++        if re.search(r"\s+AND", license_string):
+             return False
+ 
+-        license_list = re.split(r'\s*/\s*|\s+OR\s+', license_string)
++        license_list = re.split(r"\s*/\s*|\s+OR\s+", license_string)
+         for license in license_list:
+             if license in VendorRust.RUNTIME_LICENSE_WHITELIST:
+                 return True
+             if package in VendorRust.RUNTIME_LICENSE_PACKAGE_WHITELIST.get(license, []):
+                 return True
+         return False
+ 
+     def _check_licenses(self, vendor_dir):
+         LICENSE_LINE_RE = re.compile(r'\s*license\s*=\s*"([^"]+)"')
+         LICENSE_FILE_LINE_RE = re.compile(r'\s*license[-_]file\s*=\s*"([^"]+)"')
+ 
+         def verify_acceptable_license(package, license):
+-            self.log(logging.DEBUG, 'package_license', {},
+-                     'has license {}'.format(license))
++            self.log(
++                logging.DEBUG, "package_license", {}, "has license {}".format(license)
++            )
+ 
+             if not self.runtime_license(package, license):
+                 if license not in self.BUILDTIME_LICENSE_WHITELIST:
+-                    self.log(logging.ERROR, 'package_license_error', {},
+-                             '''Package {} has a non-approved license: {}.
++                    self.log(
++                        logging.ERROR,
++                        "package_license_error",
++                        {},
++                        """Package {} has a non-approved license: {}.
+ 
+     Please request license review on the package's license.  If the package's license
+     is approved, please add it to the whitelist of suitable licenses.
+-    '''.format(package, license))
++    """.format(
++                            package, license
++                        ),
++                    )
+                     return False
+                 elif package not in self.BUILDTIME_LICENSE_WHITELIST[license]:
+-                    self.log(logging.ERROR, 'package_license_error', {},
+-                             '''Package {} has a license that is approved for build-time dependencies: {}
++                    self.log(
++                        logging.ERROR,
++                        "package_license_error",
++                        {},
++                        """Package {} has a license that is approved for build-time dependencies: {}
+     but the package itself is not whitelisted as being a build-time only package.
+ 
+     If your package is build-time only, please add it to the whitelist of build-time
+     only packages. Otherwise, you need to request license review on the package's license.
+     If the package's license is approved, please add it to the whitelist of suitable licenses.
+-    '''.format(package, license))
++    """.format(
++                            package, license
++                        ),
++                    )
+                     return False
+ 
+         def check_package(package):
+-            self.log(logging.DEBUG, 'package_check', {},
+-                     'Checking license for {}'.format(package))
++            self.log(
++                logging.DEBUG,
++                "package_check",
++                {},
++                "Checking license for {}".format(package),
++            )
+ 
+-            toml_file = os.path.join(vendor_dir, package, 'Cargo.toml')
++            toml_file = os.path.join(vendor_dir, package, "Cargo.toml")
+ 
+             # pytoml is not sophisticated enough to parse Cargo.toml files
+             # with [target.'cfg(...)'.dependencies sections, so we resort
+             # to scanning individual lines.
+-            with io.open(toml_file, 'r', encoding='utf-8') as f:
+-                license_lines = [l for l in f if l.strip().startswith('license')]
++            with io.open(toml_file, "r", encoding="utf-8") as f:
++                license_lines = [l for l in f if l.strip().startswith("license")]
+                 license_matches = list(
+-                    filter(lambda x: x, [LICENSE_LINE_RE.match(l) for l in license_lines]))
++                    filter(
++                        lambda x: x, [LICENSE_LINE_RE.match(l) for l in license_lines]
++                    )
++                )
+                 license_file_matches = list(
+-                    filter(lambda x: x, [LICENSE_FILE_LINE_RE.match(l) for l in license_lines]))
++                    filter(
++                        lambda x: x,
++                        [LICENSE_FILE_LINE_RE.match(l) for l in license_lines],
++                    )
++                )
+ 
+                 # License information is optional for crates to provide, but
+                 # we require it.
+                 if not license_matches and not license_file_matches:
+-                    self.log(logging.ERROR, 'package_no_license', {},
+-                             'package {} does not provide a license'.format(package))
++                    self.log(
++                        logging.ERROR,
++                        "package_no_license",
++                        {},
++                        "package {} does not provide a license".format(package),
++                    )
+                     return False
+ 
+                 # The Cargo.toml spec suggests that crates should either have
+                 # `license` or `license-file`, but not both.  We might as well
+                 # be defensive about that, though.
+-                if len(license_matches) > 1 or len(license_file_matches) > 1 or \
+-                   license_matches and license_file_matches:
+-                    self.log(logging.ERROR, 'package_many_licenses', {},
+-                             'package {} provides too many licenses'.format(package))
++                if (
++                    len(license_matches) > 1
++                    or len(license_file_matches) > 1
++                    or license_matches
++                    and license_file_matches
++                ):
++                    self.log(
++                        logging.ERROR,
++                        "package_many_licenses",
++                        {},
++                        "package {} provides too many licenses".format(package),
++                    )
+                     return False
+ 
+                 if license_matches:
+                     license = license_matches[0].group(1)
+                     verify_acceptable_license(package, license)
+                 else:
+                     license_file = license_file_matches[0].group(1)
+-                    self.log(logging.DEBUG, 'package_license_file', {},
+-                             'has license-file {}'.format(license_file))
++                    self.log(
++                        logging.DEBUG,
++                        "package_license_file",
++                        {},
++                        "has license-file {}".format(license_file),
++                    )
+ 
+                     if package not in self.RUNTIME_LICENSE_FILE_PACKAGE_WHITELIST:
+-                        self.log(logging.ERROR, 'package_license_file_unknown', {},
+-                                 '''Package {} has an unreviewed license file: {}.
++                        self.log(
++                            logging.ERROR,
++                            "package_license_file_unknown",
++                            {},
++                            """Package {} has an unreviewed license file: {}.
+ 
+ Please request review on the provided license; if approved, the package can be added
+ to the whitelist of packages whose licenses are suitable.
+-'''.format(package, license_file))
++""".format(
++                                package, license_file
++                            ),
++                        )
+                         return False
+ 
+                     approved_hash = self.RUNTIME_LICENSE_FILE_PACKAGE_WHITELIST[package]
+-                    license_contents = open(os.path.join(
+-                        vendor_dir, package, license_file), 'r').read()
+-                    current_hash = hashlib.sha256(license_contents.encode('UTF-8')).hexdigest()
++                    license_contents = open(
++                        os.path.join(vendor_dir, package, license_file), "r"
++                    ).read()
++                    current_hash = hashlib.sha256(
++                        license_contents.encode("UTF-8")
++                    ).hexdigest()
+                     if current_hash != approved_hash:
+-                        self.log(logging.ERROR, 'package_license_file_mismatch', {},
+-                                 '''Package {} has changed its license file: {} (hash {}).
++                        self.log(
++                            logging.ERROR,
++                            "package_license_file_mismatch",
++                            {},
++                            """Package {} has changed its license file: {} (hash {}).
+ 
+ Please request review on the provided license; if approved, please update the
+ license file's hash.
+-'''.format(package, license_file, current_hash))
++""".format(
++                                package, license_file, current_hash
++                            ),
++                        )
+                         return False
+ 
+                 return True
+ 
+         # Force all of the packages to be checked for license information
+         # before reducing via `all`, so all license issues are found in a
+         # single `mach vendor rust` invocation.
+-        results = [check_package(p) for p in os.listdir(vendor_dir)
+-                   if os.path.isdir(os.path.join(vendor_dir, p))]
++        results = [
++            check_package(p)
++            for p in os.listdir(vendor_dir)
++            if os.path.isdir(os.path.join(vendor_dir, p))
++        ]
+         return all(results)
+ 
+-    def vendor(self, ignore_modified=False,
+-               build_peers_said_large_imports_were_ok=False):
++    def vendor(
++        self, ignore_modified=False, build_peers_said_large_imports_were_ok=False
++    ):
+         self.populate_logger()
+         self.log_manager.enable_unstructured()
+         if not ignore_modified:
+             self.check_modified_files()
+ 
+         cargo = self._ensure_cargo()
+         if not cargo:
+             return
+ 
+-        relative_vendor_dir = 'third_party/rust'
++        relative_vendor_dir = "third_party/rust"
+         vendor_dir = mozpath.join(self.topsrcdir, relative_vendor_dir)
+ 
+         # We use check_call instead of mozprocess to ensure errors are displayed.
+         # We do an |update -p| here to regenerate the Cargo.lock file with minimal
+         # changes. See bug 1324462
+-        subprocess.check_call([cargo, 'update', '-p', 'gkrust'], cwd=self.topsrcdir)
++        subprocess.check_call([cargo, "update", "-p", "gkrust"], cwd=self.topsrcdir)
+ 
+-        subprocess.check_call([cargo, 'vendor', '--quiet', vendor_dir], cwd=self.topsrcdir)
++        subprocess.check_call([cargo, "vendor", "--quiet", vendor_dir], cwd=self.topsrcdir)
+ 
+         if not self._check_licenses(vendor_dir):
+             self.log(
+-                logging.ERROR, 'license_check_failed', {},
+-                '''The changes from `mach vendor rust` will NOT be added to version control.
++                logging.ERROR,
++                "license_check_failed",
++                {},
++                """The changes from `mach vendor rust` will NOT be added to version control.
+ 
+-{notice}'''.format(notice=CARGO_LOCK_NOTICE))
++{notice}""".format(
++                    notice=CARGO_LOCK_NOTICE
++                ),
++            )
+             self.repository.clean_directory(vendor_dir)
+             sys.exit(1)
+ 
+         self.repository.add_remove_files(vendor_dir)
+ 
+         # 100k is a reasonable upper bound on source file size.
+         FILESIZE_LIMIT = 100 * 1024
+         large_files = set()
+         cumulative_added_size = 0
+-        for f in self.repository.get_changed_files('A'):
++        for f in self.repository.get_changed_files("A"):
+             path = mozpath.join(self.topsrcdir, f)
+             size = os.stat(path).st_size
+             cumulative_added_size += size
+             if size > FILESIZE_LIMIT:
+                 large_files.add(f)
+ 
+         # Forcefully complain about large files being added, as history has
+         # shown that large-ish files typically are not needed.
+         if large_files and not build_peers_said_large_imports_were_ok:
+-            self.log(logging.ERROR, 'filesize_check', {},
+-                     '''The following files exceed the filesize limit of {size}:
++            self.log(
++                logging.ERROR,
++                "filesize_check",
++                {},
++                """The following files exceed the filesize limit of {size}:
+ 
+ {files}
+ 
+ If you can't reduce the size of these files, talk to a build peer (on the #build
+ channel at https://chat.mozilla.org) about the particular large files you are
+ adding.
+ 
+ The changes from `mach vendor rust` will NOT be added to version control.
+ 
+-{notice}'''.format(files='\n'.join(sorted(large_files)), size=FILESIZE_LIMIT,
+-                   notice=CARGO_LOCK_NOTICE))
++{notice}""".format(
++                    files="\n".join(sorted(large_files)),
++                    size=FILESIZE_LIMIT,
++                    notice=CARGO_LOCK_NOTICE,
++                ),
++            )
+             self.repository.forget_add_remove_files(vendor_dir)
+             self.repository.clean_directory(vendor_dir)
+             sys.exit(1)
+ 
+         # Only warn for large imports, since we may just have large code
+         # drops from time to time (e.g. importing features into m-c).
+         SIZE_WARN_THRESHOLD = 5 * 1024 * 1024
+         if cumulative_added_size >= SIZE_WARN_THRESHOLD:
+-            self.log(logging.WARN, 'filesize_check', {},
+-                     '''Your changes add {size} bytes of added files.
++            self.log(
++                logging.WARN,
++                "filesize_check",
++                {},
++                """Your changes add {size} bytes of added files.
+ 
+ Please consider finding ways to reduce the size of the vendored packages.
+ For instance, check the vendored packages for unusually large test or
+ benchmark files that don't need to be published to crates.io and submit
+-a pull request upstream to ignore those files when publishing.'''.format(
+-                size=cumulative_added_size)
++a pull request upstream to ignore those files when publishing.""".format(
++                    size=cumulative_added_size
++                ),
+             )
+diff --git a/python/mozbuild/mozbuild/vendor/vendor_rust.py.1637845-02.later b/python/mozbuild/mozbuild/vendor/vendor_rust.py.1637845-02.later
+new file mode 100644
+--- /dev/null
++++ b/python/mozbuild/mozbuild/vendor/vendor_rust.py.1637845-02.later
+@@ -0,0 +1,151 @@
++--- vendor_rust.py
+++++ vendor_rust.py
++@@ -19,17 +19,17 @@ from itertools import dropwhile
++ import pytoml
++ import mozpack.path as mozpath
++ from mozbuild.base import (
++     BuildEnvironmentNotFoundException,
++     MozbuildObject,
++ )
++ 
++ 
++-CARGO_CONFIG_TEMPLATE = '''\
+++CARGO_CONFIG_TEMPLATE = """\
++ # This file contains vendoring instructions for cargo.
++ # It was generated by `mach vendor rust`.
++ # Please do not edit.
++ 
++ {config}
++ 
++ # Take advantage of the fact that cargo will treat lines starting with #
++ # as comments to add preprocessing directives. This file can thus by copied
++@@ -46,107 +46,128 @@ CARGO_CONFIG_TEMPLATE = '''\
++ directory = "{directory}"
++ #endif
++ 
++ # Thankfully, @REPLACE_NAME@ is unlikely to be a legitimate source, so
++ # cargo will ignore it when it's here verbatim.
++ #filter substitution
++ [source."@REPLACE_NAME@"]
++ directory = "@top_srcdir@/@VENDORED_DIRECTORY@"
++-'''
+++"""
++ 
++ 
++ CARGO_LOCK_NOTICE = """
++ NOTE: `cargo vendor` may have made changes to your Cargo.lock. To restore your
++ Cargo.lock to the HEAD version, run `git checkout -- Cargo.lock` or
++ `hg revert Cargo.lock`.
++ """
++ 
++@@ -231,258 +251,339 @@ Please commit or stash these changes bef
++         relative_vendor_dir = "third_party/rust"
++         vendor_dir = mozpath.join(self.topsrcdir, relative_vendor_dir)
++ 
++         # We use check_call instead of mozprocess to ensure errors are displayed.
++         # We do an |update -p| here to regenerate the Cargo.lock file with minimal
++         # changes. See bug 1324462
++         subprocess.check_call([cargo, "update", "-p", "gkrust"], cwd=self.topsrcdir)
++ 
++-        output = subprocess.check_output([cargo, 'vendor', vendor_dir],
++-                                         stderr=subprocess.STDOUT,
++-                                         cwd=self.topsrcdir).decode('UTF-8')
+++        output = subprocess.check_output(
+++            [cargo, "vendor", vendor_dir], stderr=subprocess.STDOUT, cwd=self.topsrcdir
+++        ).decode("UTF-8")
++ 
++         # Get the snippet of configuration that cargo vendor outputs, and
++         # update .cargo/config with it.
++         # XXX(bug 1576765): Hopefully do something better after
++         # https://github.com/rust-lang/cargo/issues/7280 is addressed.
++-        config = '\n'.join(dropwhile(lambda l: not l.startswith('['),
++-                                     output.splitlines()))
+++        config = "\n".join(
+++            dropwhile(lambda l: not l.startswith("["), output.splitlines())
+++        )
++ 
++         # The config is toml, parse it as such.
++         config = pytoml.loads(config)
++ 
++         # For each replace-with, extract their configuration and update the
++         # corresponding directory to be relative to topsrcdir.
++         replaces = {
++-            v['replace-with']
++-            for v in config['source'].values()
++-            if 'replace-with' in v
+++            v["replace-with"] for v in config["source"].values() if "replace-with" in v
++         }
++ 
++         # We only really expect one replace-with
++         if len(replaces) != 1:
++             self.log(
++-                logging.ERROR, 'vendor_failed', {},
++-                '''cargo vendor didn't output a unique replace-with. Found: %s.''' % replaces)
+++                logging.ERROR,
+++                "vendor_failed",
+++                {},
+++                """cargo vendor didn't output a unique replace-with. Found: %s."""
+++                % replaces,
+++            )
++             sys.exit(1)
++ 
++         replace_name = replaces.pop()
++-        replace = config['source'].pop(replace_name)
++-        replace['directory'] = mozpath.relpath(
++-            mozpath.normsep(os.path.normcase(replace['directory'])),
+++        replace = config["source"].pop(replace_name)
+++        replace["directory"] = mozpath.relpath(
+++            mozpath.normsep(os.path.normcase(replace["directory"])),
++             mozpath.normsep(os.path.normcase(self.topsrcdir)),
++         )
++ 
++         # Introduce some determinism for the output.
++         def recursive_sort(obj):
++             if isinstance(obj, dict):
++-                return OrderedDict(sorted(
++-                    (k, recursive_sort(v)) for k, v in obj.items()))
+++                return OrderedDict(
+++                    sorted((k, recursive_sort(v)) for k, v in obj.items())
+++                )
++             if isinstance(obj, list):
++                 return [recursive_sort(o) for o in obj]
++             return obj
++ 
++         config = recursive_sort(config)
++ 
++         # Normalize pytoml output:
++         # - removing empty lines
++         # - remove empty [section]
++         def toml_dump(data):
++             dump = pytoml.dumps(data)
++             if isinstance(data, dict):
++                 for k, v in data.items():
++                     if all(isinstance(v2, dict) for v2 in v.values()):
++-                        dump = dump.replace('[%s]' % k, '')
+++                        dump = dump.replace("[%s]" % k, "")
++             return dump.strip()
++ 
++-        cargo_config = os.path.join(self.topsrcdir, '.cargo', 'config.in')
++-        with open(cargo_config, 'w') as fh:
++-            fh.write(CARGO_CONFIG_TEMPLATE.format(
++-                config=toml_dump(config),
++-                replace_name=replace_name,
++-                directory=replace['directory'],
++-            ))
+++        cargo_config = os.path.join(self.topsrcdir, ".cargo", "config.in")
+++        with open(cargo_config, "w") as fh:
+++            fh.write(
+++                CARGO_CONFIG_TEMPLATE.format(
+++                    config=toml_dump(config),
+++                    replace_name=replace_name,
+++                    directory=replace["directory"],
+++                )
+++            )
++ 
++         if not self._check_licenses(vendor_dir):
++             self.log(
++                 logging.ERROR,
++                 "license_check_failed",
++                 {},
++                 """The changes from `mach vendor rust` will NOT be added to version control.
++ 

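The runtime license check reformatted in the hunks above rejects any "AND" combination (apart from the reviewed encoding_rs case) and otherwise splits the expression on "/" or "OR" before comparing each choice against the allow-list. A minimal standalone sketch of that splitting logic, with a made-up allow-list standing in for RUNTIME_LICENSE_WHITELIST:

    import re

    # Hypothetical allow-list; the real one is VendorRust.RUNTIME_LICENSE_WHITELIST.
    ALLOWED = {"Apache-2.0", "MIT", "BSD-2-Clause"}

    def license_ok(license_string):
        # "X AND Y" means both licenses apply, so it cannot be auto-approved.
        if re.search(r"\s+AND", license_string):
            return False
        # "X/Y" and "X OR Y" both mean the consumer may pick either license.
        choices = re.split(r"\s*/\s*|\s+OR\s+", license_string)
        return any(choice in ALLOWED for choice in choices)

    print(license_ok("MIT OR Apache-2.0"))   # True
    print(license_ok("MIT/Apache-2.0"))      # True
    print(license_ok("GPL-3.0 AND MIT"))     # False
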
+ 187 - 0
mozilla-release/patches/1637845-03-79a1.patch

@@ -0,0 +1,187 @@
+# HG changeset patch
+# User Tom Ritter <tom@mozilla.com>
+# Date 1591800051 0
+# Node ID 5c7bae6fcd367b8c2031c3286743ce7b00245b6f
+# Parent  8c407f598a57d4833281b8f20972ac95d05d6c5d
+Bug 1637845 - Add 'license-file' to origin, remove 'revision' from vendoring, add 'vendor-directory' r=glob
+
+
+Add license-file to allow one to specify a special file the
+project's license is in.
+
+Add 'vendor-directory' to allow vendoring the files into a
+separate directory (e.g. under third_party.) Remove a contradictory
+comment.
+
+Remove 'revision' from 'vendoring'.  This doesn't make sense to me:
+'vendoring' is about how to update the library. In a future iteration
+we may wish to restrict updates to a branch; but specifying a revision
+is specifically about _not_ updating. It appears the intent of revision
+was to identify what revision was currently in-tree; but that information
+should live in the 'origin' section.
+
+Differential Revision: https://phabricator.services.mozilla.com/D75694
+
+Depends on D75896
+
+diff --git a/media/libdav1d/moz.yaml b/media/libdav1d/moz.yaml
+--- a/media/libdav1d/moz.yaml
++++ b/media/libdav1d/moz.yaml
+@@ -17,13 +17,23 @@ origin:
+   # Full URL for the package's homepage/etc
+   # Usually different from repository url
+   url: https://code.videolan.org/videolan/dav1d
+ 
+   # Human-readable identifier for this version/release
+   # Generally "version NNN", "tag SSS", "bookmark SSS"
+   release: commit efd9e5518e0ed5114f8b4579debd7ee6dbede21f (2020-03-06T00:16:53.000+01:00).
+ 
++  # Revision to pull in
++  # Must be a long or short commit SHA (long preferred)
++  revision: efd9e5518e0ed5114f8b4579debd7ee6dbede21f
++
+   # The package's license, where possible using the mnemonic from
+   # https://spdx.org/licenses/
+   # Multiple licenses can be specified (as a YAML list)
+   # A "LICENSE" file must exist containing the full license text
+   license: BSD-2-Clause
++
++  license-file: COPYING
++
++vendoring:
++  url: https://code.videolan.org/videolan/dav1d.git
++  vendor-directory: third_party/dav1d
+diff --git a/python/mozbuild/mozbuild/vendor/moz_yaml.py b/python/mozbuild/mozbuild/vendor/moz_yaml.py
+--- a/python/mozbuild/mozbuild/vendor/moz_yaml.py
++++ b/python/mozbuild/mozbuild/vendor/moz_yaml.py
+@@ -79,37 +79,44 @@ origin:
+   # Full URL for the package's homepage/etc
+   # Usually different from repository url
+   url: package's homepage url
+ 
+   # Human-readable identifier for this version/release
+   # Generally "version NNN", "tag SSS", "bookmark SSS"
+   release: identifier
+ 
++  # Revision to pull in
++  # Must be a long or short commit SHA (long preferred)
++  revision: sha
++
+   # The package's license, where possible using the mnemonic from
+   # https://spdx.org/licenses/
+   # Multiple licenses can be specified (as a YAML list)
+   # A "LICENSE" file must exist containing the full license text
+   license: MPL-2.0
+ 
++  # If the package's license is specified in a particular file,
++  # this is the name of the file.
++  # optional
++  license-file: COPYING
++
+ # Configuration for the automated vendoring system.
+-# Files are always vendored into a directory structure that matches the source
+-# repository, into the same directory as the moz.yaml file
+ # optional
+ vendoring:
+ 
+   # Repository URL to vendor from
+   # eg. https://github.com/kinetiknz/nestegg.git
+   # Any repository host can be specified here, however initially we'll only
+-  # support automated vendoring from selected sources initiall.
++  # support automated vendoring from selected sources initially.
+   url: source url (generally repository clone url)
+ 
+-  # Revision to pull in
+-  # Must be a long or short commit SHA (long preferred)
+-  revision: sha
++  # Base directory of the location where the source files will live in-tree.
++  # If omitted, will default to the location the moz.yaml file is in.
++  vendor-directory: third_party/directory
+ 
+   # List of patch files to apply after vendoring. Applied in the order
+   # specified, and alphabetically if globbing is used. Patches must apply
+   # cleanly before changes are pushed
+   # All patch files are implicitly added to the keep file list.
+   # optional
+   patches:
+     - file
+@@ -225,23 +232,23 @@ def update_moz_yaml(filename, release, r
+                 section = m.group(1)
+             else:
+                 m = RE_FIELD(line)
+                 if m:
+                     (name, value) = m.groups()
+                     if section == "origin" and name == "release":
+                         line = "  release: %s\n" % release
+                         found_release = True
+-                    elif section == "vendoring" and name == "revision":
++                    elif section == "origin" and name == "revision":
+                         line = "  revision: %s\n" % revision
+                         found_revision = True
+             lines.append(line)
+ 
+         if not found_release and found_revision:
+-            raise ValueError("Failed to find origin:release and " "vendoring:revision")
++            raise ValueError("Failed to find origin:release and " "origin:revision")
+ 
+     if write:
+         with open(filename, "w") as f:
+             f.writelines(lines)
+ 
+ 
+ def _schema_1():
+     """Returns Voluptuous Schema object."""
+@@ -252,42 +259,47 @@ def _schema_1():
+                 Required("product"): All(str, Length(min=1)),
+                 Required("component"): All(str, Length(min=1)),
+             },
+             "origin": {
+                 Required("name"): All(str, Length(min=1)),
+                 Required("description"): All(str, Length(min=1)),
+                 Required("url"): FqdnUrl(),
+                 Required("license"): Msg(License(), msg="Unsupported License"),
++                "license-file": All(str, Length(min=1)),
+                 Required("release"): All(str, Length(min=1)),
++                Required("revision"): Match(r"^[a-fA-F0-9]{12,40}$"),
+             },
+             "vendoring": {
+                 Required("url"): FqdnUrl(),
+-                Required("revision"): Match(r"^[a-fA-F0-9]{12,40}$"),
++                "vendor-directory": All(str, Length(min=1)),
+                 "patches": Unique([str]),
+                 "keep": Unique([str]),
+                 "exclude": Unique([str]),
+                 "include": Unique([str]),
+                 "run_after": Unique([str]),
+             },
+         }
+     )
+ 
+ 
+ def _schema_1_additional(filename, manifest, require_license_file=True):
+     """Additional schema/validity checks"""
+ 
++    vendor_directory = os.path.dirname(filename)
++    if "vendoring" in manifest and "vendor-directory" in manifest["vendoring"]:
++        vendor_directory = manifest["vendoring"]["vendor-directory"]
++
+     # LICENSE file must exist.
+     if require_license_file and "origin" in manifest:
+-        files = [
+-            f.lower()
+-            for f in os.listdir(os.path.dirname(filename))
+-            if f.lower().startswith("license")
+-        ]
++        files = [f.lower() for f in os.listdir(vendor_directory)]
+         if not (
++            "license-file" in manifest["origin"]
++            and manifest["origin"]["license-file"].lower() in files
++        ) and not (
+             "license" in files
+             or "license.txt" in files
+             or "license.rst" in files
+             or "license.html" in files
+             or "license.md" in files
+         ):
+             license = manifest["origin"]["license"]
+             if isinstance(license, list):

+ 121 - 0
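To make the relocated fields concrete, the sketch below parses a manifest shaped like the schema above with plain PyYAML and reads the new keys; it skips the voluptuous validation entirely, and the values are illustrative:

    import yaml  # PyYAML

    MANIFEST = """
    schema: 1
    origin:
      name: dav1d
      release: commit efd9e5518e0e (2020-03-06)
      revision: efd9e5518e0ed5114f8b4579debd7ee6dbede21f
      license: BSD-2-Clause
      license-file: COPYING
    vendoring:
      url: https://code.videolan.org/videolan/dav1d.git
      vendor-directory: third_party/dav1d
    """

    manifest = yaml.safe_load(MANIFEST)
    # 'revision' now lives under origin (what is currently in-tree) ...
    print(manifest["origin"]["revision"])
    # ... while 'vendoring' only describes how and where updates land.
    print(manifest["vendoring"].get("vendor-directory", "<directory of moz.yaml>"))
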
mozilla-release/patches/1637845-04-79a1.patch

@@ -0,0 +1,121 @@
+# HG changeset patch
+# User Tom Ritter <tom@mozilla.com>
+# Date 1591800094 0
+# Node ID 3ad87515c7fdb2a0ab480e52a3a7e34d6a51b564
+# Parent  9f52cfe37c46dc8e1fb336e513fcad39840fb3d0
+Bug 1637845 - Add source-hosting to the schema and vendoring section r=glob
+
+
+This will tell us how to interact with the upstream repository.
+
+Differential Revision: https://phabricator.services.mozilla.com/D75695
+
+Depends on D75694
+
+diff --git a/media/libdav1d/moz.yaml b/media/libdav1d/moz.yaml
+--- a/media/libdav1d/moz.yaml
++++ b/media/libdav1d/moz.yaml
+@@ -31,9 +31,10 @@ origin:
+   # Multiple licenses can be specified (as a YAML list)
+   # A "LICENSE" file must exist containing the full license text
+   license: BSD-2-Clause
+ 
+   license-file: COPYING
+ 
+ vendoring:
+   url: https://code.videolan.org/videolan/dav1d.git
++  source-hosting: gitlab
+   vendor-directory: third_party/dav1d
+diff --git a/python/mozbuild/mozbuild/vendor/moz_yaml.py b/python/mozbuild/mozbuild/vendor/moz_yaml.py
+--- a/python/mozbuild/mozbuild/vendor/moz_yaml.py
++++ b/python/mozbuild/mozbuild/vendor/moz_yaml.py
+@@ -16,26 +16,17 @@ import sys
+ 
+ HERE = os.path.abspath(os.path.dirname(__file__))
+ lib_path = os.path.join(HERE, "..", "..", "..", "third_party", "python")
+ sys.path.append(os.path.join(lib_path, "voluptuous"))
+ sys.path.append(os.path.join(lib_path, "pyyaml", "lib"))
+ 
+ import voluptuous
+ import yaml
+-from voluptuous import (
+-    All,
+-    FqdnUrl,
+-    Length,
+-    Match,
+-    Msg,
+-    Required,
+-    Schema,
+-    Unique,
+-)
++from voluptuous import All, FqdnUrl, Length, Match, Msg, Required, Schema, Unique, In
+ from yaml.error import MarkedYAMLError
+ 
+ # TODO ensure this matches the approved list of licenses
+ VALID_LICENSES = [
+     # Standard Licenses (as per https://spdx.org/licenses/)
+     "Apache-2.0",
+     "BSD-2-Clause",
+     "BSD-3-Clause-Clear",
+@@ -50,16 +41,18 @@ VALID_LICENSES = [
+     # Unique Licenses
+     "ACE",  # http://www.cs.wustl.edu/~schmidt/ACE-copying.html
+     "Anti-Grain-Geometry",  # http://www.antigrain.com/license/index.html
+     "JPNIC",  # https://www.nic.ad.jp/ja/idn/idnkit/download/index.html
+     "Khronos",  # https://www.khronos.org/openmaxdl
+     "Unicode",  # http://www.unicode.org/copyright.html
+ ]
+ 
++VALID_SOURCE_HOSTS = ["gitlab"]
++
+ """
+ ---
+ # Third-Party Library Template
+ # All fields are mandatory unless otherwise noted
+ 
+ # Version of this schema
+ schema: 1
+ 
+@@ -104,16 +97,20 @@ origin:
+ vendoring:
+ 
+   # Repository URL to vendor from
+   # eg. https://github.com/kinetiknz/nestegg.git
+   # Any repository host can be specified here, however initially we'll only
+   # support automated vendoring from selected sources initially.
+   url: source url (generally repository clone url)
+ 
++  # Type of hosting for the upstream repository
++  # Valid values are 'gitlab', 'github'
++  source-hosting: gitlab
++
+   # Base directory of the location where the source files will live in-tree.
+   # If omitted, will default to the location the moz.yaml file is in.
+   vendor-directory: third_party/directory
+ 
+   # List of patch files to apply after vendoring. Applied in the order
+   # specified, and alphabetically if globbing is used. Patches must apply
+   # cleanly before changes are pushed
+   # All patch files are implicitly added to the keep file list.
+@@ -265,16 +262,21 @@ def _schema_1():
+                 Required("url"): FqdnUrl(),
+                 Required("license"): Msg(License(), msg="Unsupported License"),
+                 "license-file": All(str, Length(min=1)),
+                 Required("release"): All(str, Length(min=1)),
+                 Required("revision"): Match(r"^[a-fA-F0-9]{12,40}$"),
+             },
+             "vendoring": {
+                 Required("url"): FqdnUrl(),
++                Required("source-hosting"): All(
++                    str,
++                    Length(min=1),
++                    In(VALID_SOURCE_HOSTS, msg="Unsupported Source Hosting"),
++                ),
+                 "vendor-directory": All(str, Length(min=1)),
+                 "patches": Unique([str]),
+                 "keep": Unique([str]),
+                 "exclude": Unique([str]),
+                 "include": Unique([str]),
+                 "run_after": Unique([str]),
+             },
+         }

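A short, self-contained illustration of how the voluptuous In() constraint added above behaves, assuming voluptuous is installed and mirroring VALID_SOURCE_HOSTS as it stands at this point in the series (gitlab only):

    from voluptuous import All, In, Length, MultipleInvalid, Required, Schema

    schema = Schema(
        {
            Required("source-hosting"): All(
                str,
                Length(min=1),
                In(["gitlab"], msg="Unsupported Source Hosting"),
            )
        }
    )

    print(schema({"source-hosting": "gitlab"}))  # passes; returns the validated dict

    try:
        schema({"source-hosting": "sourceforge"})
    except MultipleInvalid as e:
        print(e)  # reports the "Unsupported Source Hosting" message
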
+ 117 - 0
mozilla-release/patches/1637845-05-79a1.patch

@@ -0,0 +1,117 @@
+# HG changeset patch
+# User Tom Ritter <tom@mozilla.com>
+# Date 1591800133 0
+# Node ID 287275c7275ef6705800f791c024f0a8dfa06421
+# Parent  2ffb90ccc651bf238b9f2872f098c666fe86ea57
+Bug 1637845 - Add 'file-updates' to the moz.yaml schema r=glob
+
+
+This section is for expressing actions that occur after extracting
+the files, before vendoring is completed. While we support running
+scripts (or at least, we will...) this section can be used for simple
+actions that don't need scripts.
+
+Also, add the dav1d excluded files.
+
+Differential Revision: https://phabricator.services.mozilla.com/D75696
+
+Depends on D75695
+
+diff --git a/media/libdav1d/moz.yaml b/media/libdav1d/moz.yaml
+--- a/media/libdav1d/moz.yaml
++++ b/media/libdav1d/moz.yaml
+@@ -33,8 +33,22 @@ origin:
+   license: BSD-2-Clause
+ 
+   license-file: COPYING
+ 
+ vendoring:
+   url: https://code.videolan.org/videolan/dav1d.git
+   source-hosting: gitlab
+   vendor-directory: third_party/dav1d
++
++  exclude:
++    - build/.gitattributes
++    - build/.gitignore
++
++  file-updates:
++    - action: replace-in-file
++      pattern: '@VCS_TAG@'
++      with: '{revision}'
++      file: include/vcs_version.h.in
++    - action: replace-in-file
++      pattern: '@VCS_TAG@'
++      with: '{revision}'
++      file: '{yaml_dir}/vcs_version.h'
+diff --git a/python/mozbuild/mozbuild/vendor/moz_yaml.py b/python/mozbuild/mozbuild/vendor/moz_yaml.py
+--- a/python/mozbuild/mozbuild/vendor/moz_yaml.py
++++ b/python/mozbuild/mozbuild/vendor/moz_yaml.py
+@@ -156,16 +156,38 @@ vendoring:
+   # All three file/path parameters ("keep", "exclude", and "include") support
+   # filenames, directory names, and globs/wildcards.
+ 
+   # In-tree scripts to be executed after vendoring but before pushing.
+   # optional
+   run_after:
+     - script
+     - another script
++
++  # Strings to replace in various files after vendoring.
++  # All subfields are required
++  #   Action must be 'replace-in-file'
++  #   Pattern is what in the file to search for. It is an exact strng match
++  #   With is the string to replace it with. Accepts the special keyword
++  #     '{revision}' for the commit we are updating to.
++  #   File is the file to replace it in. It is relative to the vendor-directory.
++  #     If the vendor-directory is different from the directory of the yaml file,
++  #     the keyword '{yaml_dir}' may be used to make the path relative to that
++  #     directory
++  # optional
++  file-updates:
++    - action: replace-in-file
++      pattern: '@VCS_TAG@'
++      with: '{revision}'
++      file: include/vcs_version.h.in
++
++    - action: replace-in-file
++      pattern: '@VCS_TAG@'
++      with: '{revision}'
++      file: '{yaml_dir}/vcs_version.h'
+ """
+ 
+ RE_SECTION = re.compile(r"^(\S[^:]*):").search
+ RE_FIELD = re.compile(r"^\s\s([^:]+):\s+(\S+)$").search
+ 
+ 
+ class VerifyError(Exception):
+     def __init__(self, filename, error):
+@@ -273,16 +295,29 @@ def _schema_1():
+                     In(VALID_SOURCE_HOSTS, msg="Unsupported Source Hosting"),
+                 ),
+                 "vendor-directory": All(str, Length(min=1)),
+                 "patches": Unique([str]),
+                 "keep": Unique([str]),
+                 "exclude": Unique([str]),
+                 "include": Unique([str]),
+                 "run_after": Unique([str]),
++                "file-updates": [
++                    {
++                        Required("action"): All(
++                            In(
++                                ["replace-in-file"],
++                                msg="Invalid action specified in file-updates",
++                            )
++                        ),
++                        Required("pattern"): All(str, Length(min=1)),
++                        Required("with"): All(str, Length(min=1)),
++                        Required("file"): All(str, Length(min=1)),
++                    }
++                ],
+             },
+         }
+     )
+ 
+ 
+ def _schema_1_additional(filename, manifest, require_license_file=True):
+     """Additional schema/validity checks"""
+ 

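This patch only teaches the schema about file-updates; the code that actually applies the actions arrives later in the series. As a rough sketch of what a replace-in-file entry implies, with a hypothetical apply_replace_in_file helper that is not part of mozbuild:

    import os

    def apply_replace_in_file(entry, revision, vendor_dir, yaml_dir):
        """Expand the supported keywords and do an exact string replacement."""
        path = entry["file"].replace("{yaml_dir}", yaml_dir)
        if not os.path.isabs(path):
            path = os.path.join(vendor_dir, path)
        replacement = entry["with"].replace("{revision}", revision)
        with open(path, "r", encoding="utf-8") as f:
            contents = f.read()
        with open(path, "w", encoding="utf-8") as f:
            f.write(contents.replace(entry["pattern"], replacement))

    # Example entry, mirroring the dav1d manifest above (not executed here):
    entry = {
        "action": "replace-in-file",
        "pattern": "@VCS_TAG@",
        "with": "{revision}",
        "file": "{yaml_dir}/vcs_version.h",
    }
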
+ 228 - 0
mozilla-release/patches/1637845-06-79a1.patch

@@ -0,0 +1,228 @@
+# HG changeset patch
+# User Tom Ritter <tom@mozilla.com>
+# Date 1591800175 0
+# Node ID a55dd5ad2285979e4b3258225e06542e60e7e44c
+# Parent  197788031e24a4533769e311014b6ce16465caa1
+Bug 1637845 - Extend the file-updates schema to support a copy-file action r=glob
+
+
+Additionally, raise voluptuous.Invalid errors so the message provided in
+the Exception is shown to the user on error.
+
+Differential Revision: https://phabricator.services.mozilla.com/D75697
+
+Depends on D75696
+
+diff --git a/media/libdav1d/moz.yaml b/media/libdav1d/moz.yaml
+--- a/media/libdav1d/moz.yaml
++++ b/media/libdav1d/moz.yaml
+@@ -39,16 +39,15 @@ vendoring:
+   source-hosting: gitlab
+   vendor-directory: third_party/dav1d
+ 
+   exclude:
+     - build/.gitattributes
+     - build/.gitignore
+ 
+   file-updates:
+-    - action: replace-in-file
+-      pattern: '@VCS_TAG@'
+-      with: '{revision}'
+-      file: include/vcs_version.h.in
++    - action: copy-file
++      from: include/vcs_version.h.in
++      to: '{yaml_dir}/vcs_version.h'
+     - action: replace-in-file
+       pattern: '@VCS_TAG@'
+       with: '{revision}'
+       file: '{yaml_dir}/vcs_version.h'
+diff --git a/python/mozbuild/mozbuild/vendor/moz_yaml.py b/python/mozbuild/mozbuild/vendor/moz_yaml.py
+--- a/python/mozbuild/mozbuild/vendor/moz_yaml.py
++++ b/python/mozbuild/mozbuild/vendor/moz_yaml.py
+@@ -16,17 +16,28 @@ import sys
+ 
+ HERE = os.path.abspath(os.path.dirname(__file__))
+ lib_path = os.path.join(HERE, "..", "..", "..", "third_party", "python")
+ sys.path.append(os.path.join(lib_path, "voluptuous"))
+ sys.path.append(os.path.join(lib_path, "pyyaml", "lib"))
+ 
+ import voluptuous
+ import yaml
+-from voluptuous import All, FqdnUrl, Length, Match, Msg, Required, Schema, Unique, In
++from voluptuous import (
++    All,
++    FqdnUrl,
++    Length,
++    Match,
++    Msg,
++    Required,
++    Schema,
++    Unique,
++    In,
++    Invalid,
++)
+ from yaml.error import MarkedYAMLError
+ 
+ # TODO ensure this matches the approved list of licenses
+ VALID_LICENSES = [
+     # Standard Licenses (as per https://spdx.org/licenses/)
+     "Apache-2.0",
+     "BSD-2-Clause",
+     "BSD-3-Clause-Clear",
+@@ -157,32 +168,40 @@ vendoring:
+   # filenames, directory names, and globs/wildcards.
+ 
+   # In-tree scripts to be executed after vendoring but before pushing.
+   # optional
+   run_after:
+     - script
+     - another script
+ 
+-  # Strings to replace in various files after vendoring.
+-  # All subfields are required
+-  #   Action must be 'replace-in-file'
+-  #   Pattern is what in the file to search for. It is an exact strng match
+-  #   With is the string to replace it with. Accepts the special keyword
++  # Actions to take on files after updating. Applied in order.
++  # The action subfield is required. It must be one of 'copy-file', replace-in-file'.
++  # Unless otherwise noted, all subfields of action are required.
++  #
++  # If the action is copy-file:
++  #   from is the source file
++  #   to is the destination
++  #
++  # If the action is replace-in-file:
++  #   pattern is what in the file to search for. It is an exact strng match.
++  #   with is the string to replace it with. Accepts the special keyword
+   #     '{revision}' for the commit we are updating to.
+-  #   File is the file to replace it in. It is relative to the vendor-directory.
++  #   File is the file to replace it in.
++  #
++  # Unless specified otherwise, all files are relative to the vendor-directory.
+   #     If the vendor-directory is different from the directory of the yaml file,
+   #     the keyword '{yaml_dir}' may be used to make the path relative to that
+   #     directory
++  #
+   # optional
+   file-updates:
+-    - action: replace-in-file
+-      pattern: '@VCS_TAG@'
+-      with: '{revision}'
+-      file: include/vcs_version.h.in
++    - action: copy-file
++      from: include/vcs_version.h.in
++      to: '{yaml_dir}/vcs_version.h'
+ 
+     - action: replace-in-file
+       pattern: '@VCS_TAG@'
+       with: '{revision}'
+       file: '{yaml_dir}/vcs_version.h'
+ """
+ 
+ RE_SECTION = re.compile(r"^(\S[^:]*):").search
+@@ -295,29 +314,32 @@ def _schema_1():
+                     In(VALID_SOURCE_HOSTS, msg="Unsupported Source Hosting"),
+                 ),
+                 "vendor-directory": All(str, Length(min=1)),
+                 "patches": Unique([str]),
+                 "keep": Unique([str]),
+                 "exclude": Unique([str]),
+                 "include": Unique([str]),
+                 "run_after": Unique([str]),
+-                "file-updates": [
+-                    {
+-                        Required("action"): All(
+-                            In(
+-                                ["replace-in-file"],
++                "file-updates": All(
++                    FileUpdate(),
++                    [
++                        {
++                            Required("action"): In(
++                                ["copy-file", "replace-in-file"],
+                                 msg="Invalid action specified in file-updates",
+-                            )
+-                        ),
+-                        Required("pattern"): All(str, Length(min=1)),
+-                        Required("with"): All(str, Length(min=1)),
+-                        Required("file"): All(str, Length(min=1)),
+-                    }
+-                ],
++                            ),
++                            "from": All(str, Length(min=1)),
++                            "to": All(str, Length(min=1)),
++                            "pattern": All(str, Length(min=1)),
++                            "with": All(str, Length(min=1)),
++                            "file": All(str, Length(min=1)),
++                        }
++                    ],
++                ),
+             },
+         }
+     )
+ 
+ 
+ def _schema_1_additional(filename, manifest, require_license_file=True):
+     """Additional schema/validity checks"""
+ 
+@@ -359,24 +381,58 @@ def _schema_1_additional(filename, manif
+         if not has_schema:
+             raise ValueError("Not simple YAML")
+ 
+     # Verify YAML can be updated.
+     if "vendor" in manifest:
+         update_moz_yaml(filename, "", "", verify=False, write=True)
+ 
+ 
++class FileUpdate(object):
++    """Voluptuous validator which verifies the license(s) are valid as per our
++    whitelist."""
++
++    def __call__(self, values):
++        for v in values:
++            if "action" not in v:
++                raise Invalid("All file-update entries must specify a valid action")
++            if v["action"] == "copy-file":
++                if "from" not in v or "to" not in v or len(v.keys()) != 3:
++                    raise Invalid(
++                        "copy-file action must (only) specify 'from' and 'to' keys"
++                    )
++            elif v["action"] == "replace-in-file":
++                if (
++                    "pattern" not in v
++                    or "with" not in v
++                    or "file" not in v
++                    or len(v.keys()) != 4
++                ):
++                    raise Invalid(
++                        "replace-in-file action must (only) specify "
++                        + "'pattern', 'with', and 'file' keys"
++                    )
++            else:
++                # This check occurs before the validator above, so the above is
++                # redundant but we leave it to be verbose.
++                raise Invalid("Supplied action " + v["action"] + " is invalid.")
++        return values
++
++    def __repr__(self):
++        return "FileUpdate"
++
++
+ class License(object):
+     """Voluptuous validator which verifies the license(s) are valid as per our
+     whitelist."""
+ 
+     def __call__(self, values):
+         if isinstance(values, str):
+             values = [values]
+         elif not isinstance(values, list):
+-            raise ValueError("Must be string or list")
++            raise Invalid("Must be string or list")
+         for v in values:
+             if v not in VALID_LICENSES:
+-                raise ValueError("Bad License")
++                raise Invalid("Bad License")
+         return values
+ 
+     def __repr__(self):
+         return "License"

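The FileUpdate validator above boils down to requiring that each entry carries exactly the keys belonging to its action. A standalone restatement of that key-set rule, outside voluptuous, just to make it concrete:

    REQUIRED_KEYS = {
        "copy-file": {"action", "from", "to"},
        "replace-in-file": {"action", "pattern", "with", "file"},
    }

    def check_entry(entry):
        action = entry.get("action")
        expected = REQUIRED_KEYS.get(action)
        if expected is None:
            raise ValueError("Supplied action %r is invalid." % action)
        if set(entry) != expected:
            raise ValueError(
                "%s must (only) specify %s" % (action, sorted(expected - {"action"}))
            )

    check_entry({"action": "copy-file",
                 "from": "include/vcs_version.h.in",
                 "to": "{yaml_dir}/vcs_version.h"})        # passes silently
    # check_entry({"action": "copy-file", "from": "a"})    # would raise ValueError
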
+ 80 - 0
mozilla-release/patches/1637845-07-79a1.patch

@@ -0,0 +1,80 @@
+# HG changeset patch
+# User Tom Ritter <tom@mozilla.com>
+# Date 1591800213 0
+# Node ID 3a08c2a3b3027e16b1f38a2b2641c9a7568a4873
+# Parent  fa47b0c82c3ab4ad468e375e0d3290df1f3e7878
+Bug 1637845 - Rename VerifyError to MozYamlVerifyError r=glob
+
+
+This is more descriptive for where this error is coming from.
+
+Differential Revision: https://phabricator.services.mozilla.com/D75698
+
+Depends on D75697
+
+diff --git a/python/mozbuild/mozbuild/vendor/moz_yaml.py b/python/mozbuild/mozbuild/vendor/moz_yaml.py
+--- a/python/mozbuild/mozbuild/vendor/moz_yaml.py
++++ b/python/mozbuild/mozbuild/vendor/moz_yaml.py
+@@ -203,17 +203,17 @@ vendoring:
+       with: '{revision}'
+       file: '{yaml_dir}/vcs_version.h'
+ """
+ 
+ RE_SECTION = re.compile(r"^(\S[^:]*):").search
+ RE_FIELD = re.compile(r"^\s\s([^:]+):\s+(\S+)$").search
+ 
+ 
+-class VerifyError(Exception):
++class MozYamlVerifyError(Exception):
+     def __init__(self, filename, error):
+         self.filename = filename
+         self.error = error
+ 
+     def __str__(self):
+         return "%s: %s" % (self.filename, self.error)
+ 
+ 
+@@ -221,38 +221,38 @@ def load_moz_yaml(filename, verify=True,
+     """Loads and verifies the specified manifest."""
+ 
+     # Load and parse YAML.
+     try:
+         with open(filename, "r") as f:
+             manifest = yaml.safe_load(f)
+     except IOError as e:
+         if e.errno == errno.ENOENT:
+-            raise VerifyError(filename, "Failed to find manifest: %s" % filename)
++            raise MozYamlVerifyError(filename, "Failed to find manifest: %s" % filename)
+         raise
+     except MarkedYAMLError as e:
+-        raise VerifyError(filename, e)
++        raise MozYamlVerifyError(filename, e)
+ 
+     if not verify:
+         return manifest
+ 
+     # Verify schema.
+     if "schema" not in manifest:
+-        raise VerifyError(filename, 'Missing manifest "schema"')
++        raise MozYamlVerifyError(filename, 'Missing manifest "schema"')
+     if manifest["schema"] == 1:
+         schema = _schema_1()
+         schema_additional = _schema_1_additional
+     else:
+-        raise VerifyError(filename, "Unsupported manifest schema")
++        raise MozYamlVerifyError(filename, "Unsupported manifest schema")
+ 
+     try:
+         schema(manifest)
+         schema_additional(filename, manifest, require_license_file=require_license_file)
+     except (voluptuous.Error, ValueError) as e:
+-        raise VerifyError(filename, e)
++        raise MozYamlVerifyError(filename, e)
+ 
+     return manifest
+ 
+ 
+ def update_moz_yaml(filename, release, revision, verify=True, write=True):
+     """Update origin:release and vendoring:revision without stripping
+     comments or reordering fields."""
+ 

+ 261 - 0
mozilla-release/patches/1637845-08-79a1.patch

@@ -0,0 +1,261 @@
+# HG changeset patch
+# User Tom Ritter <tom@mozilla.com>
+# Date 1591800253 0
+# Node ID b459114e8008bd289607361b39c2e25b1fa9cb3f
+# Parent  12d2bc30007acd925b9520eb35ffd3fcb904e7d5
+Bug 1637845 - Add a new function 'copy_contents' to mozfile r=glob
+
+
+When you have a directory src and you want to move all the contents
+to dst, this is surprisingly annoying. shutil.move doesn't operate
+on globs. shutil.copytree does exactly what we need *except* it
+requires the destination to not exist. In Python 3.7 it gained the
+ability to not care about the destination existing but... we need
+to handle the pre-Python 3.7 case.
+
+Differential Revision: https://phabricator.services.mozilla.com/D76083
+
+Depends on D75698
+
+diff --git a/testing/mozbase/mozfile/mozfile/mozfile.py b/testing/mozbase/mozfile/mozfile/mozfile.py
+--- a/testing/mozbase/mozfile/mozfile/mozfile.py
++++ b/testing/mozbase/mozfile/mozfile/mozfile.py
+@@ -19,16 +19,17 @@ from contextlib import contextmanager
+ from six.moves import urllib
+ 
+ 
+ __all__ = ['extract_tarball',
+            'extract_zip',
+            'extract',
+            'is_url',
+            'load',
++           'copy_contents',
+            'move',
+            'remove',
+            'rmtree',
+            'tree',
+            'which',
+            'NamedTemporaryFile',
+            'TemporaryDirectory']
+ 
+@@ -227,16 +228,60 @@ def remove(path):
+ 
+         # We're ensuring that every nested item has writable permission.
+         for root, dirs, files in os.walk(path):
+             for entry in dirs + files:
+                 _update_permissions(os.path.join(root, entry))
+         _call_with_windows_retry(shutil.rmtree, (path,))
+ 
+ 
++def copy_contents(srcdir, dstdir):
++    """
++    Copy the contents of the srcdir into the dstdir, preserving
++    subdirectories.
++
++    If an existing file of the same name exists in dstdir, it will be overwritten.
++    """
++    import shutil
++    # dirs_exist_ok was introduced in Python 3.8
++    # On earlier versions, or Windows, use the verbose mechanism.
++    # We use it on Windows because _call_with_windows_retry doesn't allow
++    # named arguments to be passed.
++    if ((sys.version_info.major < 3 or sys.version_info.minor < 8)
++        or (os.name == 'nt')):
++        names = os.listdir(srcdir)
++        if not os.path.isdir(dstdir):
++            os.makedirs(dstdir)
++        errors = []
++        for name in names:
++            srcname = os.path.join(srcdir, name)
++            dstname = os.path.join(dstdir, name)
++            try:
++                if os.path.islink(srcname):
++                    linkto = os.readlink(srcname)
++                    os.symlink(linkto, dstname)
++                elif os.path.isdir(srcname):
++                    copy_contents(srcname, dstname)
++                else:
++                    _call_windows_retry(shutil.copy2, (srcname, dstname))
++            except OSError as why:
++                errors.append((srcname, dstname, str(why)))
++            except Exception as err:
++                errors.extend(err)
++        try:
++            _call_windows_retry(shutil.copystat, (srcdir, dstdir))
++        except OSError as why:
++            if why.winerror is None:
++                errors.extend((srcdir, dstdir, str(why)))
++        if errors:
++            raise Exception(errors)
++    else:
++        shutil.copytree(srcdir, dstdir, dirs_exist_ok=True)
++
++
+ def move(src, dst):
+     """
+     Move a file or directory path.
+ 
+     This is a replacement for shutil.move that works better under windows,
+     retrying operations on some known errors due to various things keeping
+     a handle on file paths.
+     """
+diff --git a/testing/mozbase/mozfile/tests/stubs.py b/testing/mozbase/mozfile/tests/stubs.py
+--- a/testing/mozbase/mozfile/tests/stubs.py
++++ b/testing/mozbase/mozfile/tests/stubs.py
+@@ -11,16 +11,21 @@ files = [('foo.txt',),
+          ('foo', 'bar', 'fleem.txt',),
+          ('foobar', 'fleem.txt',),
+          ('bar.txt',),
+          ('nested_tree', 'bar', 'fleem.txt',),
+          ('readonly.txt',),
+          ]
+ 
+ 
++def create_empty_stub():
++    tempdir = tempfile.mkdtemp()
++    return tempdir
++
++
+ def create_stub():
+     """create a stub directory"""
+ 
+     tempdir = tempfile.mkdtemp()
+     try:
+         for path in files:
+             fullpath = os.path.join(tempdir, *path)
+             dirname = os.path.dirname(fullpath)
+diff --git a/testing/mozbase/mozfile/tests/test_copycontents.py b/testing/mozbase/mozfile/tests/test_copycontents.py
+new file mode 100644
+--- /dev/null
++++ b/testing/mozbase/mozfile/tests/test_copycontents.py
+@@ -0,0 +1,130 @@
++#!/usr/bin/env python
++
++from __future__ import absolute_import
++
++import os
++import shutil
++import unittest
++
++import mozunit
++
++import mozfile
++
++import stubs
++
++
++class MozfileCopyContentsTestCase(unittest.TestCase):
++    """Test our ability to copy the contents of directories"""
++
++    def _directory_is_subset(self, set_, subset_):
++        """
++        Confirm that all the contents of 'subset_' are contained in 'set_'
++        """
++        names = os.listdir(subset_)
++        for name in names:
++            full_set_path = os.path.join(set_, name)
++            full_subset_path = os.path.join(subset_, name)
++            if os.path.isdir(full_subset_path):
++                self.assertTrue(os.path.isdir(full_set_path))
++                self._directory_is_subset(full_set_path, full_subset_path)
++            elif os.path.islink(full_subset_path):
++                self.assertTrue(os.path.islink(full_set_path))
++            else:
++                self.assertTrue(os.stat(full_set_path))
++
++    def _directories_are_equal(self, dir1, dir2):
++        """
++        Confirm that the contents of 'dir1' are the same as 'dir2'
++        """
++        names1 = os.listdir(dir1)
++        names2 = os.listdir(dir2)
++        self.assertTrue(len(names1) == len(names2))
++        for name in names1:
++            self.assertTrue(name in names2)
++            dir1_path = os.path.join(dir1, name)
++            dir2_path = os.path.join(dir2, name)
++            if os.path.isdir(dir1_path):
++                self.assertTrue(os.path.isdir(dir2_path))
++                self._directories_are_equal(dir1_path, dir2_path)
++            elif os.path.islink(dir1_path):
++                self.assertTrue(os.path.islink(dir2_path))
++            else:
++                self.assertTrue(os.stat(dir2_path))
++
++    def test_copy_empty_directory(self):
++        tempdir = stubs.create_empty_stub()
++        dstdir = stubs.create_empty_stub()
++        self.assertTrue(os.path.isdir(tempdir))
++
++        mozfile.copy_contents(tempdir, dstdir)
++        self._directories_are_equal(dstdir, tempdir)
++
++        if os.path.isdir(tempdir):
++            shutil.rmtree(tempdir)
++        if os.path.isdir(dstdir):
++            shutil.rmtree(dstdir)
++
++    def test_copy_full_directory(self):
++        tempdir = stubs.create_stub()
++        dstdir = stubs.create_empty_stub()
++        self.assertTrue(os.path.isdir(tempdir))
++
++        mozfile.copy_contents(tempdir, dstdir)
++        self._directories_are_equal(dstdir, tempdir)
++
++        if os.path.isdir(tempdir):
++            shutil.rmtree(tempdir)
++        if os.path.isdir(dstdir):
++            shutil.rmtree(dstdir)
++
++    def test_copy_full_directory_with_existing_file(self):
++        tempdir = stubs.create_stub()
++        dstdir = stubs.create_empty_stub()
++
++        filename = "i_dont_exist_in_tempdir"
++        f = open(os.path.join(dstdir, filename), "w")
++        f.write("Hello World")
++        f.close()
++
++        self.assertTrue(os.path.isdir(tempdir))
++
++        mozfile.copy_contents(tempdir, dstdir)
++        self._directory_is_subset(dstdir, tempdir)
++        self.assertTrue(os.path.exists(os.path.join(dstdir, filename)))
++
++        if os.path.isdir(tempdir):
++            shutil.rmtree(tempdir)
++        if os.path.isdir(dstdir):
++            shutil.rmtree(dstdir)
++
++    def test_copy_full_directory_with_overlapping_file(self):
++        tempdir = stubs.create_stub()
++        dstdir = stubs.create_empty_stub()
++
++        filename = "i_do_exist_in_tempdir"
++        for d in [tempdir, dstdir]:
++            f = open(os.path.join(d, filename), "w")
++            f.write("Hello " + d)
++            f.close()
++
++        self.assertTrue(os.path.isdir(tempdir))
++        self.assertTrue(os.path.exists(os.path.join(tempdir, filename)))
++        self.assertTrue(os.path.exists(os.path.join(dstdir, filename)))
++
++        line = open(os.path.join(dstdir, filename), "r").readlines()[0]
++        self.assertTrue(line == "Hello " + dstdir)
++
++        mozfile.copy_contents(tempdir, dstdir)
++
++        line = open(os.path.join(dstdir, filename), "r").readlines()[0]
++        self.assertTrue(line == "Hello " + tempdir)
++        self._directories_are_equal(tempdir, dstdir)
++
++        if os.path.isdir(tempdir):
++            shutil.rmtree(tempdir)
++        if os.path.isdir(dstdir):
++            shutil.rmtree(dstdir)
++
++
++if __name__ == "__main__":
++    mozunit.main()

+ 421 - 0
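For context, a brief usage sketch of the new helper; it assumes a mozfile build that already contains the copy_contents() added above, and the directory contents are made up:

    import os
    import tempfile

    import mozfile  # must include the copy_contents() introduced by this patch

    src = tempfile.mkdtemp()
    dst = tempfile.mkdtemp()

    with open(os.path.join(src, "hello.txt"), "w") as f:
        f.write("hello\n")

    # Unlike shutil.copytree() before Python 3.8, this works even though dst
    # already exists, and same-named files in dst are overwritten.
    mozfile.copy_contents(src, dst)
    print(os.listdir(dst))  # ['hello.txt']

    mozfile.remove(src)
    mozfile.remove(dst)
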
mozilla-release/patches/1637845-09-79a1.patch

@@ -0,0 +1,421 @@
+# HG changeset patch
+# User Tom Ritter <tom@mozilla.com>
+# Date 1591800288 0
+# Node ID 47744e5c0829eeb89120965d2983e871e817396f
+# Parent  617c93017b4f55f1bcee38bfc10945ed25357ed8
+Bug 1637845 - Clean up mozfile r=gbrown
+
+
+Run file through black, then re-order the imports at the top in order of strlen
+
+Differential Revision: https://phabricator.services.mozilla.com/D77690
+
+Depends on D76083
+
+diff --git a/testing/mozbase/mozfile/mozfile/mozfile.py b/testing/mozbase/mozfile/mozfile/mozfile.py
+--- a/testing/mozbase/mozfile/mozfile/mozfile.py
++++ b/testing/mozbase/mozfile/mozfile/mozfile.py
+@@ -14,29 +14,31 @@ import stat
+ import sys
+ import time
+ import warnings
+ from contextlib import contextmanager
+ 
+ from six.moves import urllib
+ 
+ 
+-__all__ = ['extract_tarball',
+-           'extract_zip',
+-           'extract',
+-           'is_url',
+-           'load',
+-           'copy_contents',
+-           'move',
+-           'remove',
+-           'rmtree',
+-           'tree',
+-           'which',
+-           'NamedTemporaryFile',
+-           'TemporaryDirectory']
++__all__ = [
++    "extract_tarball",
++    "extract_zip",
++    "extract",
++    "is_url",
++    "load",
++    "copy_contents",
++    "move",
++    "remove",
++    "rmtree",
++    "tree",
++    "which",
++    "NamedTemporaryFile",
++    "TemporaryDirectory",
++]
+ 
+ # utilities for extracting archives
+ 
+ 
+ def extract_tarball(src, dest):
+     """extract a .tar file"""
+ 
+     import tarfile
+@@ -98,46 +100,52 @@ def extract(src, dest=None):
+         os.makedirs(dest)
+     assert not os.path.isfile(dest), "dest cannot be a file"
+ 
+     if tarfile.is_tarfile(src):
+         namelist = extract_tarball(src, dest)
+     elif zipfile.is_zipfile(src):
+         namelist = extract_zip(src, dest)
+     else:
+-        raise Exception("mozfile.extract: no archive format found for '%s'" %
+-                        src)
++        raise Exception("mozfile.extract: no archive format found for '%s'" % src)
+ 
+     # namelist returns paths with forward slashes even in windows
+-    top_level_files = [os.path.join(dest, name.rstrip('/')) for name in namelist
+-                       if len(name.rstrip('/').split('/')) == 1]
++    top_level_files = [
++        os.path.join(dest, name.rstrip("/"))
++        for name in namelist
++        if len(name.rstrip("/").split("/")) == 1
++    ]
+ 
+     # namelist doesn't include folders, append these to the list
+     for name in namelist:
+-        index = name.find('/')
++        index = name.find("/")
+         if index != -1:
+             root = os.path.join(dest, name[:index])
+             if root not in top_level_files:
+                 top_level_files.append(root)
+ 
+     return top_level_files
+ 
+ 
+ # utilities for removal of files and directories
+ 
++
+ def rmtree(dir):
+     """Deprecated wrapper method to remove a directory tree.
+ 
+     Ensure to update your code to use mozfile.remove() directly
+ 
+     :param dir: directory to be removed
+     """
+ 
+-    warnings.warn("mozfile.rmtree() is deprecated in favor of mozfile.remove()",
+-                  PendingDeprecationWarning, stacklevel=2)
++    warnings.warn(
++        "mozfile.rmtree() is deprecated in favor of mozfile.remove()",
++        PendingDeprecationWarning,
++        stacklevel=2,
++    )
+     return remove(dir)
+ 
+ 
+ def _call_windows_retry(func, args=(), retry_max=5, retry_delay=0.5):
+     """
+     It's possible to see spurious errors on Windows due to various things
+     keeping a handle to the directory open (explorer, virus scanners, etc)
+     So we try a few times if it fails with a known error.
+@@ -154,18 +162,20 @@ def _call_windows_retry(func, args=(), r
+             if e.errno not in (errno.EACCES, errno.ENOTEMPTY):
+                 raise
+ 
+             if retry_count == retry_max:
+                 raise
+ 
+             retry_count += 1
+ 
+-            print('%s() failed for "%s". Reason: %s (%s). Retrying...' %
+-                  (func.__name__, args, e.strerror, e.errno))
++            print(
++                '%s() failed for "%s". Reason: %s (%s). Retrying...'
++                % (func.__name__, args, e.strerror, e.errno)
++            )
+             time.sleep(retry_count * retry_delay)
+         else:
+             # If no exception has been thrown it should be done
+             break
+ 
+ 
+ def remove(path):
+     """Removes the specified file, link, or directory tree.
+@@ -236,22 +246,22 @@ def remove(path):
+ def copy_contents(srcdir, dstdir):
+     """
+     Copy the contents of the srcdir into the dstdir, preserving
+     subdirectories.
+ 
+     If an existing file of the same name exists in dstdir, it will be overwritten.
+     """
+     import shutil
++
+     # dirs_exist_ok was introduced in Python 3.8
+     # On earlier versions, or Windows, use the verbose mechanism.
+     # We use it on Windows because _call_with_windows_retry doesn't allow
+     # named arguments to be passed.
+-    if ((sys.version_info.major < 3 or sys.version_info.minor < 8)
+-        or (os.name == 'nt')):
++    if (sys.version_info.major < 3 or sys.version_info.minor < 8) or (os.name == "nt"):
+         names = os.listdir(srcdir)
+         if not os.path.isdir(dstdir):
+             os.makedirs(dstdir)
+         errors = []
+         for name in names:
+             srcname = os.path.join(srcdir, name)
+             dstname = os.path.join(dstdir, name)
+             try:
+@@ -281,16 +291,17 @@ def move(src, dst):
+     """
+     Move a file or directory path.
+ 
+     This is a replacement for shutil.move that works better under windows,
+     retrying operations on some known errors due to various things keeping
+     a handle on file paths.
+     """
+     import shutil
++
+     _call_windows_retry(shutil.move, (src, dst))
+ 
+ 
+ def depth(directory):
+     """returns the integer depth of a directory or path relative to '/' """
+ 
+     directory = os.path.abspath(directory)
+     level = 0
+@@ -299,19 +310,19 @@ def depth(directory):
+         level += 1
+         if not remainder:
+             break
+     return level
+ 
+ 
+ def tree(directory, sort_key=lambda x: x.lower()):
+     """Display tree directory structure for `directory`."""
+-    vertical_line = u'│'
+-    item_marker = u'├'
+-    last_child = u'└'
++    vertical_line = u"│"
++    item_marker = u"├"
++    last_child = u"└"
+ 
+     retval = []
+     indent = []
+     last = {}
+     top = depth(directory)
+ 
+     for dirpath, dirnames, filenames in os.walk(directory, topdown=True):
+ 
+@@ -332,36 +343,46 @@ def tree(directory, sort_key=lambda x: x
+             files_end = item_marker
+             last[abspath] = dirnames[-1]
+         else:
+             files_end = last_child
+ 
+         if last.get(parent) == os.path.basename(abspath):
+             # last directory of parent
+             dirpath_mark = last_child
+-            indent[-1] = ' '
++            indent[-1] = " "
+         elif not indent:
+-            dirpath_mark = ''
++            dirpath_mark = ""
+         else:
+             dirpath_mark = item_marker
+ 
+         # append the directory and piece of tree structure
+         # if the top-level entry directory, print as passed
+-        retval.append('%s%s%s' % (''.join(indent[:-1]),
+-                                  dirpath_mark,
+-                                  basename if retval else directory))
++        retval.append(
++            "%s%s%s"
++            % ("".join(indent[:-1]), dirpath_mark, basename if retval else directory)
++        )
+         # add the files
+         if filenames:
+             last_file = filenames[-1]
+-            retval.extend([('%s%s%s' % (''.join(indent),
+-                                        files_end if filename == last_file else item_marker,
+-                                        filename))
+-                           for index, filename in enumerate(filenames)])
++            retval.extend(
++                [
++                    (
++                        "%s%s%s"
++                        % (
++                            "".join(indent),
++                            files_end if filename == last_file else item_marker,
++                            filename,
++                        )
++                    )
++                    for index, filename in enumerate(filenames)
++                ]
++            )
+ 
+-    return '\n'.join(retval)
++    return "\n".join(retval)
+ 
+ 
+ def which(cmd, mode=os.F_OK | os.X_OK, path=None, exts=None):
+     """A wrapper around `shutil.which` to make the behavior on Windows
+     consistent with other platforms.
+ 
+     On non-Windows platforms, this is a direct call to `shutil.which`. On
+     Windows, this:
+@@ -396,42 +417,44 @@ def which(cmd, mode=os.F_OK | os.X_OK, p
+     # See: https://bugs.python.org/issue31405
+     if "." not in exts:
+         exts.append(".")
+ 
+     os.environ["PATHEXT"] = os.pathsep.join(exts)
+     try:
+         path = shutil_which(cmd, mode=mode, path=path)
+         if path:
+-            return os.path.abspath(path.rstrip('.'))
++            return os.path.abspath(path.rstrip("."))
+     finally:
+         if oldexts:
+             os.environ["PATHEXT"] = oldexts
+         else:
+             del os.environ["PATHEXT"]
+ 
+     # If we've gotten this far, we need to check for registered executables
+     # before giving up.
+     try:
+         import winreg
+     except ImportError:
+         import _winreg as winreg
+-    if not cmd.lower().endswith('.exe'):
+-        cmd += '.exe'
++    if not cmd.lower().endswith(".exe"):
++        cmd += ".exe"
+     try:
+         ret = winreg.QueryValue(
+             winreg.HKEY_LOCAL_MACHINE,
+-            r'SOFTWARE\Microsoft\Windows\CurrentVersion\App Paths\%s' % cmd)
++            r"SOFTWARE\Microsoft\Windows\CurrentVersion\App Paths\%s" % cmd,
++        )
+         return os.path.abspath(ret) if ret else None
+     except winreg.error:
+         return None
+ 
+ 
+ # utilities for temporary resources
+ 
++
+ class NamedTemporaryFile(object):
+     """
+     Like tempfile.NamedTemporaryFile except it works on Windows
+     in the case where you open the created file a second time.
+ 
+     This behaves very similarly to tempfile.NamedTemporaryFile but may
+     not behave exactly the same. For example, this function does not
+     prevent fd inheritance by children.
+@@ -441,50 +464,52 @@ class NamedTemporaryFile(object):
+     with NamedTemporaryFile() as fh:
+         fh.write(b'foobar')
+ 
+         print('Filename: %s' % fh.name)
+ 
+     see https://bugzilla.mozilla.org/show_bug.cgi?id=821362
+     """
+ 
+-    def __init__(self, mode='w+b', bufsize=-1, suffix='', prefix='tmp',
+-                 dir=None, delete=True):
++    def __init__(
++        self, mode="w+b", bufsize=-1, suffix="", prefix="tmp", dir=None, delete=True
++    ):
+ 
+         import tempfile
+-        fd, path = tempfile.mkstemp(suffix, prefix, dir, 't' in mode)
++
++        fd, path = tempfile.mkstemp(suffix, prefix, dir, "t" in mode)
+         os.close(fd)
+ 
+         self.file = open(path, mode)
+         self._path = path
+         self._delete = delete
+         self._unlinked = False
+ 
+     def __getattr__(self, k):
+-        return getattr(self.__dict__['file'], k)
++        return getattr(self.__dict__["file"], k)
+ 
+     def __iter__(self):
+-        return self.__dict__['file']
++        return self.__dict__["file"]
+ 
+     def __enter__(self):
+         self.file.__enter__()
+         return self
+ 
+     def __exit__(self, exc, value, tb):
+         self.file.__exit__(exc, value, tb)
+-        if self.__dict__['_delete']:
+-            os.unlink(self.__dict__['_path'])
++        if self.__dict__["_delete"]:
++            os.unlink(self.__dict__["_path"])
+             self._unlinked = True
+ 
+     def __del__(self):
+-        if self.__dict__['_unlinked']:
++        if self.__dict__["_unlinked"]:
+             return
+         self.file.__exit__(None, None, None)
+-        if self.__dict__['_delete']:
+-            os.unlink(self.__dict__['_path'])
++        if self.__dict__["_delete"]:
++            os.unlink(self.__dict__["_path"])
+ 
+ 
+ @contextmanager
+ def TemporaryDirectory():
+     """
+     create a temporary directory using tempfile.mkdtemp, and then clean it up.
+ 
+     Example usage:
+@@ -500,36 +525,37 @@ def TemporaryDirectory():
+     try:
+         yield tempdir
+     finally:
+         shutil.rmtree(tempdir)
+ 
+ 
+ # utilities dealing with URLs
+ 
++
+ def is_url(thing):
+     """
+     Return True if thing looks like a URL.
+     """
+ 
+     parsed = urllib.parse.urlparse(thing)
+-    if 'scheme' in parsed:
++    if "scheme" in parsed:
+         return len(parsed.scheme) >= 2
+     else:
+         return len(parsed[0]) >= 2
+ 
+ 
+ def load(resource):
+     """
+     open a file or URL for reading.  If the passed resource string is not a URL,
+     or begins with 'file://', return a ``file``.  Otherwise, return the
+     result of urllib.urlopen()
+     """
+ 
+     # handle file URLs separately due to python stdlib limitations
+-    if resource.startswith('file://'):
+-        resource = resource[len('file://'):]
++    if resource.startswith("file://"):
++        resource = resource[len("file://"):]
+ 
+     if not is_url(resource):
+         # if no scheme is given, it is a file path
+         return open(resource)
+ 
+     return urllib.request.urlopen(resource)
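
This patch is mechanical (black formatting plus re-ordered imports), so behaviour is unchanged. For orientation, here is a small usage sketch of the temporary-resource helpers it touches; the file suffix and the bytes written are invented:

    import mozfile

    with mozfile.TemporaryDirectory() as tmpdir:
        # Unlike tempfile.NamedTemporaryFile, the created file can be reopened
        # by name on Windows while it is still open here.
        with mozfile.NamedTemporaryFile(suffix=".txt", dir=tmpdir) as fh:
            fh.write(b"scratch data")
            fh.flush()
            print("Filename: %s" % fh.name)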

+ 486 - 0
mozilla-release/patches/1637845-10-79a1.patch

@@ -0,0 +1,486 @@
+# HG changeset patch
+# User Tom Ritter <tom@mozilla.com>
+# Date 1591646664 18000
+# Node ID f495b58337dc18cabe732a534c359c330c2458b3
+# Parent  9735c3d4c193a4e57bf9cd1e8181dc2d7a47ba73
+Bug 1637845 - Re-implement mach vendor r=glob
+
+This copies the vendoring steps used for dav1d but tries to make them
+generic for future libraries.
+
+Differential Revision: https://phabricator.services.mozilla.com/D75699
+
+diff --git a/python/mozbuild/mozbuild/vendor/__init__.py b/python/mozbuild/mozbuild/vendor/__init__.py
+new file mode 100644
+diff --git a/python/mozbuild/mozbuild/vendor/host_gitlab.py b/python/mozbuild/mozbuild/vendor/host_gitlab.py
+new file mode 100644
+--- /dev/null
++++ b/python/mozbuild/mozbuild/vendor/host_gitlab.py
+@@ -0,0 +1,31 @@
++# This Source Code Form is subject to the terms of the Mozilla Public
++# License, v. 2.0. If a copy of the MPL was not distributed with this
++# file, # You can obtain one at http://mozilla.org/MPL/2.0/.
++
++from __future__ import absolute_import, print_function, unicode_literals
++
++import urllib
++import requests
++
++
++class GitLabHost:
++    def __init__(self, manifest):
++        self.manifest = manifest
++
++    def upstream_commit(self, revision):
++        """Query the gitlab api for a git commit id and timestamp."""
++        repo_url = urllib.parse.urlparse(self.manifest["origin"]["url"])
++        gitlab_api = repo_url.scheme + "://" + repo_url.netloc + "/api/v4/projects/"
++        gitlab_api += repo_url.path[1:].replace("/", "%2F")
++        gitlab_api += "/repository/commits"
++
++        url = "/".join([gitlab_api, revision])
++        req = requests.get(url)
++        req.raise_for_status()
++        info = req.json()
++        return (info["id"], info["committed_date"])
++
++    def upstream_snapshot(self, revision):
++        return "/".join(
++            [self.manifest["origin"]["url"], "-", "archive", revision + ".tar.gz"]
++        )
+diff --git a/python/mozbuild/mozbuild/vendor/mach_commands.py b/python/mozbuild/mozbuild/vendor/mach_commands.py
+--- a/python/mozbuild/mozbuild/vendor/mach_commands.py
++++ b/python/mozbuild/mozbuild/vendor/mach_commands.py
+@@ -1,39 +1,111 @@
+ # This Source Code Form is subject to the terms of the Mozilla Public
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, # You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+ from __future__ import absolute_import, print_function, unicode_literals
+ 
+ import sys
++import logging
+ 
+ from mach.decorators import (
+     CommandArgument,
+     CommandArgumentGroup,
+     CommandProvider,
+     Command,
+     SubCommand,
+ )
+ 
+ from mozbuild.base import MachCommandBase
++from mozbuild.vendor.moz_yaml import load_moz_yaml, MozYamlVerifyError
+ 
+ 
+ @CommandProvider
+ class Vendor(MachCommandBase):
+     """Vendor third-party dependencies into the source repository."""
+ 
+     @Command(
+         "vendor",
+         category="misc",
+         description="Vendor third-party dependencies into the source repository.",
+     )
+-    def vendor(self):
+-        self._sub_mach(["help", "vendor"])
+-        return 1
++    @CommandArgument("--check-for-update", action="store_true", default=False)
++    @CommandArgument(
++        "--ignore-modified",
++        action="store_true",
++        help="Ignore modified files in current checkout",
++        default=False,
++    )
++    @CommandArgument("-r", "--revision", help="Repository tag or commit to update to.")
++    @CommandArgument("library", nargs=1)
++    @CommandArgumentGroup("verify")
++    @CommandArgument("--verify", "-v", action="store_true", help="Verify manifest")
++    def vendor(
++        self,
++        library,
++        revision,
++        ignore_modified=False,
++        check_for_update=False,
++        verify=False,
++    ):
++        """
++        Fun quirk of ./mach - you can specify a default argument as well as subcommands.
++        If the default argument matches a subcommand, the subcommand gets called. If it
++        doesn't, we wind up here to handle it.
++        """
++        library = library[0]
++        assert library not in ["rust", "python"]
++
++        self.populate_logger()
++        self.log_manager.enable_unstructured()
++
++        try:
++            manifest = load_moz_yaml(library)
++            if verify:
++                print("%s: OK" % library)
++                sys.exit(0)
++        except MozYamlVerifyError as e:
++            print(e)
++            sys.exit(1)
++
++        if not ignore_modified:
++            self.check_modified_files()
++        if not revision:
++            revision = "master"
++
++        from mozbuild.vendor.vendor_manifest import VendorManifest
++
++        vendor_command = self._spawn(VendorManifest)
++        vendor_command.vendor(library, manifest, revision, check_for_update)
++
++        sys.exit(0)
++
++    def check_modified_files(self):
++        """
++        Ensure that there aren't any uncommitted changes to files
++        in the working copy, since we're going to change some state
++        on the user.
++        """
++        modified = self.repository.get_changed_files("M")
++        if modified:
++            self.log(
++                logging.ERROR,
++                "modified_files",
++                {},
++                """You have uncommitted changes to the following files:
++
++{files}
++
++Please commit or stash these changes before vendoring, or re-run with `--ignore-modified`.
++""".format(
++                    files="\n".join(sorted(modified))
++                ),
++            )
++            sys.exit(1)
+ 
+     @SubCommand(
+         "vendor",
+         "rust",
+         description="Vendor rust crates from crates.io into third_party/rust",
+     )
+     @CommandArgument(
+         "--ignore-modified",
+@@ -47,17 +119,17 @@ class Vendor(MachCommandBase):
+         help=(
+             "Permit overly-large files to be added to the repository. "
+             "To get permission to set this, raise a question in the #build "
+             "channel at https://chat.mozilla.org."
+         ),
+         default=False,
+     )
+     def vendor_rust(self, **kwargs):
+-        from mozbuild.vendor_rust import VendorRust
++        from mozbuild.vendor.vendor_rust import VendorRust
+ 
+         vendor_command = self._spawn(VendorRust)
+         vendor_command.vendor(**kwargs)
+ 
+     @SubCommand(
+         "vendor",
+         "aom",
+         description="Vendor av1 video codec reference implementation into the "
+@@ -71,17 +143,17 @@ class Vendor(MachCommandBase):
+     )
+     @CommandArgument(
+         "--ignore-modified",
+         action="store_true",
+         help="Ignore modified files in current checkout",
+         default=False,
+     )
+     def vendor_aom(self, **kwargs):
+-        from mozbuild.vendor_aom import VendorAOM
++        from mozbuild.vendor.vendor_aom import VendorAOM
+ 
+         vendor_command = self._spawn(VendorAOM)
+         vendor_command.vendor(**kwargs)
+ 
+     @SubCommand(
+         "vendor",
+         "dav1d",
+         description="Vendor dav1d implementation of AV1 into the source repository.",
+@@ -92,17 +164,17 @@ class Vendor(MachCommandBase):
+     )
+     @CommandArgument(
+         "--ignore-modified",
+         action="store_true",
+         help="Ignore modified files in current checkout",
+         default=False,
+     )
+     def vendor_dav1d(self, **kwargs):
+-        from mozbuild.vendor_dav1d import VendorDav1d
++        from mozbuild.vendor.vendor_dav1d import VendorDav1d
+ 
+         vendor_command = self._spawn(VendorDav1d)
+         vendor_command.vendor(**kwargs)
+ 
+     @SubCommand(
+         "vendor",
+         "python",
+         description="Vendor Python packages from pypi.org into third_party/python",
+@@ -118,17 +190,17 @@ class Vendor(MachCommandBase):
+         default=None,
+         nargs="*",
+         help="Packages to vendor. If omitted, packages and their dependencies "
+         "defined in Pipfile.lock will be vendored. If Pipfile has been modified, "
+         "then Pipfile.lock will be regenerated. Note that transient dependencies "
+         "may be updated when running this command.",
+     )
+     def vendor_python(self, **kwargs):
+-        from mozbuild.vendor_python import VendorPython
++        from mozbuild.vendor.vendor_python import VendorPython
+ 
+         vendor_command = self._spawn(VendorPython)
+         vendor_command.vendor(**kwargs)
+ 
+     @SubCommand(
+         "vendor",
+         "manifest",
+         description="Vendor externally hosted repositories into this " "repository.",
+@@ -139,11 +211,11 @@ class Vendor(MachCommandBase):
+         "--verify",
+         "-v",
+         action="store_true",
+         group="verify",
+         required=True,
+         help="Verify manifest",
+     )
+     def vendor_manifest(self, files, verify):
+-        from mozbuild.vendor_manifest import verify_manifests
++        from mozbuild.vendor.vendor_manifest import verify_manifests
+ 
+         verify_manifests(files)
+diff --git a/python/mozbuild/mozbuild/vendor/vendor_manifest.py b/python/mozbuild/mozbuild/vendor/vendor_manifest.py
+--- a/python/mozbuild/mozbuild/vendor/vendor_manifest.py
++++ b/python/mozbuild/mozbuild/vendor/vendor_manifest.py
+@@ -1,21 +1,213 @@
+ # This Source Code Form is subject to the terms of the Mozilla Public
+ # License, v. 2.0. If a copy of the MPL was not distributed with this
+ # file, # You can obtain one at http://mozilla.org/MPL/2.0/.
+ 
+ from __future__ import absolute_import, print_function, unicode_literals
+ 
+-import sys
++import os
++import re
++import glob
++import logging
++import tarfile
++import requests
+ 
+-from . import moz_yaml
++import mozfile
++import mozpack.path as mozpath
++
++from mozbuild.base import MozbuildObject
++
++DEFAULT_EXCLUDE_FILES = [
++    ".git*",
++]
+ 
+ 
+-def verify_manifests(files):
+-    success = True
+-    for fn in files:
+-        try:
+-            moz_yaml.load_moz_yaml(fn)
+-            print("%s: OK" % fn)
+-        except moz_yaml.VerifyError as e:
+-            success = False
+-            print(e)
+-    sys.exit(0 if success else 1)
++class VendorManifest(MozbuildObject):
++    def vendor(self, yaml_file, manifest, revision, check_for_update):
++        self.manifest = manifest
++        if "vendor-directory" not in self.manifest["vendoring"]:
++            self.manifest["vendoring"]["vendor-directory"] = os.path.dirname(yaml_file)
++
++        self.source_host = self.get_source_host()
++
++        commit, timestamp = self.source_host.upstream_commit(revision)
++        self.log(
++            logging.INFO,
++            "vendor",
++            {"commit": commit, "timestamp": timestamp},
++            "Latest commit is {commit} from {timestamp}",
++        )
++
++        if self.manifest["origin"]["revision"] == commit:
++            self.log(
++                logging.INFO,
++                "vendor",
++                {},
++                "Latest upstream commit matches commit in-tree. Returning.",
++            )
++            return
++        elif check_for_update:
++            self.log(logging.ERROR, "vendor", {}, commit)
++            return
++
++        self.fetch_and_unpack(commit)
++
++        self.log(logging.INFO, "clean_upstream", {}, "Removing unnecessary files.")
++        self.clean_upstream()
++
++        self.log(logging.INFO, "update_moz.yaml", {}, "Updating moz.yaml.")
++        self.update_yaml(yaml_file, commit, timestamp)
++
++        self.log(logging.INFO, "update_files", {}, "Updating files")
++        self.update_files(commit, yaml_file)
++
++        self.log(
++            logging.INFO,
++            "add_remove_files",
++            {},
++            "Registering changes with version control.",
++        )
++        self.repository.add_remove_files(
++            self.manifest["vendoring"]["vendor-directory"], os.path.dirname(yaml_file)
++        )
++
++        self.log(
++            logging.INFO,
++            "done",
++            {"revision": revision},
++            "Update to version '{revision}' ready to commit.",
++        )
++
++    def get_source_host(self):
++        if self.manifest["vendoring"]["source-hosting"] == "gitlab":
++            from mozbuild.vendor.host_gitlab import GitLabHost
++
++            return GitLabHost(self.manifest)
++        else:
++            raise Exception(
++                "Unknown source host: " + self.manifest["vendoring"]["source-hosting"]
++            )
++
++    def fetch_and_unpack(self, revision):
++        """Fetch and unpack upstream source"""
++        url = self.source_host.upstream_snapshot(revision)
++        self.log(
++            logging.INFO,
++            "vendor",
++            {"revision_url": url},
++            "Fetching code archive from {revision_url}",
++        )
++
++        prefix = self.manifest["origin"]["name"] + "-" + revision
++        with mozfile.NamedTemporaryFile() as tmptarfile:
++            req = requests.get(url, stream=True)
++            for data in req.iter_content(4096):
++                tmptarfile.write(data)
++            tmptarfile.seek(0)
++
++            tar = tarfile.open(tmptarfile.name)
++
++            bad_paths = [
++                name
++                for name in tar.getnames()
++                if name.startswith("/") or ".." in name
++            ]
++            if bad_paths:
++                raise Exception(
++                    "Tar archive contains non-local paths, e.g. '%s'"
++                    % bad_paths[0]
++                )
++
++            vendor_dir = self.manifest["vendoring"]["vendor-directory"]
++            self.log(logging.INFO, "rm_vendor_dir", {}, "rm -rf %s" % vendor_dir)
++            mozfile.remove(vendor_dir)
++
++            self.log(
++                logging.INFO,
++                "unpack",
++                {"vendor_dir": vendor_dir},
++                "Unpacking upstream files from {vendor_dir}.",
++            )
++            tar.extractall(vendor_dir)
++
++            # GitLab puts everything properly down a directory; move it up.
++            if all(map(lambda name: name.startswith(prefix), tar.getnames())):
++                tardir = mozpath.join(vendor_dir, prefix)
++                mozfile.copy_contents(tardir, vendor_dir)
++                mozfile.remove(tardir)
++
++    def clean_upstream(self):
++        """Remove files we don't want to import."""
++        to_exclude = []
++        vendor_dir = self.manifest["vendoring"]["vendor-directory"]
++        for pattern in self.manifest["vendoring"]["exclude"] + DEFAULT_EXCLUDE_FILES:
++            if "*" in pattern:
++                to_exclude.extend(glob.iglob(mozpath.join(vendor_dir, pattern)))
++            else:
++                to_exclude.append(mozpath.join(vendor_dir, pattern))
++        self.log(
++            logging.INFO,
++            "clean_upstream",
++            {"files": to_exclude},
++            "Removing: " + str(to_exclude),
++        )
++        for f in to_exclude:
++            mozfile.remove(f)
++
++    def update_yaml(self, yaml_file, revision, timestamp):
++        with open(yaml_file) as f:
++            yaml = f.readlines()
++
++        replaced = 0
++        replacements = [
++            ["  release: commit", " %s (%s)." % (revision, timestamp)],
++            ["  revision:", " %s" % (revision)],
++        ]
++
++        for i in range(0, len(yaml)):
++            l = yaml[i]
++
++            for r in replacements:
++                if r[0] in l:
++                    print("Found " + l)
++                    replaced += 1
++                    yaml[i] = re.sub(r[0] + " [v\.a-f0-9]+.*$", r[0] + r[1], yaml[i])
++
++        assert len(replacements) == replaced
++
++        with open(yaml_file, "w") as f:
++            f.write("".join(yaml))
++
++    def update_files(self, revision, yaml_file):
++        def get_full_path(file):
++            if "{yaml_dir}" in file:
++                file = file.replace("{yaml_dir}", os.path.dirname(yaml_file))
++            else:
++                file = mozpath.join(
++                    self.manifest["vendoring"]["vendor-directory"], file
++                )
++            return file
++
++        if "file-updates" not in self.manifest["vendoring"]:
++            return
++
++        for update in self.manifest["vendoring"]["file-updates"]:
++            if update["action"] == "copy-file":
++                src = get_full_path(update["from"])
++                dst = get_full_path(update["to"])
++
++                with open(src) as f:
++                    contents = f.read()
++                with open(dst, "w") as f:
++                    f.write(contents)
++            elif update["action"] == "replace-in-file":
++                file = get_full_path(update["file"])
++                with open(file) as f:
++                    contents = f.read()
++
++                replacement = update["with"].replace("{revision}", revision)
++                contents = contents.replace(update["pattern"], replacement)
++
++                with open(file, "w") as f:
++                    f.write(contents)
++            else:
++                assert False, "Unknown action supplied (how did this pass validation?)"
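
With this in place a library is updated with ./mach vendor <path/to/moz.yaml> [-r <revision>]. The generic flow above leans on a small "source host" abstraction: as far as this patch shows, a backend only has to supply two methods, and everything else (download, unpack, exclusion clean-up, moz.yaml update, VCS add/remove) stays shared. A skeleton of that contract, with an invented class name:

    class ExampleHost:
        """Hypothetical backend showing the interface VendorManifest relies on."""

        def __init__(self, manifest):
            self.manifest = manifest  # the parsed moz.yaml dictionary

        def upstream_commit(self, revision):
            # Must return a (commit_id, timestamp) tuple for `revision`.
            raise NotImplementedError

        def upstream_snapshot(self, revision):
            # Must return a URL to a .tar.gz snapshot of `revision`.
            raise NotImplementedError

A later patch in this series adds GitHub and googlesource backends that satisfy the same two-method contract.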

+ 260 - 0
mozilla-release/patches/1637845-11-79a1.patch

@@ -0,0 +1,260 @@
+# HG changeset patch
+# User Tom Ritter <tom@mozilla.com>
+# Date 1591800363 0
+# Node ID 98ae9c7d8dfb49c32621284048f199e9d9b8d2c3
+# Parent  85dfc55ac4334787b51c408916dbc061cb5ead7f
+Bug 1637845 - Add/change support for running scripts to the moz.yaml schema r=glob
+
+
+Here we unify the 'run_after' section with 'file-updates', naming the
+combined section 'update-actions'. This allows for a simpler schema and a
+clearer picture of the order in which actions are taken.
+
+Differential Revision: https://phabricator.services.mozilla.com/D76428
+
+diff --git a/media/libdav1d/moz.yaml b/media/libdav1d/moz.yaml
+--- a/media/libdav1d/moz.yaml
++++ b/media/libdav1d/moz.yaml
+@@ -38,16 +38,16 @@ vendoring:
+   url: https://code.videolan.org/videolan/dav1d.git
+   source-hosting: gitlab
+   vendor-directory: third_party/dav1d
+ 
+   exclude:
+     - build/.gitattributes
+     - build/.gitignore
+ 
+-  file-updates:
++  update-actions:
+     - action: copy-file
+       from: include/vcs_version.h.in
+       to: '{yaml_dir}/vcs_version.h'
+     - action: replace-in-file
+       pattern: '@VCS_TAG@'
+       with: '{revision}'
+       file: '{yaml_dir}/vcs_version.h'
+diff --git a/python/mozbuild/mozbuild/vendor/moz_yaml.py b/python/mozbuild/mozbuild/vendor/moz_yaml.py
+--- a/python/mozbuild/mozbuild/vendor/moz_yaml.py
++++ b/python/mozbuild/mozbuild/vendor/moz_yaml.py
+@@ -162,51 +162,58 @@ vendoring:
+   # If neither "exclude" or "include" are set, all files will be vendored
+   # Files/paths in "include" will always be vendored, even if excluded
+   # eg. excluding "docs/" then including "docs/LICENSE" will vendor just the
+   #     LICENSE file from the docs directory
+ 
+   # All three file/path parameters ("keep", "exclude", and "include") support
+   # filenames, directory names, and globs/wildcards.
+ 
+-  # In-tree scripts to be executed after vendoring but before pushing.
+-  # optional
+-  run_after:
+-    - script
+-    - another script
+-
+-  # Actions to take on files after updating. Applied in order.
+-  # The action subfield is required. It must be one of 'copy-file', replace-in-file'.
++  # Actions to take after updating. Applied in order.
++  # The action subfield is required. It must be one of:
++  #   - copy-file
++  #   - replace-in-file
++  #   - run-script
+   # Unless otherwise noted, all subfields of action are required.
+   #
+   # If the action is copy-file:
+   #   from is the source file
+   #   to is the destination
+   #
+   # If the action is replace-in-file:
+   #   pattern is the text to search for in the file. It is an exact string match.
+   #   with is the string to replace it with. Accepts the special keyword
+   #     '{revision}' for the commit we are updating to.
+   #   file is the file in which to do the replacement.
+   #
+-  # Unless specified otherwise, all files are relative to the vendor-directory.
+-  #     If the vendor-directory is different from the directory of the yaml file,
+-  #     the keyword '{yaml_dir}' may be used to make the path relative to that
+-  #     directory
++  # If the action is run-script:
++  #   script is the script to run
++  #   cwd is the directory the script should run with as its cwd
++  #
++  # Unless specified otherwise, all files/directories are relative to the
++  #     vendor-directory. If the vendor-directory is different from the
++  #     directory of the yaml file, the keyword '{yaml_dir}' may be used
++  #     to make the path relative to that directory.
++  # 'run-script' supports the additional keyword {cwd} which, if used,
++  #     must appear at the beginning of the path.
+   #
+   # optional
+-  file-updates:
++  update-actions:
+     - action: copy-file
+       from: include/vcs_version.h.in
+       to: '{yaml_dir}/vcs_version.h'
+ 
+     - action: replace-in-file
+       pattern: '@VCS_TAG@'
+       with: '{revision}'
+       file: '{yaml_dir}/vcs_version.h'
++
++    - action: run-script
++      script: '{cwd}/generate_sources.sh'
++      cwd: '{yaml_dir}'
+ """
+ 
+ RE_SECTION = re.compile(r"^(\S[^:]*):").search
+ RE_FIELD = re.compile(r"^\s\s([^:]+):\s+(\S+)$").search
+ 
+ 
+ class MozYamlVerifyError(Exception):
+     def __init__(self, filename, error):
+@@ -313,30 +320,31 @@ def _schema_1():
+                     Length(min=1),
+                     In(VALID_SOURCE_HOSTS, msg="Unsupported Source Hosting"),
+                 ),
+                 "vendor-directory": All(str, Length(min=1)),
+                 "patches": Unique([str]),
+                 "keep": Unique([str]),
+                 "exclude": Unique([str]),
+                 "include": Unique([str]),
+-                "run_after": Unique([str]),
+-                "file-updates": All(
+-                    FileUpdate(),
++                "update-actions": All(
++                    UpdateActions(),
+                     [
+                         {
+                             Required("action"): In(
+-                                ["copy-file", "replace-in-file"],
+-                                msg="Invalid action specified in file-updates",
++                                ["copy-file", "replace-in-file", "run-script"],
++                                msg="Invalid action specified in update-actions",
+                             ),
+                             "from": All(str, Length(min=1)),
+                             "to": All(str, Length(min=1)),
+                             "pattern": All(str, Length(min=1)),
+                             "with": All(str, Length(min=1)),
+                             "file": All(str, Length(min=1)),
++                            "script": All(str, Length(min=1)),
++                            "cwd": All(str, Length(min=1)),
+                         }
+                     ],
+                 ),
+             },
+         }
+     )
+ 
+ 
+@@ -381,19 +389,18 @@ def _schema_1_additional(filename, manif
+         if not has_schema:
+             raise ValueError("Not simple YAML")
+ 
+     # Verify YAML can be updated.
+     if "vendor" in manifest:
+         update_moz_yaml(filename, "", "", verify=False, write=True)
+ 
+ 
+-class FileUpdate(object):
+-    """Voluptuous validator which verifies the license(s) are valid as per our
+-    whitelist."""
++class UpdateActions(object):
++    """Voluptuous validator which verifies the update actions(s) are valid."""
+ 
+     def __call__(self, values):
+         for v in values:
+             if "action" not in v:
+                 raise Invalid("All file-update entries must specify a valid action")
+             if v["action"] == "copy-file":
+                 if "from" not in v or "to" not in v or len(v.keys()) != 3:
+                     raise Invalid(
+@@ -405,24 +412,29 @@ class FileUpdate(object):
+                     or "with" not in v
+                     or "file" not in v
+                     or len(v.keys()) != 4
+                 ):
+                     raise Invalid(
+                         "replace-in-file action must (only) specify "
+                         + "'pattern', 'with', and 'file' keys"
+                     )
++            elif v["action"] == "run-script":
++                if "script" not in v or "cwd" not in v or len(v.keys()) != 3:
++                    raise Invalid(
++                        "run-script action must (only) specify 'script' and 'cwd' keys"
++                    )
+             else:
+                 # This check occurs before the validator above, so the above is
+                 # redundant but we leave it to be verbose.
+                 raise Invalid("Supplied action " + v["action"] + " is invalid.")
+         return values
+ 
+     def __repr__(self):
+-        return "FileUpdate"
++        return "UpdateActions"
+ 
+ 
+ class License(object):
+     """Voluptuous validator which verifies the license(s) are valid as per our
+     whitelist."""
+ 
+     def __call__(self, values):
+         if isinstance(values, str):
+diff --git a/python/mozbuild/mozbuild/vendor/vendor_manifest.py b/python/mozbuild/mozbuild/vendor/vendor_manifest.py
+--- a/python/mozbuild/mozbuild/vendor/vendor_manifest.py
++++ b/python/mozbuild/mozbuild/vendor/vendor_manifest.py
+@@ -173,29 +173,31 @@ class VendorManifest(MozbuildObject):
+                     yaml[i] = re.sub(r[0] + " [v\.a-f0-9]+.*$", r[0] + r[1], yaml[i])
+ 
+         assert len(replacements) == replaced
+ 
+         with open(yaml_file, "w") as f:
+             f.write("".join(yaml))
+ 
+     def update_files(self, revision, yaml_file):
+-        def get_full_path(file):
+-            if "{yaml_dir}" in file:
+-                file = file.replace("{yaml_dir}", os.path.dirname(yaml_file))
++        def get_full_path(path, support_cwd=False):
++            if support_cwd and path[0:5] == "{cwd}":
++                path = path.replace("{cwd}", ".")
++            elif "{yaml_dir}" in path:
++                path = path.replace("{yaml_dir}", os.path.dirname(yaml_file))
+             else:
+-                file = mozpath.join(
+-                    self.manifest["vendoring"]["vendor-directory"], file
++                path = mozpath.join(
++                    self.manifest["vendoring"]["vendor-directory"], path
+                 )
+-            return file
++            return path
+ 
+-        if "file-updates" not in self.manifest["vendoring"]:
++        if "update-actions" not in self.manifest["vendoring"]:
+             return
+ 
+-        for update in self.manifest["vendoring"]["file-updates"]:
++        for update in self.manifest["vendoring"]["update-actions"]:
+             if update["action"] == "copy-file":
+                 src = get_full_path(update["from"])
+                 dst = get_full_path(update["to"])
+ 
+                 with open(src) as f:
+                     contents = f.read()
+                 with open(dst, "w") as f:
+                     f.write(contents)
+@@ -204,10 +206,16 @@ class VendorManifest(MozbuildObject):
+                 with open(file) as f:
+                     contents = f.read()
+ 
+                 replacement = update["with"].replace("{revision}", revision)
+                 contents = contents.replace(update["pattern"], replacement)
+ 
+                 with open(file, "w") as f:
+                     f.write(contents)
++            elif update["action"] == "run-script":
++                script = get_full_path(update["script"], support_cwd=True)
++                run_dir = get_full_path(update["cwd"])
++                self.run_process(
++                    args=[script], cwd=run_dir, log_name=script,
++                )
+             else:
+                 assert False, "Unknown action supplied (how did this pass validation?)"
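
To make the placeholder rules concrete, here is a standalone sketch of the resolution performed by get_full_path() above. The example paths come from the dav1d manifest; the helper is a simplified copy, not the real method:

    import os
    import mozpack.path as mozpath

    vendor_dir = "third_party/dav1d"
    yaml_file = "media/libdav1d/moz.yaml"

    def resolve(path, support_cwd=False):
        if support_cwd and path.startswith("{cwd}"):
            return path.replace("{cwd}", ".")  # allowed for run-script only
        if "{yaml_dir}" in path:
            return path.replace("{yaml_dir}", os.path.dirname(yaml_file))
        return mozpath.join(vendor_dir, path)  # default: the vendor-directory

    resolve("include/vcs_version.h.in")    # third_party/dav1d/include/vcs_version.h.in
    resolve("{yaml_dir}/vcs_version.h")    # media/libdav1d/vcs_version.h
    resolve("{cwd}/generate_sources.sh", support_cwd=True)  # ./generate_sources.sh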

+ 145 - 0
mozilla-release/patches/1637845-12-79a1.patch

@@ -0,0 +1,145 @@
+# HG changeset patch
+# User Tom Ritter <tom@mozilla.com>
+# Date 1591800402 0
+# Node ID d22bc1280d8fdb6730efdec49fa8df7d45d2a881
+# Parent  0df841760d4453cf6f30ebdfd0458aa332a3ab06
+Bug 1637845 - Add a delete-path action to the update-actions r=glob
+
+Differential Revision: https://phabricator.services.mozilla.com/D76429
+
+diff --git a/python/mozbuild/mozbuild/vendor/moz_yaml.py b/python/mozbuild/mozbuild/vendor/moz_yaml.py
+--- a/python/mozbuild/mozbuild/vendor/moz_yaml.py
++++ b/python/mozbuild/mozbuild/vendor/moz_yaml.py
+@@ -166,29 +166,33 @@ vendoring:
+ 
+   # All three file/path parameters ("keep", "exclude", and "include") support
+   # filenames, directory names, and globs/wildcards.
+ 
+   # Actions to take after updating. Applied in order.
+   # The action subfield is required. It must be one of:
+   #   - copy-file
+   #   - replace-in-file
++  #   - delete-path
+   #   - run-script
+   # Unless otherwise noted, all subfields of action are required.
+   #
+   # If the action is copy-file:
+   #   from is the source file
+   #   to is the destination
+   #
+   # If the action is replace-in-file:
+   #   pattern is the text to search for in the file. It is an exact string match.
+   #   with is the string to replace it with. Accepts the special keyword
+   #     '{revision}' for the commit we are updating to.
+   #   file is the file in which to do the replacement.
+   #
++  # If the action is delete-path
++  #   path is the file or directory to recursively delete
++  #
+   # If the action is run-script:
+   #   script is the script to run
+   #   cwd is the directory the script should run with as its cwd
+   #
+   # Unless specified otherwise, all files/directories are relative to the
+   #     vendor-directory. If the vendor-directory is different from the
+   #     directory of the yaml file, the keyword '{yaml_dir}' may be used
+   #     to make the path relative to that directory.
+@@ -201,16 +205,19 @@ vendoring:
+       from: include/vcs_version.h.in
+       to: '{yaml_dir}/vcs_version.h'
+ 
+     - action: replace-in-file
+       pattern: '@VCS_TAG@'
+       with: '{revision}'
+       file: '{yaml_dir}/vcs_version.h'
+ 
++    - action: delete-path
++      path: '{yaml_dir}/config'
++
+     - action: run-script
+       script: '{cwd}/generate_sources.sh'
+       cwd: '{yaml_dir}'
+ """
+ 
+ RE_SECTION = re.compile(r"^(\S[^:]*):").search
+ RE_FIELD = re.compile(r"^\s\s([^:]+):\s+(\S+)$").search
+ 
+@@ -325,26 +332,32 @@ def _schema_1():
+                 "keep": Unique([str]),
+                 "exclude": Unique([str]),
+                 "include": Unique([str]),
+                 "update-actions": All(
+                     UpdateActions(),
+                     [
+                         {
+                             Required("action"): In(
+-                                ["copy-file", "replace-in-file", "run-script"],
++                                [
++                                    "copy-file",
++                                    "replace-in-file",
++                                    "run-script",
++                                    "delete-path",
++                                ],
+                                 msg="Invalid action specified in update-actions",
+                             ),
+                             "from": All(str, Length(min=1)),
+                             "to": All(str, Length(min=1)),
+                             "pattern": All(str, Length(min=1)),
+                             "with": All(str, Length(min=1)),
+                             "file": All(str, Length(min=1)),
+                             "script": All(str, Length(min=1)),
+                             "cwd": All(str, Length(min=1)),
++                            "path": All(str, Length(min=1)),
+                         }
+                     ],
+                 ),
+             },
+         }
+     )
+ 
+ 
+@@ -412,16 +425,21 @@ class UpdateActions(object):
+                     or "with" not in v
+                     or "file" not in v
+                     or len(v.keys()) != 4
+                 ):
+                     raise Invalid(
+                         "replace-in-file action must (only) specify "
+                         + "'pattern', 'with', and 'file' keys"
+                     )
++            elif v["action"] == "delete-path":
++                if "path" not in v or len(v.keys()) != 2:
++                    raise Invalid(
++                        "delete-path action must (only) specify the 'path' key"
++                    )
+             elif v["action"] == "run-script":
+                 if "script" not in v or "cwd" not in v or len(v.keys()) != 3:
+                     raise Invalid(
+                         "run-script action must (only) specify 'script' and 'cwd' keys"
+                     )
+             else:
+                 # This check occurs before the validator above, so the above is
+                 # redundant but we leave it to be verbose.
+diff --git a/python/mozbuild/mozbuild/vendor/vendor_manifest.py b/python/mozbuild/mozbuild/vendor/vendor_manifest.py
+--- a/python/mozbuild/mozbuild/vendor/vendor_manifest.py
++++ b/python/mozbuild/mozbuild/vendor/vendor_manifest.py
+@@ -206,16 +206,19 @@ class VendorManifest(MozbuildObject):
+                 with open(file) as f:
+                     contents = f.read()
+ 
+                 replacement = update["with"].replace("{revision}", revision)
+                 contents = contents.replace(update["pattern"], replacement)
+ 
+                 with open(file, "w") as f:
+                     f.write(contents)
++            elif update["action"] == "delete-path":
++                path = get_full_path(update["path"])
++                mozfile.remove(path)
+             elif update["action"] == "run-script":
+                 script = get_full_path(update["script"], support_cwd=True)
+                 run_dir = get_full_path(update["cwd"])
+                 self.run_process(
+                     args=[script], cwd=run_dir, log_name=script,
+                 )
+             else:
+                 assert False, "Unknown action supplied (how did this pass validation?)"
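
Condensed, the two newer actions handled above reduce to the following; resolve() is the placeholder helper sketched after the previous patch, and run_process comes from MozbuildObject:

    import mozfile

    def apply_new_action(update, resolve, run_process):
        if update["action"] == "delete-path":
            # Recursively removes a file or an entire directory in the checkout.
            mozfile.remove(resolve(update["path"]))
        elif update["action"] == "run-script":
            script = resolve(update["script"], support_cwd=True)
            run_process(args=[script], cwd=resolve(update["cwd"]), log_name=script)

The libaom manifest added two patches later uses exactly these two actions.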

+ 133 - 0
mozilla-release/patches/1637845-13-79a1.patch

@@ -0,0 +1,133 @@
+# HG changeset patch
+# User Tom Ritter <tom@mozilla.com>
+# Date 1591800443 0
+# Node ID 5877126012cdfc91d8cdcc374d48bb52c9641c77
+# Parent  8a8ec8a1948c49ec796ccb6f72d362cf085e2b18
+Bug 1637845 - Add support for github and googlesource as hosting repositories r=glob
+
+Differential Revision: https://phabricator.services.mozilla.com/D76430
+
+diff --git a/python/mozbuild/mozbuild/vendor/host_github.py b/python/mozbuild/mozbuild/vendor/host_github.py
+new file mode 100644
+--- /dev/null
++++ b/python/mozbuild/mozbuild/vendor/host_github.py
+@@ -0,0 +1,29 @@
++# This Source Code Form is subject to the terms of the Mozilla Public
++# License, v. 2.0. If a copy of the MPL was not distributed with this
++# file, # You can obtain one at http://mozilla.org/MPL/2.0/.
++
++from __future__ import absolute_import, print_function, unicode_literals
++
++import urllib
++import requests
++
++
++class GitHubHost:
++    def __init__(self, manifest):
++        self.manifest = manifest
++
++    def upstream_commit(self, revision):
++        """Query the github api for a git commit id and timestamp."""
++        github_api = "https://api.github.com/"
++        repo_url = urllib.parse.urlparse(self.manifest["origin"]["url"])
++        repo = repo_url.path[1:]
++        url = "/".join([github_api, "repos", repo, "commits", revision])
++        req = requests.get(url)
++        req.raise_for_status()
++        info = req.json()
++        return (info["sha"], info["commit"]["committer"]["date"])
++
++    def upstream_snapshot(self, revision):
++        return "/".join(
++            [self.manifest["origin"]["url"], "archive", revision + ".tar.gz"]
++        )
+diff --git a/python/mozbuild/mozbuild/vendor/host_googlesource.py b/python/mozbuild/mozbuild/vendor/host_googlesource.py
+new file mode 100644
+--- /dev/null
++++ b/python/mozbuild/mozbuild/vendor/host_googlesource.py
+@@ -0,0 +1,33 @@
++# This Source Code Form is subject to the terms of the Mozilla Public
++# License, v. 2.0. If a copy of the MPL was not distributed with this
++# file, # You can obtain one at http://mozilla.org/MPL/2.0/.
++
++from __future__ import absolute_import, print_function, unicode_literals
++
++import requests
++
++
++class GoogleSourceHost:
++    def __init__(self, manifest):
++        self.manifest = manifest
++
++    def upstream_commit(self, revision):
++        """Query for a git commit and timestamp."""
++        url = "/".join([self.manifest["origin"]["url"], "+", revision + "?format=JSON"])
++        req = requests.get(url)
++        req.raise_for_status()
++        try:
++            info = req.json()
++        except ValueError:
++            # As of 2017 May, googlesource sends 4 garbage characters
++            # at the beginning of the json response. Work around this.
++            # https://bugs.chromium.org/p/chromium/issues/detail?id=718550
++            import json
++
++            info = json.loads(req.text[4:])
++        return (info["commit"], info["committer"]["time"])
++
++    def upstream_snapshot(self, revision):
++        return "/".join(
++            [self.manifest["origin"]["url"], "+archive", revision + ".tar.gz"]
++        )
+diff --git a/python/mozbuild/mozbuild/vendor/moz_yaml.py b/python/mozbuild/mozbuild/vendor/moz_yaml.py
+--- a/python/mozbuild/mozbuild/vendor/moz_yaml.py
++++ b/python/mozbuild/mozbuild/vendor/moz_yaml.py
+@@ -52,17 +52,17 @@ VALID_LICENSES = [
+     # Unique Licenses
+     "ACE",  # http://www.cs.wustl.edu/~schmidt/ACE-copying.html
+     "Anti-Grain-Geometry",  # http://www.antigrain.com/license/index.html
+     "JPNIC",  # https://www.nic.ad.jp/ja/idn/idnkit/download/index.html
+     "Khronos",  # https://www.khronos.org/openmaxdl
+     "Unicode",  # http://www.unicode.org/copyright.html
+ ]
+ 
+-VALID_SOURCE_HOSTS = ["gitlab"]
++VALID_SOURCE_HOSTS = ["gitlab", "googlesource", "github"]
+ 
+ """
+ ---
+ # Third-Party Library Template
+ # All fields are mandatory unless otherwise noted
+ 
+ # Version of this schema
+ schema: 1
+diff --git a/python/mozbuild/mozbuild/vendor/vendor_manifest.py b/python/mozbuild/mozbuild/vendor/vendor_manifest.py
+--- a/python/mozbuild/mozbuild/vendor/vendor_manifest.py
++++ b/python/mozbuild/mozbuild/vendor/vendor_manifest.py
+@@ -75,18 +75,25 @@ class VendorManifest(MozbuildObject):
+             "done",
+             {"revision": revision},
+             "Update to version '{revision}' ready to commit.",
+         )
+ 
+     def get_source_host(self):
+         if self.manifest["vendoring"]["source-hosting"] == "gitlab":
+             from mozbuild.vendor.host_gitlab import GitLabHost
++            return GitLabHost(self.manifest)
++        elif self.manifest["vendoring"]["source-hosting"] == "github":
++            from mozbuild.vendor.host_github import GitHubHost
+ 
+-            return GitLabHost(self.manifest)
++            return GitHubHost(self.manifest)
++        elif self.manifest["vendoring"]["source-hosting"] == "googlesource":
++            from mozbuild.vendor.host_googlesource import GoogleSourceHost
++
++            return GoogleSourceHost(self.manifest)
+         else:
+             raise Exception(
+                 "Unknown source host: " + self.manifest["vendoring"]["source-hosting"]
+             )
+ 
+     def fetch_and_unpack(self, revision):
+         """Fetch and unpack upstream source"""
+         url = self.source_host.upstream_snapshot(revision)
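
The new backends can be exercised without touching the network, because upstream_snapshot() is pure string manipulation. A quick check of the URL shape the GitHub backend produces; the repository URL and tag below are made up for illustration:

    from mozbuild.vendor.host_github import GitHubHost

    manifest = {"origin": {"url": "https://github.com/example-org/example-lib"}}
    host = GitHubHost(manifest)
    host.upstream_snapshot("v1.2.3")
    # -> "https://github.com/example-org/example-lib/archive/v1.2.3.tar.gz"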

+ 95 - 0
mozilla-release/patches/1637845-14-79a1.patch

@@ -0,0 +1,95 @@
+# HG changeset patch
+# User Tom Ritter <tom@mozilla.com>
+# Date 1591800477 0
+# Node ID f100ac23b748b575a9a387c75435802961b69cb0
+# Parent  843beb95bf744868d7c98fc83a729ad5e8b4c7d5
+Bug 1637845 - Add a moz.yaml for libaom and remove version information from the README r=glob
+
+Differential Revision: https://phabricator.services.mozilla.com/D76432
+
+diff --git a/media/libaom/README_MOZILLA b/media/libaom/README_MOZILLA
+--- a/media/libaom/README_MOZILLA
++++ b/media/libaom/README_MOZILLA
+@@ -3,23 +3,18 @@ codec reference implementation. The actu
+ source is in $TOPSRCDIR/third_party/aom/
+ 
+ Any patches or additional configuration to be applied to the
+ upstream source should be kept here in the media/libaom
+ directory.
+ 
+ To update the library source and build config files, execute
+ 
+-  ./mach vendor aom
++  ./mach vendor media/libaom/moz.yaml
+ 
+ To update to a specific upstream git tag or commit, use
+ 
+-  ./mach vendor aom -r <commit>
++  ./mach vendor media/libaom/moz.yaml -r <commit>
+ 
+ The upstream git repository is https://aomedia.googlesource.com/aom
+ 
+-To update to a fork, use
+-
+-  ./mach vendor aom --repo <repository url> [-r <commit>]
+-
+-The last update was pulled from https://aomedia.googlesource.com/aom/
+-
+-The git commit ID used was 1e227d41f0616de9548a673a83a21ef990b62591 (Tue Sep 18 17:30:35 2018 +0000).
++To view the information about the current version, check the
++'origin' section of moz.yaml.
+\ No newline at end of file
+diff --git a/media/libaom/moz.yaml b/media/libaom/moz.yaml
+new file mode 100644
+--- /dev/null
++++ b/media/libaom/moz.yaml
+@@ -0,0 +1,49 @@
++# Version of this schema
++schema: 1
++
++bugzilla:
++  # Bugzilla product and component for this directory and subdirectories
++  product: Core
++  component: "Audio/Video: Playback"
++
++# Document the source of externally hosted code
++origin:
++
++  # Short name of the package/library
++  name: aom
++
++  description: av1 decoder
++
++  # Full URL for the package's homepage/etc
++  # Usually different from repository url
++  url: https://aomedia.googlesource.com/aom/
++
++  # Human-readable identifier for this version/release
++  # Generally "version NNN", "tag SSS", "bookmark SSS"
++  release: commit 1e227d41f0616de9548a673a83a21ef990b62591 (Tue Sep 18 17:30:35 2018 +0000).
++
++  # Revision to pull in
++  # Must be a long or short commit SHA (long preferred)
++  revision: 1e227d41f0616de9548a673a83a21ef990b62591
++
++  # The package's license, where possible using the mnemonic from
++  # https://spdx.org/licenses/
++  # Multiple licenses can be specified (as a YAML list)
++  # A "LICENSE" file must exist containing the full license text
++  license: BSD-2-Clause
++
++vendoring:
++  url: https://aomedia.googlesource.com/aom
++  source-hosting: googlesource
++  vendor-directory: third_party/aom
++
++  exclude:
++    - build/.gitattributes
++    - build/.gitignore
++
++  update-actions:
++    - action: delete-path
++      path: '{yaml_dir}/config'
++    - action: run-script
++      script: '{cwd}/generate_sources_mozbuild.sh'
++      cwd: '{yaml_dir}'
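
Assuming load_moz_yaml() returns the parsed dictionary that the vendoring code indexes (as the earlier patches in this series use it), the new manifest is consumed like this; the values in the comments mirror the file above:

    from mozbuild.vendor.moz_yaml import load_moz_yaml

    manifest = load_moz_yaml("media/libaom/moz.yaml")
    manifest["origin"]["revision"]             # 1e227d41f0616de9548a673a83a21ef990b62591
    manifest["vendoring"]["vendor-directory"]  # third_party/aom
    [a["action"] for a in manifest["vendoring"]["update-actions"]]
    # ['delete-path', 'run-script']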

+ 110 - 0
mozilla-release/patches/1637845-15-79a1.patch

@@ -0,0 +1,110 @@
+# HG changeset patch
+# User Tom Ritter <tom@mozilla.com>
+# Date 1591800515 0
+# Node ID a15f6c29bbeb3fa01f1d823d8b87f5dcf3f46738
+# Parent  ecd4cd0fc5f6233af822f30c5e2a77de0643d73b
+Bug 1637845 - Allow ./mach command --help to print out the help for both sub-commands and command arguments r=ahal
+
+
+It is possible in mach to have both a default command (with positional
+arguments) and subcommands (with their own arguments). If the positional
+argument matches a subcommand, that subcommand is dispatched to; otherwise
+the default command is called with the positional argument filled in.
+
+However, when you run ./mach command --help, mach detects the subcommands
+and prints only their help section. If the default command has arguments,
+they were not printed out. Now they are.
+
+Small papercuts in this patch are that the Default Command Arguments are
+printed after the subcommands, and that subcommand help without default
+arguments has an extra newline after it. Both of these seem small enough
+that the refactoring necessary to avoid them is undesirable.
+
+Differential Revision: https://phabricator.services.mozilla.com/D76505
+
+diff --git a/python/mach/mach/dispatcher.py b/python/mach/mach/dispatcher.py
+--- a/python/mach/mach/dispatcher.py
++++ b/python/mach/mach/dispatcher.py
+@@ -302,26 +302,17 @@ class CommandAction(argparse.Action):
+         for arg in handler.arguments:
+             # Apply our group keyword.
+             group_name = arg[1].get('group')
+             if group_name:
+                 del arg[1]['group']
+                 group = extra_groups[group_name]
+             group.add_argument(*arg[0], **arg[1])
+ 
+-    def _handle_command_help(self, parser, command, args):
+-        handler = self._mach_registrar.command_handlers.get(command)
+-
+-        if not handler:
+-            raise UnknownCommandError(command, 'query')
+-
+-        if handler.subcommand_handlers:
+-            self._handle_subcommand_help(parser, handler, args)
+-            return
+-
++    def _get_command_arguments_help(self, handler):
+         # This code is worth explaining. Because we are doing funky things with
+         # argument registration to allow the same option in both global and
+         # command arguments, we can't simply put all arguments on the same
+         # parser instance because argparse would complain. We can't register an
+         # argparse subparser here because it won't properly show help for
+         # global arguments. So, we employ a strategy similar to command
+         # execution where we construct a 2nd, independent ArgumentParser for
+         # just the command data then supplement the main help's output with
+@@ -351,16 +342,30 @@ class CommandAction(argparse.Action):
+                 handler.description = c_parser.description
+                 c_parser.description = None
+         else:
+             c_parser = argparse.ArgumentParser(**parser_args)
+             group = c_parser.add_argument_group('Command Arguments')
+ 
+         self._populate_command_group(c_parser, handler, group)
+ 
++        return c_parser
++
++    def _handle_command_help(self, parser, command, args):
++        handler = self._mach_registrar.command_handlers.get(command)
++
++        if not handler:
++            raise UnknownCommandError(command, 'query')
++
++        if handler.subcommand_handlers:
++            self._handle_subcommand_help(parser, handler, args)
++            return
++
++        c_parser = self._get_command_arguments_help(handler)
++
+         # Set the long help of the command to the docstring (if present) or
+         # the command decorator description argument (if present).
+         if handler.docstring:
+             parser.description = format_docstring(handler.docstring)
+         elif handler.description:
+             parser.description = handler.description
+ 
+         parser.usage = '%(prog)s [global arguments] ' + command + \
+@@ -387,19 +392,23 @@ class CommandAction(argparse.Action):
+                 key=by_decl_order if handler.order == 'declaration' else by_name
+         ):
+             group.add_argument(subcommand, help=subhandler.description,
+                                action='store_true')
+ 
+         if handler.docstring:
+             parser.description = format_docstring(handler.docstring)
+ 
++        c_parser = self._get_command_arguments_help(handler)
++
+         parser.formatter_class = argparse.RawDescriptionHelpFormatter
+ 
+         parser.print_help()
++        print('')
++        c_parser.print_help()
+ 
+     def _handle_subcommand_help(self, parser, handler, args):
+         subcommand = set(args).intersection(list(handler.subcommand_handlers.keys()))
+         if not subcommand:
+             return self._handle_subcommand_main_help(parser, handler)
+ 
+         subcommand = subcommand.pop()
+         subhandler = handler.subcommand_handlers[subcommand]
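
The refactoring above extracts _get_command_arguments_help so that a second, independent ArgumentParser holding only the command's own arguments can be built and its help printed after the main parser's output. Below is a minimal, self-contained sketch of that two-parser pattern using plain argparse; the tool and option names are invented and this is not mach's actual dispatcher.

    import argparse

    # The main parser owns only the global arguments.
    main = argparse.ArgumentParser(prog="tool", add_help=False)
    main.add_argument("-v", "--verbose", action="store_true",
                      help="global: enable verbose output")

    # A second, independent parser owns the command's own arguments, so the
    # same option strings could appear in both without argparse complaining.
    command = argparse.ArgumentParser(prog="tool mycommand", add_help=False)
    group = command.add_argument_group("Command Arguments")
    group.add_argument("--check", action="store_true",
                       help="command-specific flag")

    # Emulate `tool mycommand --help`: global help first, a blank line, then
    # the command-argument help, mirroring the patched subcommand main help.
    main.print_help()
    print("")
    command.print_help()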

+ 617 - 0
mozilla-release/patches/1637845-16-79a1.patch

@@ -0,0 +1,617 @@
+# HG changeset patch
+# User Tom Ritter <tom@mozilla.com>
+# Date 1591800549 0
+# Node ID 84ed48eb7d0c2df89d9bf410c771acda6ea44f15
+# Parent  b73655ecb83c31d3f29724fae3cc57af06f65482
+Bug 1637845 - Remove the old implementations of dav1d and aom vendoring and clean up the mach command r=glob
+
+Differential Revision: https://phabricator.services.mozilla.com/D76506
+
+diff --git a/python/mozbuild/mozbuild/vendor/mach_commands.py b/python/mozbuild/mozbuild/vendor/mach_commands.py
+--- a/python/mozbuild/mozbuild/vendor/mach_commands.py
++++ b/python/mozbuild/mozbuild/vendor/mach_commands.py
+@@ -4,58 +4,60 @@
+ 
+ from __future__ import absolute_import, print_function, unicode_literals
+ 
+ import sys
+ import logging
+ 
+ from mach.decorators import (
+     CommandArgument,
+-    CommandArgumentGroup,
+     CommandProvider,
+     Command,
+     SubCommand,
+ )
+ 
+ from mozbuild.base import MachCommandBase
+ from mozbuild.vendor.moz_yaml import load_moz_yaml, MozYamlVerifyError
+ 
+ 
+ @CommandProvider
+ class Vendor(MachCommandBase):
+     """Vendor third-party dependencies into the source repository."""
+ 
++    # Fun quirk of ./mach - you can specify a default argument as well as subcommands.
++    # If the default argument matches a subcommand, the subcommand gets called. If it
++    # doesn't, we wind up in the default command.
+     @Command(
+         "vendor",
+         category="misc",
+         description="Vendor third-party dependencies into the source repository.",
+     )
+     @CommandArgument("--check-for-update", action="store_true", default=False)
+     @CommandArgument(
+         "--ignore-modified",
+         action="store_true",
+         help="Ignore modified files in current checkout",
+         default=False,
+     )
+     @CommandArgument("-r", "--revision", help="Repository tag or commit to update to.")
+-    @CommandArgument("library", nargs=1)
+-    @CommandArgumentGroup("verify")
+-    @CommandArgument("--verify", "-v", action="store_true", help="Verify manifest")
++    @CommandArgument("--verify", "-v", action="store_true", help="(Only) verify the manifest")
++    @CommandArgument("library", nargs=1, help="The moz.yaml file of the library to vendor.")
+     def vendor(
+         self,
+         library,
+         revision,
+         ignore_modified=False,
+         check_for_update=False,
+         verify=False,
+     ):
+         """
+-        Fun quirk of ./mach - you can specify a default argument as well as subcommands.
+-        If the default argument matches a subcommand, the subcommand gets called. If it
+-        doesn't, we wind up here to handle it.
++        Vendor third-party dependencies into the source repository.
++
++        Vendoring rust and python can be done with ./mach vendor [rust/python].
++        Vendoring other libraries can be done with ./mach vendor [arguments] path/to/file.yaml
+         """
+         library = library[0]
+         assert library not in ["rust", "python"]
+ 
+         self.populate_logger()
+         self.log_manager.enable_unstructured()
+ 
+         try:
+@@ -97,16 +99,18 @@ class Vendor(MachCommandBase):
+ 
+ Please commit or stash these changes before vendoring, or re-run with `--ignore-modified`.
+ """.format(
+                     files="\n".join(sorted(modified))
+                 ),
+             )
+             sys.exit(1)
+ 
++# =====================================================================
++
+     @SubCommand(
+         "vendor",
+         "rust",
+         description="Vendor rust crates from crates.io into third_party/rust",
+     )
+     @CommandArgument(
+         "--ignore-modified",
+         action="store_true",
+@@ -124,60 +128,17 @@ Please commit or stash these changes bef
+         default=False,
+     )
+     def vendor_rust(self, **kwargs):
+         from mozbuild.vendor.vendor_rust import VendorRust
+ 
+         vendor_command = self._spawn(VendorRust)
+         vendor_command.vendor(**kwargs)
+ 
+-    @SubCommand(
+-        "vendor",
+-        "aom",
+-        description="Vendor av1 video codec reference implementation into the "
+-        "source repository.",
+-    )
+-    @CommandArgument("-r", "--revision", help="Repository tag or commit to update to.")
+-    @CommandArgument(
+-        "--repo",
+-        help="Repository url to pull a snapshot from. "
+-        "Supports github and googlesource.",
+-    )
+-    @CommandArgument(
+-        "--ignore-modified",
+-        action="store_true",
+-        help="Ignore modified files in current checkout",
+-        default=False,
+-    )
+-    def vendor_aom(self, **kwargs):
+-        from mozbuild.vendor.vendor_aom import VendorAOM
+-
+-        vendor_command = self._spawn(VendorAOM)
+-        vendor_command.vendor(**kwargs)
+-
+-    @SubCommand(
+-        "vendor",
+-        "dav1d",
+-        description="Vendor dav1d implementation of AV1 into the source repository.",
+-    )
+-    @CommandArgument("-r", "--revision", help="Repository tag or commit to update to.")
+-    @CommandArgument(
+-        "--repo", help="Repository url to pull a snapshot from. Supports gitlab."
+-    )
+-    @CommandArgument(
+-        "--ignore-modified",
+-        action="store_true",
+-        help="Ignore modified files in current checkout",
+-        default=False,
+-    )
+-    def vendor_dav1d(self, **kwargs):
+-        from mozbuild.vendor.vendor_dav1d import VendorDav1d
+-
+-        vendor_command = self._spawn(VendorDav1d)
+-        vendor_command.vendor(**kwargs)
++# =====================================================================
+ 
+     @SubCommand(
+         "vendor",
+         "python",
+         description="Vendor Python packages from pypi.org into third_party/python",
+     )
+     @CommandArgument(
+         "--with-windows-wheel",
+@@ -194,28 +155,8 @@ Please commit or stash these changes bef
+         "then Pipfile.lock will be regenerated. Note that transient dependencies "
+         "may be updated when running this command.",
+     )
+     def vendor_python(self, **kwargs):
+         from mozbuild.vendor.vendor_python import VendorPython
+ 
+         vendor_command = self._spawn(VendorPython)
+         vendor_command.vendor(**kwargs)
+-
+-    @SubCommand(
+-        "vendor",
+-        "manifest",
+-        description="Vendor externally hosted repositories into this " "repository.",
+-    )
+-    @CommandArgument("files", nargs="+", help="Manifest files to work on")
+-    @CommandArgumentGroup("verify")
+-    @CommandArgument(
+-        "--verify",
+-        "-v",
+-        action="store_true",
+-        group="verify",
+-        required=True,
+-        help="Verify manifest",
+-    )
+-    def vendor_manifest(self, files, verify):
+-        from mozbuild.vendor.vendor_manifest import verify_manifests
+-
+-        verify_manifests(files)
+diff --git a/python/mozbuild/mozbuild/vendor/vendor_aom.py b/python/mozbuild/mozbuild/vendor/vendor_aom.py
+deleted file mode 100644
+--- a/python/mozbuild/mozbuild/vendor/vendor_aom.py
++++ /dev/null
+@@ -1,227 +0,0 @@
+-# This Source Code Form is subject to the terms of the Mozilla Public
+-# License, v. 2.0. If a copy of the MPL was not distributed with this
+-# file, # You can obtain one at http://mozilla.org/MPL/2.0/.
+-
+-from __future__ import absolute_import, print_function, unicode_literals
+-
+-import logging
+-from mozbuild.base import MozbuildObject
+-import mozfile
+-import mozpack.path as mozpath
+-import os
+-import requests
+-import re
+-import sys
+-import tarfile
+-from urllib.parse import urlparse
+-
+-
+-class VendorAOM(MozbuildObject):
+-    def upstream_snapshot(self, revision):
+-        """Construct a url for a tarball snapshot of the given revision."""
+-        if "googlesource" in self.repo_url:
+-            return mozpath.join(self.repo_url, "+archive", revision + ".tar.gz")
+-        elif "github" in self.repo_url:
+-            return mozpath.join(self.repo_url, "archive", revision + ".tar.gz")
+-        else:
+-            raise ValueError("Unknown git host, no snapshot lookup method")
+-
+-    def upstream_commit(self, revision):
+-        """Convert a revision to a git commit and timestamp.
+-
+-        Ask the upstream repo to convert the requested revision to
+-        a git commit id and timestamp, so we can be precise in
+-        what we're vendoring."""
+-        if "googlesource" in self.repo_url:
+-            return self.upstream_googlesource_commit(revision)
+-        elif "github" in self.repo_url:
+-            return self.upstream_github_commit(revision)
+-        else:
+-            raise ValueError("Unknown git host, no commit lookup method")
+-
+-    def upstream_validate(self, url):
+-        """Validate repository urls to make sure we can handle them."""
+-        host = urlparse(url).netloc
+-        valid_domains = ("googlesource.com", "github.com")
+-        if not any(filter(lambda domain: domain in host, valid_domains)):
+-            self.log(
+-                logging.ERROR,
+-                "upstream_url",
+-                {},
+-                """Unsupported git host %s; cannot fetch snapshots.
+-
+-Please set a repository url with --repo on either googlesource or github."""
+-                % host,
+-            )
+-            sys.exit(1)
+-
+-    def upstream_googlesource_commit(self, revision):
+-        """Query gitiles for a git commit and timestamp."""
+-        url = mozpath.join(self.repo_url, "+", revision + "?format=JSON")
+-        self.log(logging.INFO, "fetch", {"url": url}, "Fetching commit id from {url}")
+-        req = requests.get(url)
+-        req.raise_for_status()
+-        try:
+-            info = req.json()
+-        except ValueError:
+-            # As of 2017 May, googlesource sends 4 garbage characters
+-            # at the beginning of the json response. Work around this.
+-            # https://bugs.chromium.org/p/chromium/issues/detail?id=718550
+-            import json
+-
+-            info = json.loads(req.text[4:])
+-        return (info["commit"], info["committer"]["time"])
+-
+-    def upstream_github_commit(self, revision):
+-        """Query the github api for a git commit id and timestamp."""
+-        github_api = "https://api.github.com/"
+-        repo = urlparse(self.repo_url).path[1:]
+-        url = mozpath.join(github_api, "repos", repo, "commits", revision)
+-        self.log(logging.INFO, "fetch", {"url": url}, "Fetching commit id from {url}")
+-        req = requests.get(url)
+-        req.raise_for_status()
+-        info = req.json()
+-        return (info["sha"], info["commit"]["committer"]["date"])
+-
+-    def fetch_and_unpack(self, revision, target):
+-        """Fetch and unpack upstream source"""
+-        url = self.upstream_snapshot(revision)
+-        self.log(logging.INFO, "fetch", {"url": url}, "Fetching {url}")
+-        prefix = "aom-" + revision
+-        filename = prefix + ".tar.gz"
+-        with open(filename, "wb") as f:
+-            req = requests.get(url, stream=True)
+-            for data in req.iter_content(4096):
+-                f.write(data)
+-        tar = tarfile.open(filename)
+-        bad_paths = filter(
+-            lambda name: name.startswith("/") or ".." in name, tar.getnames()
+-        )
+-        if any(bad_paths):
+-            raise Exception(
+-                "Tar archive contains non-local paths," "e.g. '%s'" % bad_paths[0]
+-            )
+-        self.log(logging.INFO, "rm_vendor_dir", {}, "rm -rf %s" % target)
+-        mozfile.remove(target)
+-        self.log(logging.INFO, "unpack", {}, "Unpacking upstream files.")
+-        tar.extractall(target)
+-        # Github puts everything properly down a directory; move it up.
+-        if all(map(lambda name: name.startswith(prefix), tar.getnames())):
+-            tardir = mozpath.join(target, prefix)
+-            os.system("mv %s/* %s/.* %s" % (tardir, tardir, target))
+-            os.rmdir(tardir)
+-        # Remove the tarball.
+-        mozfile.remove(filename)
+-
+-    def update_readme(self, revision, timestamp, target):
+-        filename = mozpath.join(target, "README_MOZILLA")
+-        with open(filename) as f:
+-            readme = f.read()
+-
+-        prefix = "The git commit ID used was"
+-        if prefix in readme:
+-            new_readme = re.sub(
+-                prefix + " [v\.a-f0-9]+.*$",
+-                prefix + " %s (%s)." % (revision, timestamp),
+-                readme,
+-            )
+-        else:
+-            new_readme = "%s\n\n%s %s." % (readme, prefix, revision)
+-
+-        prefix = "The last update was pulled from"
+-        new_readme = re.sub(
+-            prefix + " https*://.*", prefix + " %s" % self.repo_url, new_readme
+-        )
+-
+-        if readme != new_readme:
+-            with open(filename, "w") as f:
+-                f.write(new_readme)
+-
+-    def clean_upstream(self, target):
+-        """Remove files we don't want to import."""
+-        mozfile.remove(mozpath.join(target, ".gitattributes"))
+-        mozfile.remove(mozpath.join(target, ".gitignore"))
+-        mozfile.remove(mozpath.join(target, "build", ".gitattributes"))
+-        mozfile.remove(mozpath.join(target, "build", ".gitignore"))
+-
+-    def generate_sources(self, target):
+-        """
+-        Run the library's native build system to update ours.
+-
+-        Invoke configure for each supported platform to generate
+-        appropriate config and header files, then invoke the
+-        makefile to obtain a list of source files, writing
+-        these out in the appropriate format for our build
+-        system to use.
+-        """
+-        config_dir = mozpath.join(target, "config")
+-        self.log(logging.INFO, "rm_confg_dir", {}, "rm -rf %s" % config_dir)
+-        mozfile.remove(config_dir)
+-        self.run_process(
+-            args=["./generate_sources_mozbuild.sh"],
+-            cwd=target,
+-            log_name="generate_sources",
+-        )
+-
+-    def check_modified_files(self):
+-        """
+-        Ensure that there aren't any uncommitted changes to files
+-        in the working copy, since we're going to change some state
+-        on the user.
+-        """
+-        modified = self.repository.get_changed_files("M")
+-        if modified:
+-            self.log(
+-                logging.ERROR,
+-                "modified_files",
+-                {},
+-                """You have uncommitted changes to the following files:
+-
+-{files}
+-
+-Please commit or stash these changes before vendoring, or re-run with `--ignore-modified`.
+-""".format(
+-                    files="\n".join(sorted(modified))
+-                ),
+-            )
+-            sys.exit(1)
+-
+-    def vendor(self, revision, repo, ignore_modified=False):
+-        self.populate_logger()
+-        self.log_manager.enable_unstructured()
+-
+-        if not ignore_modified:
+-            self.check_modified_files()
+-        if not revision:
+-            revision = "master"
+-        if repo:
+-            self.repo_url = repo
+-        else:
+-            self.repo_url = "https://aomedia.googlesource.com/aom/"
+-        self.upstream_validate(self.repo_url)
+-
+-        commit, timestamp = self.upstream_commit(revision)
+-
+-        vendor_dir = mozpath.join(self.topsrcdir, "third_party/aom")
+-        self.fetch_and_unpack(commit, vendor_dir)
+-        self.log(logging.INFO, "clean_upstream", {}, """Removing unnecessary files.""")
+-        self.clean_upstream(vendor_dir)
+-        glue_dir = mozpath.join(self.topsrcdir, "media/libaom")
+-        self.log(logging.INFO, "generate_sources", {}, """Generating build files...""")
+-        self.generate_sources(glue_dir)
+-        self.log(logging.INFO, "update_readme", {}, """Updating README_MOZILLA.""")
+-        self.update_readme(commit, timestamp, glue_dir)
+-        self.log(
+-            logging.INFO,
+-            "add_remove_files",
+-            {},
+-            """Registering changes with version control.""",
+-        )
+-        self.repository.add_remove_files(vendor_dir, glue_dir)
+-        self.repository.add_remove_files(glue_dir)
+-        self.log(
+-            logging.INFO,
+-            "done",
+-            {"revision": revision},
+-            """Update to aom version '{revision}' ready to commit.""",
+-        )
+diff --git a/python/mozbuild/mozbuild/vendor/vendor_dav1d.py b/python/mozbuild/mozbuild/vendor/vendor_dav1d.py
+deleted file mode 100644
+--- a/python/mozbuild/mozbuild/vendor/vendor_dav1d.py
++++ /dev/null
+@@ -1,190 +0,0 @@
+-# This Source Code Form is subject to the terms of the Mozilla Public
+-# License, v. 2.0. If a copy of the MPL was not distributed with this
+-# file, # You can obtain one at http://mozilla.org/MPL/2.0/.
+-
+-from __future__ import absolute_import, print_function, unicode_literals
+-
+-import logging
+-from mozbuild.base import MozbuildObject
+-import mozfile
+-import mozpack.path as mozpath
+-import os
+-import requests
+-import re
+-import sys
+-import tarfile
+-from urllib.parse import urlparse
+-
+-
+-class VendorDav1d(MozbuildObject):
+-    def upstream_snapshot(self, revision):
+-        """Construct a url for a tarball snapshot of the given revision."""
+-        if "code.videolan.org" in self.repo_url:
+-            return mozpath.join(self.repo_url, "-", "archive", revision + ".tar.gz")
+-        else:
+-            raise ValueError("Unknown git host, no snapshot lookup method")
+-
+-    def upstream_commit(self, revision):
+-        """Convert a revision to a git commit and timestamp.
+-
+-        Ask the upstream repo to convert the requested revision to
+-        a git commit id and timestamp, so we can be precise in
+-        what we're vendoring."""
+-        if "code.videolan.org" in self.repo_url:
+-            return self.upstream_gitlab_commit(revision)
+-        else:
+-            raise ValueError("Unknown git host, no commit lookup method")
+-
+-    def upstream_validate(self, url):
+-        """Validate repository urls to make sure we can handle them."""
+-        host = urlparse(url).netloc
+-        valid_domains = "code.videolan.org"
+-        if not any(filter(lambda domain: domain in host, valid_domains)):
+-            self.log(
+-                logging.ERROR,
+-                "upstream_url",
+-                {},
+-                """Unsupported git host %s; cannot fetch snapshots.
+-
+-Please set a repository url with --repo on either googlesource or github."""
+-                % host,
+-            )
+-            sys.exit(1)
+-
+-    def upstream_gitlab_commit(self, revision):
+-        """Query the github api for a git commit id and timestamp."""
+-        gitlab_api = "https://code.videolan.org/api/v4/projects/videolan%2Fdav1d/repository/commits"  # noqa
+-        url = mozpath.join(gitlab_api, revision)
+-        self.log(logging.INFO, "fetch", {"url": url}, "Fetching commit id from {url}")
+-        req = requests.get(url)
+-        req.raise_for_status()
+-        info = req.json()
+-        return (info["id"], info["committed_date"])
+-
+-    def fetch_and_unpack(self, revision, target):
+-        """Fetch and unpack upstream source"""
+-        url = self.upstream_snapshot(revision)
+-        self.log(logging.INFO, "fetch", {"url": url}, "Fetching {url}")
+-        prefix = "dav1d-" + revision
+-        filename = prefix + ".tar.gz"
+-        with open(filename, "wb") as f:
+-            req = requests.get(url, stream=True)
+-            for data in req.iter_content(4096):
+-                f.write(data)
+-        tar = tarfile.open(filename)
+-        bad_paths = filter(
+-            lambda name: name.startswith("/") or ".." in name, tar.getnames()
+-        )
+-        if any(bad_paths):
+-            raise Exception(
+-                "Tar archive contains non-local paths," "e.g. '%s'" % bad_paths[0]
+-            )
+-        self.log(logging.INFO, "rm_vendor_dir", {}, "rm -rf %s" % target)
+-        mozfile.remove(target)
+-        self.log(logging.INFO, "unpack", {}, "Unpacking upstream files.")
+-        tar.extractall(target)
+-        # Github puts everything properly down a directory; move it up.
+-        if all(map(lambda name: name.startswith(prefix), tar.getnames())):
+-            tardir = mozpath.join(target, prefix)
+-            os.system("mv %s/* %s/.* %s" % (tardir, tardir, target))
+-            os.rmdir(tardir)
+-        # Remove the tarball.
+-        mozfile.remove(filename)
+-
+-    def update_yaml(self, revision, timestamp, target):
+-        filename = mozpath.join(target, "moz.yaml")
+-        with open(filename) as f:
+-            yaml = f.read()
+-
+-        prefix = "  release: commit"
+-        if prefix in yaml:
+-            new_yaml = re.sub(
+-                prefix + " [v\.a-f0-9]+.*$",
+-                prefix + " %s (%s)." % (revision, timestamp),
+-                yaml,
+-                flags=re.MULTILINE,
+-            )
+-        else:
+-            new_yaml = "%s\n\n%s %s." % (yaml, prefix, revision)
+-
+-        if yaml != new_yaml:
+-            with open(filename, "w") as f:
+-                f.write(new_yaml)
+-
+-    def update_vcs_version(self, revision, vendor_dir, glue_dir):
+-        src_filename = mozpath.join(vendor_dir, "include/vcs_version.h.in")
+-        dst_filename = mozpath.join(glue_dir, "vcs_version.h")
+-        with open(src_filename) as f:
+-            vcs_version_in = f.read()
+-        vcs_version = vcs_version_in.replace("@VCS_TAG@", revision)
+-        with open(dst_filename, "w") as f:
+-            f.write(vcs_version)
+-
+-    def clean_upstream(self, target):
+-        """Remove files we don't want to import."""
+-        mozfile.remove(mozpath.join(target, ".gitattributes"))
+-        mozfile.remove(mozpath.join(target, ".gitignore"))
+-        mozfile.remove(mozpath.join(target, "build", ".gitattributes"))
+-        mozfile.remove(mozpath.join(target, "build", ".gitignore"))
+-
+-    def check_modified_files(self):
+-        """
+-        Ensure that there aren't any uncommitted changes to files
+-        in the working copy, since we're going to change some state
+-        on the user.
+-        """
+-        modified = self.repository.get_changed_files("M")
+-        if modified:
+-            self.log(
+-                logging.ERROR,
+-                "modified_files",
+-                {},
+-                """You have uncommitted changes to the following files:
+-
+-{files}
+-
+-Please commit or stash these changes before vendoring, or re-run with `--ignore-modified`.
+-""".format(
+-                    files="\n".join(sorted(modified))
+-                ),
+-            )
+-            sys.exit(1)
+-
+-    def vendor(self, revision, repo, ignore_modified=False):
+-        self.populate_logger()
+-        self.log_manager.enable_unstructured()
+-
+-        if not ignore_modified:
+-            self.check_modified_files()
+-        if not revision:
+-            revision = "master"
+-        if repo:
+-            self.repo_url = repo
+-        else:
+-            self.repo_url = "https://code.videolan.org/videolan/dav1d"
+-        self.upstream_validate(self.repo_url)
+-
+-        commit, timestamp = self.upstream_commit(revision)
+-
+-        vendor_dir = mozpath.join(self.topsrcdir, "third_party/dav1d")
+-        self.fetch_and_unpack(commit, vendor_dir)
+-        self.log(logging.INFO, "clean_upstream", {}, """Removing unnecessary files.""")
+-        self.clean_upstream(vendor_dir)
+-        glue_dir = mozpath.join(self.topsrcdir, "media/libdav1d")
+-        self.log(logging.INFO, "update_moz.yaml", {}, """Updating moz.yaml.""")
+-        self.update_yaml(commit, timestamp, glue_dir)
+-        self.log(logging.INFO, "update_vcs_version", {}, """Updating vcs_version.h.""")
+-        self.update_vcs_version(commit, vendor_dir, glue_dir)
+-        self.log(
+-            logging.INFO,
+-            "add_remove_files",
+-            {},
+-            """Registering changes with version control.""",
+-        )
+-        self.repository.add_remove_files(vendor_dir, glue_dir)
+-        self.log(
+-            logging.INFO,
+-            "done",
+-            {"revision": revision},
+-            """Update to dav1d version '{revision}' ready to commit.""",
+-        )
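
For context, the host-specific snapshot URLs that these deleted modules hard-coded (googlesource and github in vendor_aom.py, code.videolan.org in vendor_dav1d.py) are exactly the kind of detail the moz.yaml source-hosting field now selects; the real selection happens in get_source_host, shown in a later patch of this series. A simplified, hypothetical sketch using the same URL shapes as the removed upstream_snapshot helpers:

    def snapshot_url(source_hosting, repo_url, revision):
        """Build a tarball snapshot URL, mirroring the shapes used by the
        deleted vendor_aom.py / vendor_dav1d.py helpers."""
        base = repo_url.rstrip("/")
        if source_hosting == "googlesource":
            return "%s/+archive/%s.tar.gz" % (base, revision)
        if source_hosting == "github":
            return "%s/archive/%s.tar.gz" % (base, revision)
        if source_hosting == "gitlab":
            return "%s/-/archive/%s.tar.gz" % (base, revision)
        raise ValueError("Unknown source host: %s" % source_hosting)

    print(snapshot_url("googlesource",
                       "https://aomedia.googlesource.com/aom", "master"))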

+ 142 - 0
mozilla-release/patches/1637845-17-79a1.patch

@@ -0,0 +1,142 @@
+# HG changeset patch
+# User Tom Ritter <tom@mozilla.com>
+# Date 1591800589 0
+# Node ID 70bc5f4b56d5720fecf28afba4a116d6c7ea908b
+# Parent  bd2539d481d045c03522eedb0f92c3d8e66e41e2
+Bug 1637845 - Implement --check-for-update, controlling the output carefully for scripted use r=glob
+
+Differential Revision: https://phabricator.services.mozilla.com/D76614
+
+diff --git a/python/mozbuild/mozbuild/vendor/mach_commands.py b/python/mozbuild/mozbuild/vendor/mach_commands.py
+--- a/python/mozbuild/mozbuild/vendor/mach_commands.py
++++ b/python/mozbuild/mozbuild/vendor/mach_commands.py
+@@ -25,26 +25,35 @@ class Vendor(MachCommandBase):
+     # Fun quirk of ./mach - you can specify a default argument as well as subcommands.
+     # If the default argument matches a subcommand, the subcommand gets called. If it
+     # doesn't, we wind up in the default command.
+     @Command(
+         "vendor",
+         category="misc",
+         description="Vendor third-party dependencies into the source repository.",
+     )
+-    @CommandArgument("--check-for-update", action="store_true", default=False)
++    @CommandArgument(
++        "--check-for-update",
++        action="store_true",
++        help="For scripted use, prints the new commit to update to, or nothing if up to date.",
++        default=False,
++    )
+     @CommandArgument(
+         "--ignore-modified",
+         action="store_true",
+         help="Ignore modified files in current checkout",
+         default=False,
+     )
+     @CommandArgument("-r", "--revision", help="Repository tag or commit to update to.")
+-    @CommandArgument("--verify", "-v", action="store_true", help="(Only) verify the manifest")
+-    @CommandArgument("library", nargs=1, help="The moz.yaml file of the library to vendor.")
++    @CommandArgument(
++        "--verify", "-v", action="store_true", help="(Only) verify the manifest"
++    )
++    @CommandArgument(
++        "library", nargs=1, help="The moz.yaml file of the library to vendor."
++    )
+     def vendor(
+         self,
+         library,
+         revision,
+         ignore_modified=False,
+         check_for_update=False,
+         verify=False,
+     ):
+@@ -54,27 +63,29 @@ class Vendor(MachCommandBase):
+         Vendoring rust and python can be done with ./mach vendor [rust/python].
+         Vendoring other libraries can be done with ./mach vendor [arguments] path/to/file.yaml
+         """
+         library = library[0]
+         assert library not in ["rust", "python"]
+ 
+         self.populate_logger()
+         self.log_manager.enable_unstructured()
++        if check_for_update:
++            logging.disable()
+ 
+         try:
+             manifest = load_moz_yaml(library)
+             if verify:
+                 print("%s: OK" % library)
+                 sys.exit(0)
+         except MozYamlVerifyError as e:
+             print(e)
+             sys.exit(1)
+ 
+-        if not ignore_modified:
++        if not ignore_modified and not check_for_update:
+             self.check_modified_files()
+         if not revision:
+             revision = "master"
+ 
+         from mozbuild.vendor.vendor_manifest import VendorManifest
+ 
+         vendor_command = self._spawn(VendorManifest)
+         vendor_command.vendor(library, manifest, revision, check_for_update)
+@@ -99,17 +110,17 @@ class Vendor(MachCommandBase):
+ 
+ Please commit or stash these changes before vendoring, or re-run with `--ignore-modified`.
+ """.format(
+                     files="\n".join(sorted(modified))
+                 ),
+             )
+             sys.exit(1)
+ 
+-# =====================================================================
++    # =====================================================================
+ 
+     @SubCommand(
+         "vendor",
+         "rust",
+         description="Vendor rust crates from crates.io into third_party/rust",
+     )
+     @CommandArgument(
+         "--ignore-modified",
+@@ -128,17 +139,17 @@ Please commit or stash these changes bef
+         default=False,
+     )
+     def vendor_rust(self, **kwargs):
+         from mozbuild.vendor.vendor_rust import VendorRust
+ 
+         vendor_command = self._spawn(VendorRust)
+         vendor_command.vendor(**kwargs)
+ 
+-# =====================================================================
++    # =====================================================================
+ 
+     @SubCommand(
+         "vendor",
+         "python",
+         description="Vendor Python packages from pypi.org into third_party/python",
+     )
+     @CommandArgument(
+         "--with-windows-wheel",
+diff --git a/python/mozbuild/mozbuild/vendor/vendor_manifest.py b/python/mozbuild/mozbuild/vendor/vendor_manifest.py
+--- a/python/mozbuild/mozbuild/vendor/vendor_manifest.py
++++ b/python/mozbuild/mozbuild/vendor/vendor_manifest.py
+@@ -41,17 +41,17 @@ class VendorManifest(MozbuildObject):
+             self.log(
+                 logging.INFO,
+                 "vendor",
+                 {},
+                 "Latest upstream commit matches commit in-tree. Returning.",
+             )
+             return
+         elif check_for_update:
+-            self.log(logging.ERROR, "vendor", {}, commit)
++            print("%s" % commit)
+             return
+ 
+         self.fetch_and_unpack(commit)
+ 
+         self.log(logging.INFO, "clean_upstream", {}, "Removing unnecessary files.")
+         self.clean_upstream()
+ 
+         self.log(logging.INFO, "update_moz.yaml", {}, "Updating moz.yaml.")
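
The flag is aimed at automation: with logging disabled, the command prints either the new upstream commit or nothing at all. A sketch of how a script might consume that output, assuming it runs from the root of a checkout and points at the moz.yaml added earlier in this series (the invocation details are illustrative, not taken from an in-tree tool):

    import subprocess

    result = subprocess.run(
        ["./mach", "vendor", "--check-for-update", "media/libaom/moz.yaml"],
        capture_output=True, text=True, check=True,
    )
    new_commit = result.stdout.strip()
    if new_commit:
        print("update available, new upstream commit:", new_commit)
    else:
        print("already up to date")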

+ 91 - 0
mozilla-release/patches/1637845-18-79a1.patch

@@ -0,0 +1,91 @@
+# HG changeset patch
+# User Tom Ritter <tom@mozilla.com>
+# Date 1591800624 0
+# Node ID 6500c049e562946cac1a46407ea6e7a5f61dc01c
+# Parent  2b2fc8a66c3decb8a596cd2314455975d976a7aa
+Bug 1637845 - Add debug logging to the Update Actions r=glob CLOSED TREE
+
+Differential Revision: https://phabricator.services.mozilla.com/D77691
+
+diff --git a/python/mozbuild/mozbuild/vendor/vendor_manifest.py b/python/mozbuild/mozbuild/vendor/vendor_manifest.py
+--- a/python/mozbuild/mozbuild/vendor/vendor_manifest.py
++++ b/python/mozbuild/mozbuild/vendor/vendor_manifest.py
+@@ -75,16 +75,17 @@ class VendorManifest(MozbuildObject):
+             "done",
+             {"revision": revision},
+             "Update to version '{revision}' ready to commit.",
+         )
+ 
+     def get_source_host(self):
+         if self.manifest["vendoring"]["source-hosting"] == "gitlab":
+             from mozbuild.vendor.host_gitlab import GitLabHost
++
+             return GitLabHost(self.manifest)
+         elif self.manifest["vendoring"]["source-hosting"] == "github":
+             from mozbuild.vendor.host_github import GitHubHost
+ 
+             return GitHubHost(self.manifest)
+         elif self.manifest["vendoring"]["source-hosting"] == "googlesource":
+             from mozbuild.vendor.host_googlesource import GoogleSourceHost
+ 
+@@ -199,33 +200,60 @@ class VendorManifest(MozbuildObject):
+         if "update-actions" not in self.manifest["vendoring"]:
+             return
+ 
+         for update in self.manifest["vendoring"]["update-actions"]:
+             if update["action"] == "copy-file":
+                 src = get_full_path(update["from"])
+                 dst = get_full_path(update["to"])
+ 
++                self.log(
++                    logging.DEBUG,
++                    "vendor",
++                    {"src": src, "dst": dst},
++                    "Performing copy-file action src: {src} dst: {dst}",
++                )
++
+                 with open(src) as f:
+                     contents = f.read()
+                 with open(dst, "w") as f:
+                     f.write(contents)
+             elif update["action"] == "replace-in-file":
+                 file = get_full_path(update["file"])
++
++                self.log(
++                    logging.DEBUG,
++                    "vendor",
++                    {"file": file},
++                    "Performing replace-in-file action file: {file}",
++                )
++
+                 with open(file) as f:
+                     contents = f.read()
+ 
+                 replacement = update["with"].replace("{revision}", revision)
+                 contents = contents.replace(update["pattern"], replacement)
+ 
+                 with open(file, "w") as f:
+                     f.write(contents)
+             elif update["action"] == "delete-path":
+                 path = get_full_path(update["path"])
++                self.log(
++                    logging.DEBUG,
++                    "vendor",
++                    {"path": path},
++                    "Performing delete-path action path: {path}",
++                )
+                 mozfile.remove(path)
+             elif update["action"] == "run-script":
+                 script = get_full_path(update["script"], support_cwd=True)
+                 run_dir = get_full_path(update["cwd"])
++                self.log(
++                    logging.DEBUG,
++                    "vendor",
++                    {"script": script, "run_dir": run_dir},
++                    "Performing run-script action script: {script} working dir: {run_dir}",
++                )
+                 self.run_process(
+                     args=[script], cwd=run_dir, log_name=script,
+                 )
+             else:
+                 assert False, "Unknown action supplied (how did this pass validation?)"
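
The actions being logged here are the ones declared under update-actions in a library's moz.yaml, such as the delete-path and run-script entries added for libaom earlier in this series. A stripped-down, hypothetical version of the loop, without the {yaml_dir}/{cwd} placeholder expansion or mach's structured logging, shows the shape of the data each branch expects:

    import shutil
    import subprocess

    update_actions = [
        {"action": "delete-path", "path": "media/libaom/config"},
        {"action": "run-script",
         "script": "./generate_sources_mozbuild.sh",
         "cwd": "media/libaom"},
    ]

    for update in update_actions:
        if update["action"] == "delete-path":
            # The real code uses mozfile.remove(); shutil.rmtree is a
            # stdlib stand-in for removing a directory tree.
            shutil.rmtree(update["path"], ignore_errors=True)
        elif update["action"] == "run-script":
            subprocess.run([update["script"]], cwd=update["cwd"], check=True)
        else:
            raise ValueError("Unknown action: %s" % update["action"])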

+ 27 - 0
mozilla-release/patches/series

@@ -6872,3 +6872,30 @@ TOP-NOBUG-REGEXP-45-final-25318.patch
 TOP-NOBUG-REGEXP-46-fixes-25318.patch
 WIP-NOBUG-seamonkey-credits.patch
 1897801-about-seamonkey-mozilla-25319.patch
+1519358-66a1.patch
+1561102-1-69a1.patch
+1561102-2-69a1.patch
+1540655-01-70a1.patch
+1582114-71a1.patch
+1630047-77a1.patch
+1630668-77a1.patch
+1632688-77a1.patch
+1634391-2-78a1.patch
+1637845-01-79a1.patch
+1637845-02-79a1.patch
+1637845-03-79a1.patch
+1637845-04-79a1.patch
+1637845-05-79a1.patch
+1637845-06-79a1.patch
+1637845-07-79a1.patch
+1637845-08-79a1.patch
+1637845-09-79a1.patch
+1637845-10-79a1.patch
+1637845-11-79a1.patch
+1637845-12-79a1.patch
+1637845-13-79a1.patch
+1637845-14-79a1.patch
+1637845-15-79a1.patch
+1637845-16-79a1.patch
+1637845-17-79a1.patch
+1637845-18-79a1.patch