# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# This action is used to produce test archives.
#
# Ideally, the data in this file should be defined in moz.build files.
# It is defined inline because this was easiest to make test archive
# generation faster.
import argparse
import itertools
import os
import sys
import time

import buildconfig
import mozpack.path as mozpath
from manifestparser import TestManifest

# ensureParentDir is called by main() below but was missing from the
# import block.
from mozbuild.util import ensureParentDir
from mozpack.archive import create_tar_gz_from_files
from mozpack.copier import FileRegistry
from mozpack.files import ExistingFile, FileFinder
from mozpack.manifests import InstallManifest
from mozpack.mozjar import JarWriter
from reftest import ReftestManifest
# These entries will be used by artifact builds to re-construct an
# objdir with the appropriate generated support files.
# Maps a test suite name to the objdir location of its generated support
# files and where they land in the packaged archive.
OBJDIR_TEST_FILES = {
    "xpcshell": {
        "source": buildconfig.topobjdir,
        "base": "_tests/xpcshell",
        "pattern": "**",
        "dest": "xpcshell/tests",
    },
    "mochitest": {
        "source": buildconfig.topobjdir,
        "base": "_tests/testing",
        "pattern": "mochitest/**",
    },
}
# "common" is our catch all archive and it ignores things from other archives. # Verify nothing sneaks into ARCHIVE_FILES without a corresponding exclusion # rule in the "common" archive. for k, v in ARCHIVE_FILES.items(): # Skip mozharness because it isn't staged. if k in ("common", "mozharness"): continue
ignores = set(
itertools.chain(*(e.get("ignore", []) for e in ARCHIVE_FILES["common"]))
)
ifnot any(p.startswith("%s/" % k) for p in ignores): raise Exception('"common" ignore list probably should contain %s' % k)
def find_generated_harness_files():
    """Return objdir-relative paths of generated test harness files.

    TEST_HARNESS_FILES end up in an install manifest at
    $topsrcdir/_build_manifests/install/_tests.
    """
    manifest_path = mozpath.join(
        buildconfig.topobjdir, "_build_manifests", "install", "_tests"
    )
    registry = FileRegistry()
    InstallManifest(manifest_path).populate_registry(registry)

    # Conveniently, the generated files we care about will already
    # exist in the objdir, so we can identify relevant files if
    # they're an `ExistingFile` instance.
    return [
        mozpath.join("_tests", path)
        for path in registry.paths()
        if isinstance(registry[path], ExistingFile)
    ]
if archive == "common": # Construct entries ensuring all our generated harness files are # packaged in the common tests archive.
packaged_paths = set() for entry in OBJDIR_TEST_FILES.values():
pat = mozpath.join(entry["base"], entry["pattern"]) del entry["pattern"]
patterns = [] for path in generated_harness_files: if mozpath.match(path, pat):
patterns.append(path[len(entry["base"]) + 1 :])
packaged_paths.add(path) if patterns:
entry["patterns"] = patterns
extra_entries.append(entry)
entry = {"source": buildconfig.topobjdir, "base": "_tests", "patterns": []} for path in set(generated_harness_files) - packaged_paths:
entry["patterns"].append(path[len("_tests") + 1 :])
extra_entries.append(entry)
for entry in ARCHIVE_FILES[archive] + extra_entries:
source = entry["source"]
dest = entry.get("dest")
base = entry.get("base", "")
pattern = entry.get("pattern")
patterns = entry.get("patterns", []) if pattern:
patterns.append(pattern)
manifest = entry.get("manifest")
manifests = entry.get("manifests", []) if manifest:
manifests.append(manifest) if manifests:
dirs = find_manifest_dirs(os.path.join(source, base), manifests)
patterns.extend({"{}/**".format(d) for d in dirs})
if archive notin ("common", "updater-dep") and base.startswith("_tests"): # We may have generated_harness_files to exclude from this entry. for path in generated_harness_files: if path.startswith(base):
ignore.append(path[len(base) + 1 :])
finder = FileFinder(os.path.join(source, base), **common_kwargs)
for pattern in patterns: for p, f in finder.find(pattern): if dest:
p = mozpath.join(dest, p) yield p, f
def find_manifest_dirs(topsrcdir, manifests):
    """Routine to retrieve directories specified in a manifest, relative to topsrcdir.

    It does not recurse into manifests, as we currently have no need for that.

    Args:
        topsrcdir: absolute path that manifest paths are resolved against
            and stripped from the returned directories.
        manifests: iterable of manifest paths; .ini/.toml are parsed as
            TestManifest, .list as ReftestManifest.

    Returns a sorted list of topsrcdir-relative directories, with child
    directories that are already covered by a parent removed.

    Raises Exception for any manifest whose extension is unsupported.
    """
    dirs = set()

    for p in manifests:
        p = os.path.join(topsrcdir, p)
        # endswith accepts a tuple, so one call covers both test-manifest
        # extensions.
        if p.endswith((".ini", ".toml")):
            test_manifest = TestManifest()
            test_manifest.read(p)
            dirs |= {os.path.dirname(m) for m in test_manifest.manifests()}
        elif p.endswith(".list"):
            m = ReftestManifest()
            m.load(p)
            dirs |= m.dirs
        else:
            raise Exception(
                '"{}" is not a supported manifest format.'.format(
                    os.path.splitext(p)[1]
                )
            )

    # Make directories topsrcdir-relative.
    dirs = {mozpath.normpath(d[len(topsrcdir) :]).lstrip("/") for d in dirs}

    # Filter out children captured by parent directories because duplicates
    # will confuse things later on.
    def parents(p):
        # Yield every ancestor directory of p, nearest first.
        while True:
            p = mozpath.dirname(p)
            if not p:
                break
            yield p

    seen = set()
    # Sorting by length guarantees a parent is always seen before its children.
    for d in sorted(dirs, key=len):
        if not any(p in seen for p in parents(d)):
            seen.add(d)

    return sorted(seen)
def main(argv):
    """Command-line entry point: write the requested test archive to disk."""
    parser = argparse.ArgumentParser(description="Produce test archives")
    parser.add_argument("archive", help="Which archive to generate")
    parser.add_argument("outputfile", help="File to write output to")
    args = parser.parse_args(argv)

    out_file = args.outputfile
    if not out_file.endswith((".tar.gz", ".zip")):
        raise Exception("expected tar.gz or zip output file")

    file_count = 0
    started = time.monotonic()
    ensureParentDir(out_file)
    entries = find_files(args.archive)

    with open(out_file, "wb") as out_fh:
        # Experimentation revealed that level 5 is significantly faster and has
        # marginally larger sizes than higher values and is the sweet spot
        # for optimal compression. Read the detailed commit message that
        # introduced this for raw numbers.
        if out_file.endswith(".tar.gz"):
            file_map = dict(entries)
            create_tar_gz_from_files(out_fh, file_map, compresslevel=5)
            file_count = len(file_map)
        elif out_file.endswith(".zip"):
            with JarWriter(fileobj=out_fh, compress_level=5) as jar:
                for rel_path, entry_file in entries:
                    jar.add(
                        rel_path.encode("utf-8"),
                        entry_file.read(),
                        mode=entry_file.mode,
                        skip_duplicates=True,
                    )
                    file_count += 1
        else:
            raise Exception("unhandled file extension: %s" % out_file)

    elapsed = time.monotonic() - started
    written_bytes = os.path.getsize(args.outputfile)
    written_name = os.path.basename(args.outputfile)
    print(
        "Wrote %d files in %d bytes to %s in %.2fs"
        % (file_count, written_bytes, written_name, elapsed)
    )


if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
Messung V0.5
¤ Dauer der Verarbeitung: 0.3 Sekunden
(vorverarbeitet)
¤
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit, noch Richtigkeit,
noch Qualität der bereit gestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.