# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. # # This module needs to stay Python 2 and 3 compatible # import contextlib import os import platform import shutil import sys import tempfile import time from subprocess import PIPE, Popen
import mozlog import requests import yaml from requests.exceptions import ConnectionError from requests.packages.urllib3.util.retry import Retry
class BridgeLogger:
    """Adapter wrapping a mozlog/stdlib logger so it can stand in where a
    structlog-style logger is expected (see get_logger)."""

    def __init__(self, logger):
        self.logger = logger

    def _find(self, text, *names):
        # structlog's ConsoleRenderer pads values, so look for each name
        # followed by the padding, not the bare name.
        # NOTE(review): STRUCTLOG_PAD_SIZE is defined elsewhere in the module.
        padded = (name + " " * STRUCTLOG_PAD_SIZE for name in names)
        return any(candidate in text for candidate in padded)
def get_logger():
    """Return the shared "condprof" logger, creating and caching it on
    first use (also hooked into Arsenic's connection logging when
    Arsenic is installed)."""
    global logger
    if logger is not None:
        return logger

    base = mozlog.get_default_logger("condprof")
    if base is None:
        base = mozlog.unstructured.getLogger("condprof")

    # wrap the logger into the BridgeLogger
    bridged = BridgeLogger(base)

    # bridge for Arsenic
    if sys.version_info.major == 3:
        try:
            from arsenic import connection
            from structlog import wrap_logger

            connection.log = wrap_logger(bridged)
        except ImportError:
            # Arsenic is not installed for client-only usage
            pass

    logger = bridged
    return logger
# Initialize the module-level `logger` eagerly at import time so the
# functions below can use it unconditionally.
get_logger()
def fresh_profile(profile, customization_data):
    """Create a brand-new Firefox profile at *profile*.

    Applies the prefs from *customization_data* (merged over
    DEFAULT_PREFS), downloads and installs its addons, then copies the
    resulting profile directory to *profile* and returns it.
    """
    from mozprofile import create_profile  # NOQA

    # XXX on android we might need to run it on the device?
    logger.info("Creating a fresh profile")
    base_profile = create_profile(app="firefox")

    prefs = customization_data["prefs"]
    prefs.update(DEFAULT_PREFS)
    logger.info("Setting prefs %s" % str(prefs.items()))
    base_profile.set_preferences(prefs)

    extensions = []
    for addon_name, addon_url in customization_data["addons"].items():
        logger.info("Downloading addon %s" % addon_name)
        # When running on the CI, we expect the xpi files to have been
        # fetched by the firefox-addons fetch task dependency (see
        # taskcluster/kinds/fetch/browsertime.yml) and the condprof-addons
        # linter enforces the content of the archive to be unpacked into
        # a subdirectory named "firefox-addons".
        extensions.append(download_file(addon_url, mozfetches_subdir="firefox-addons"))

    logger.info("Installing addons")
    base_profile.addons.install(extensions)
    shutil.copytree(base_profile.profile, profile)
    return profile
def get_firefox_download_link():
    """Scrape the nightly listing page and return the absolute URL of the
    latest Firefox nightly archive for the current platform.

    Raises ImportError when beautifulsoup4 is missing,
    NotImplementedError on unsupported platforms, and Exception when no
    matching download link is found on the page.
    """
    try:
        from bs4 import BeautifulSoup
    except ImportError:
        raise ImportError("You need to run pip install beautifulsoup4")

    if platform.system() == "Darwin":
        extension = ".dmg"
    elif platform.system() == "Linux":
        arch = platform.machine()
        extension = ".linux-%s.tar.xz" % arch
    else:
        raise NotImplementedError(platform.system())

    # NOTE(review): `link` is not defined in this excerpt — presumably a
    # module-level constant pointing at the nightly directory listing;
    # confirm against the full file.
    page = requests.get(link).text
    soup = BeautifulSoup(page, "html.parser")
    for node in soup.find_all("a", href=True):
        href = node["href"]
        if href.endswith(extension):
            return "https://ftp.mozilla.org" + href

    # Fix: previously raised a bare Exception() with no diagnostic at all,
    # making failures impossible to triage from logs.
    raise Exception("No download link ending with %r found" % extension)
def check_exists(archive, server=None, all_types=False):
    """Probe *archive* (optionally relative to *server*) with a HEAD request.

    Returns (exists, headers): *exists* is False on connection failure or
    when an HTML page comes back (unless *all_types* is True), otherwise
    the HTTP status code (truthy for 200); *headers* is the response
    header mapping ({} on connection failure).
    """
    if server is not None:
        archive = server + "/" + archive
    try:
        logger.info("Getting headers at %s" % archive)
        # NOTE(review): DOWNLOAD_TIMEOUT is defined elsewhere in the module.
        resp = requests.head(archive, timeout=DOWNLOAD_TIMEOUT)
    except ConnectionError:
        return False, {}

    if resp.status_code in (302, 303):
        logger.info("Redirected")
        # Fix: propagate all_types so the HTML filter below keeps its
        # caller-requested behavior after a redirect (it was silently
        # reset to the default False before).
        return check_exists(resp.headers["Location"], all_types=all_types)

    # see Bug 1574854
    if (
        not all_types
        and resp.status_code == 200
        and "text/html" in resp.headers["Content-Type"]
    ):
        logger.info("Got an html page back")
        exists = False
    else:
        logger.info("Response code is %d" % resp.status_code)
        exists = resp.status_code
    return exists, resp.headers
def check_mozfetches_dir(target, mozfetches_subdir):
    """Return the pre-fetched copy of *target* under MOZ_FETCHES_DIR, or None.

    Looks for MOZ_FETCHES_DIR/<mozfetches_subdir>/<target>; returns None
    when the environment variable is unset or the file does not exist.
    """
    logger.info("Checking for existence of: %s in MOZ_FETCHES_DIR" % target)
    fetches_dir = os.environ.get("MOZ_FETCHES_DIR")
    if fetches_dir is None:
        return None
    candidate = os.path.join(fetches_dir, mozfetches_subdir, target)
    if not os.path.exists(candidate):
        return None
    logger.info("Already fetched and available in MOZ_FETCHES_DIR: %s" % candidate)
    return candidate
def download_file(url, target=None, mozfetches_subdir=None):
    """Download *url* into the local file *target* and return its path.

    When *target* is None the last URL path segment is used. When
    *mozfetches_subdir* is given, a copy pre-fetched into MOZ_FETCHES_DIR
    is preferred and returned without downloading. An existing local copy
    is reused when its recorded ETag (in a sibling ``.etag`` file) matches.

    NOTE(review): `etag`, `req`, `total_length`, `progress` and
    TASK_CLUSTER are never assigned in this excerpt — presumably set by
    lines missing from this view (likely the requests.get() call and ETag
    header extraction); confirm against the full file before relying on
    this code.
    """
    if target is None:
        target = url.split("/")[-1]

    # check if the assets has been fetched through a taskgraph fetch task
    # dependency and already available in the MOZ_FETCHES_DIR passed as an
    # additional parameter.
    if mozfetches_subdir is not None:
        filepath = check_mozfetches_dir(target, mozfetches_subdir)
        if filepath is not None:
            return filepath

    logger.info("Checking for existence of: %s" % target)
    if os.path.exists(target):
        # XXX for now, reusing downloads without checking them
        # when we don't have an .etag file
        if etag is None or not os.path.exists(target + ".etag"):
            logger.info("No existing etag downloads.")
            return target
        with open(target + ".etag") as f:
            current_etag = f.read()
        if etag == current_etag:
            logger.info("Already Downloaded.")
            # should at least check the size?
            return target
        else:
            logger.info("Changed!")
    else:
        logger.info("Could not find an existing archive.")
        # Add some debugging logs for the directory content
        try:
            archivedir = os.path.dirname(target)
            logger.info(
                "Content in cache directory %s: %s"
                % (archivedir, os.listdir(archivedir))
            )
        except Exception:
            logger.info("Failed to list cache directory contents")

    with open(target, "wb") as f:
        if TASK_CLUSTER:
            # No progress bar on CI: just stream the body to disk.
            for chunk in req.iter_content(chunk_size=1024):
                if chunk:
                    f.write(chunk)
                    f.flush()
        else:
            iter = req.iter_content(chunk_size=1024)
            # pylint --py3k W1619
            size = total_length / 1024 + 1
            for chunk in progress.bar(iter, expected_size=size):
                if chunk:
                    f.write(chunk)
                    f.flush()

    # Record the ETag so the next call can detect an unchanged archive.
    if etag is not None:
        with open(target + ".etag", "w") as f:
            f.write(etag)

    return target
def extract_from_dmg(dmg, target):
    """Mount the DMG image *dmg* and copy its first ``.app`` bundle to *target*.

    The image is always detached and the temporary mount point removed,
    even on failure. Raises IOError when no ``.app`` bundle is found.
    """
    mount = tempfile.mkdtemp()
    cmd = "hdiutil attach -nobrowse -mountpoint %s %s"
    os.system(cmd % (mount, dmg))
    found = False
    try:
        for entry in os.listdir(mount):
            if entry.endswith(".app"):
                shutil.copytree(os.path.join(mount, entry), target)
                found = True
                break
    finally:
        # Always unmount and clean up the temporary mount point.
        os.system("hdiutil detach " + mount)
        shutil.rmtree(mount)
    if not found:
        raise IOError("No app file found in %s" % dmg)
@contextlib.contextmanager
def latest_nightly(binary=None):
    """Context manager yielding a path to a Firefox binary.

    When *binary* is None, the latest nightly is downloaded and set up
    (DMG mounted on macOS, tarball unpacked on Linux) and torn down again
    on exit; otherwise the given *binary* is yielded unchanged and no
    cleanup happens.
    """
    if binary is None:
        # we want to use the latest nightly
        nightly_archive = get_firefox_download_link()
        logger.info("Downloading %s" % nightly_archive)
        target = download_file(nightly_archive)
        # on macOs we just mount the DMG
        # XXX replace with extract_from_dmg
        if platform.system() == "Darwin":
            cmd = "hdiutil attach -mountpoint /Volumes/Nightly %s"
            os.system(cmd % target)
            binary = "/Volumes/Nightly/Firefox Nightly.app/Contents/MacOS/firefox"
        # on linux we unpack it
        elif platform.system() == "Linux":
            # Tar should automatically recognize the compression algo (xz/bzip2)
            cmd = "tar -xvf %s" % target
            os.system(cmd)
            binary = "firefox/firefox"
        # NOTE(review): `mounted = True` reconstructed at this level (not
        # inside the Linux branch) so the macOS mount is also cleaned up in
        # the finally clause — confirm against the original file's layout.
        mounted = True
    else:
        mounted = False
    try:
        yield binary
    finally:
        # XXX replace with extract_from_dmg
        if mounted:
            if platform.system() == "Darwin":
                logger.info("Unmounting Firefox")
                time.sleep(10)
                os.system("hdiutil detach /Volumes/Nightly")
            elif platform.system() == "Linux":
                # XXX we should keep it for next time
                shutil.rmtree("firefox")
def write_yml_file(yml_file, yml_data):
    """Serialize *yml_data* as YAML into *yml_file*, best effort.

    Failures are logged with a traceback rather than raised.
    """
    logger.info("writing %s to %s" % (yml_data, yml_file))
    try:
        with open(yml_file, "w") as stream:
            yaml.dump(yml_data, stream, default_flow_style=False)
    except Exception:
        logger.error("failed to write yaml file", exc_info=True)
def obfuscate(text):
    """Scrub the runner credentials out of *text*.

    Returns a (hit, text) pair; *hit* is True when the text was handled
    (including the no-op case outside the runner environment) and False
    when credentials were unavailable or not present in *text*.
    """
    # Outside the runner environment there is nothing to hide.
    if "CONDPROF_RUNNER" not in os.environ:
        return True, text
    username, password = get_credentials()
    if username is None:
        return False, text
    if not (username in text or password in text):
        return False, text
    # Same replacement order as before: password, then username.
    scrubbed = text.replace(password, "").replace(username, "")
    return True, scrubbed
def obfuscate_file(path):
    """Rewrite the file at *path* with credentials scrubbed (runner env only)."""
    # No-op outside the runner environment.
    if "CONDPROF_RUNNER" not in os.environ:
        return
    with open(path) as source:
        contents = source.read()
    hit, scrubbed = obfuscate(contents)
    if not hit:
        return
    with open(path, "w") as sink:
        sink.write(scrubbed)
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit noch Richtigkeit
noch Qualität der bereitgestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.