#!/usr/bin/env python3
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import asyncio
import glob
import logging
import os
import sys
import xml.etree.ElementTree as ET
from os import path

import aiohttp
def remove_cache(cache_path=None):
    """
    Removes all files in the cache folder

    We don't support folders or .dot(hidden) files
    By not deleting the cache directory, it allows us to use Docker tmpfs mounts,
    which are the only workaround to poor mount r/w performance on MacOS
    Bug Reference:
    https://forums.docker.com/t/file-access-in-mounted-volumes-extremely-slow-cpu-bound/8076/288

    Parameters
    ----------
    cache_path : str, optional
        Directory to clear. Defaults to the module-level UV_CACHE_PATH.
    """
    if cache_path is None:
        cache_path = UV_CACHE_PATH
    # glob("*") deliberately skips dotfiles; a subdirectory would make
    # os.remove raise — per the docstring, neither is supported here.
    for f in glob.glob(f"{cache_path}/*"):
        os.remove(f)
def _cachepath(i, ext, base=None):
    """
    Helper function: given an index, return a cache file path

    Parameters
    ----------
    i : int
        Cache entry index; zero-padded to five digits so lexicographic
        order matches numeric order.
    ext : str
        File extension (without the leading dot).
    base : str, optional
        Cache directory. Defaults to the module-level UV_CACHE_PATH.
    """
    if base is None:
        base = UV_CACHE_PATH
    return path.join(base, f"obj_{i:0>5}.{ext}")
async def fetch_url(url, path, connector):
    """
    Fetch/download a file to a specific path

    Parameters
    ----------
    url : str
        URL to be fetched

    path : str
        Path to save binary

    Returns
    -------
    dict
        Request result. If error result['error'] is True
    """

    def _result(response, error=False):
        # Summarize the response for the caller; the body text is not
        # retained because the payload is streamed straight to disk.
        return {
            "headers": dict(response.headers),
            "status": response.status,
            "text": None,
            "error": error,
            "url": url,
            "path": path,
        }

    # Set connection timeout to 15 minutes
    timeout = aiohttp.ClientTimeout(total=900)

    try:
        async with aiohttp.ClientSession(
            connector=connector, connector_owner=False, timeout=timeout
        ) as session:
            log.info(f"Retrieving {url}")
            async with session.get(
                url, headers={"Cache-Control": "max-stale=0"}
            ) as response:
                # Any response code > 299 means something went wrong
                if response.status > 299:
                    log.info(f"Failed to download {url} with status {response.status}")
                    return _result(response, True)

                # Stream the body to disk in bounded chunks instead of
                # buffering the entire payload in memory at once.
                with open(path, "wb") as fd:
                    while True:
                        chunk = await response.content.read(65536)
                        if not chunk:
                            break
                        fd.write(chunk)

                result = _result(response)
                log.info(f'Finished downloading {url}\n{result["headers"]}')
                return result
    # NOTE(review): this handler was reconstructed — the original except
    # clause for the try above was truncated from this copy of the file.
    except (asyncio.TimeoutError, aiohttp.ClientError, OSError) as e:
        log.info(f"Failed to download {url}: {e}")
        # Mirror _result()'s shape so callers can treat all outcomes uniformly.
        return {
            "headers": {},
            "status": 0,
            "text": None,
            "error": True,
            "url": url,
            "path": path,
        }
results = [] # Remove file if download failed for fetch in downloads: # If there's an error, try to remove the file, but keep going if file not present if fetch["error"]: try:
os.unlink(fetch.get("path", None)) except (TypeError, FileNotFoundError) as e:
log.info(f"Unable to cleanup error file: {e} continuing...") continue
results.append(fetch)
return results
async def download_builds(verifyConfig):
    """
    Given UpdateVerifyConfig, download and cache all necessary updater files

    Include "to" and "from"/"updater_package"

    Returns
    -------
    list : List of file paths and urls to each updater file
    """
    updaterUrls = set()
    for release in verifyConfig.releases:
        ftpServerFrom = release["ftp_server_from"]
        ftpServerTo = release["ftp_server_to"]
        for locale in release["locales"]:
            # The "to" target is shared config-wide; substitute the locale
            # only when both the URI and its server are configured.
            toUri = verifyConfig.to
            if toUri is not None and ftpServerTo is not None:
                toUri = toUri.replace("%locale%", locale)
                updaterUrls.add(f"{ftpServerTo}{toUri}")

            for reference in ("updater_package", "from"):
                uri = release.get(reference, None)
                if uri is None:
                    continue
                uri = uri.replace("%locale%", locale)
                # /ja-JP-mac/ locale is replaced with /ja/ for updater packages
                uri = uri.replace("ja-JP-mac", "ja")
                updaterUrls.add(f"{ftpServerFrom}{uri}")

    log.info(f"About to download {len(updaterUrls)} updater packages")
    # NOTE(review): the remainder of this function (performing the downloads
    # and returning the promised list) appears to have been truncated from
    # this copy of the file — confirm against the upstream source.
for release in verifyConfig.releases: for locale in release["locales"]:
xmlUrls.add(
urlTemplate.format(
server=AUS_SERVER,
product=product,
release=release["release"],
build=release["build_id"],
platform=release["platform"],
locale=locale,
channel=verifyConfig.channel,
)
)
# Rename files and add to cache_links for download in downloadList:
cacheLinks.append(download["url"])
fileIndex = len(cacheLinks)
os.rename(download["path"], _cachepath(fileIndex, "cache"))
cacheIndexPath = path.join(UV_CACHE_PATH, "urls.list") with open(cacheIndexPath, "w") as cache:
cache.writelines(f"{l}\n"for l in cacheLinks)
# Log cache
log.info("Cache index urls.list contents:") with open(cacheIndexPath, "r") as cache: for ln, url in enumerate(cache.readlines()):
line = url.replace("\n", "")
log.info(f"Line {ln+1}: {line}")
returnNone
def download_from_config(verifyConfig): """
Given an UpdateVerifyConfig object, download all necessary files to cache
(sync function that calls the async one)
    The information on this website has been carefully compiled to the best
    of our knowledge. However, no guarantee is given as to the completeness,
    correctness, or quality of the information provided.

    Note: The colored syntax highlighting is still experimental.