# Imports used by this excerpt; the logger name below is an assumption for the
# excerpt (the full module configures its own logger).
import argparse
import json
import logging
import os
import re
import subprocess
from collections import OrderedDict

logger = logging.getLogger(__name__)


def get_triggers(event):
    # Set some variables that we use to get the commits on the current branch
    ref_prefix = "refs/heads/"
    is_pr = "pull_request" in event
    branch = None
    if not is_pr and "ref" in event:
        branch = event["ref"]
        if branch.startswith(ref_prefix):
            branch = branch[len(ref_prefix):]

    return is_pr, branch
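# Illustrative example (values made up): for a push event such as
# {"ref": "refs/heads/gh-pages", ...} get_triggers returns (False, "gh-pages");
# for a pull_request event it returns (True, None).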
def fetch_event_data(queue):
    try:
        task_id = os.environ["TASK_ID"]
    except KeyError:
        logger.warning("Missing TASK_ID environment variable")
        # For example under local testing
        return None
def filter_triggers(event, all_tasks):
    is_pr, branch = get_triggers(event)
    triggered = OrderedDict()
    for name, task in all_tasks.items():
        if "trigger" in task:
            if is_pr and "pull-request" in task["trigger"]:
                triggered[name] = task
            elif branch is not None and "branch" in task["trigger"]:
                for trigger_branch in task["trigger"]["branch"]:
                    if (trigger_branch == branch or
                        trigger_branch.endswith("*") and branch.startswith(trigger_branch[:-1])):
                        triggered[name] = task
    logger.info("Triggers match tasks:\n * %s" % "\n * ".join(triggered.keys()))
    return triggered
def get_run_jobs(event):
    from tools.ci import jobs
    revish = "%s..%s" % (event["pull_request"]["base"]["sha"]
                         if "pull_request" in event
                         else event["before"],
                         event["pull_request"]["head"]["sha"]
                         if "pull_request" in event
                         else event["after"])
    logger.info("Looking for changes in range %s" % revish)
    paths = jobs.get_paths(revish=revish)
    logger.info("Found changes in paths:%s" % "\n".join(paths))
    path_jobs = jobs.get_jobs(paths)
    all_jobs = path_jobs | get_extra_jobs(event)
    logger.info("Including jobs:\n * %s" % "\n * ".join(all_jobs))
    return all_jobs
def get_extra_jobs(event):
    body = None
    jobs = set()
    if "commits" in event and event["commits"]:
        body = event["commits"][0]["message"]
    elif "pull_request" in event:
        body = event["pull_request"]["body"]

    if not body:
        return jobs

    regexp = re.compile(r"\s*tc-jobs:(.*)$")

    for line in body.splitlines():
        m = regexp.match(line)
        if m:
            items = m.group(1)
            for item in items.split(","):
                jobs.add(item.strip())
            break

    return jobs
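# Illustrative example (job names made up): a commit message or PR body that
# contains the line
#     tc-jobs:lint,wpt_integration
# makes get_extra_jobs return {"lint", "wpt_integration"}.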
def filter_excluded_users(tasks, event):
    # Some users' pull requests are excluded from tasks,
    # such as pull requests from automated exports.
    try:
        submitter = event["pull_request"]["user"]["login"]
    except KeyError:
        # Just ignore excluded users if the
        # username cannot be pulled from the event.
        logger.debug("Unable to read username from event. Continuing.")
        return
    excluded_tasks = []
    # A separate list of items for tasks is needed to iterate over
    # because removing an item during iteration will raise an error.
    for name, task in list(tasks.items()):
        if submitter in task.get("exclude-users", []):
            excluded_tasks.append(name)
            tasks.pop(name)  # removing excluded task
    if excluded_tasks:
        logger.info(
            f"Tasks excluded for user {submitter}:\n * " + "\n * ".join(excluded_tasks)
        )
def filter_schedule_if(event, tasks):
    scheduled = OrderedDict()
    run_jobs = None
    for name, task in tasks.items():
        if "schedule-if" in task:
            if "run-job" in task["schedule-if"]:
                if run_jobs is None:
                    run_jobs = get_run_jobs(event)
                if "all" in run_jobs or any(item in run_jobs for item in task["schedule-if"]["run-job"]):
                    scheduled[name] = task
        else:
            scheduled[name] = task
    logger.info("Scheduling rules match tasks:\n * %s" % "\n * ".join(scheduled.keys()))
    return scheduled
def get_fetch_rev(event):
    is_pr, _ = get_triggers(event)
    if is_pr:
        # Try to get the actual rev so that all non-decision tasks are pinned to that
        rv = ["refs/pull/%s/merge" % event["pull_request"]["number"]]
        # For every PR GitHub maintains a 'head' branch with commits from the
        # PR, and a 'merge' branch containing a merge commit between the base
        # branch and the PR.
        for ref_type in ["head", "merge"]:
            ref = "refs/pull/%s/%s" % (event["pull_request"]["number"], ref_type)
            sha = None
            try:
                output = subprocess.check_output(["git", "ls-remote", "origin", ref])
            except subprocess.CalledProcessError:
                import traceback
                logger.error(traceback.format_exc())
                logger.error("Failed to get commit sha1 for %s" % ref)
            else:
                if not output:
                    logger.error("Failed to get commit for %s" % ref)
                else:
                    sha = output.decode("utf-8").split()[0]
            rv.append(sha)
        rv = tuple(rv)
    else:
        # For a branch push we have a ref and a head but no merge SHA
        rv = (event["ref"], event["after"], None)

    assert len(rv) == 3
    return rv
options = task.get("options", {})
options_args = []
options_args.append("--ref=%s" % fetch_ref)
if head_sha is not None:
    options_args.append("--head-rev=%s" % head_sha)
if merge_sha is not None:
    options_args.append("--merge-rev=%s" % merge_sha)
if options.get("oom-killer"):
    options_args.append("--oom-killer")
if options.get("xvfb"):
    options_args.append("--xvfb")
if not options.get("hosts"):
    options_args.append("--no-hosts")
else:
    options_args.append("--hosts")
# Check out the expected SHA unless it is overridden (e.g. to base_head).
if options.get("checkout"):
    options_args.append("--checkout=%s" % options["checkout"])
for browser in options.get("browser", []):
    options_args.append("--browser=%s" % browser)
if options.get("channel"):
    options_args.append("--channel=%s" % options["channel"])
if options.get("install-certificates"):
    options_args.append("--install-certificates")

cmd_args["options_str"] = " ".join(str(item) for item in options_args)

install_packages = task.get("install")
if install_packages:
    install_items = ["apt update -qqy"]
    install_items.extend("apt install -qqy %s" % item for item in install_packages)
    cmd_args["install_str"] = "\n".join("sudo %s;" % item for item in install_items)
# The string conversion here is because if we use variables they are
# converted to a string, so it's easier to use a string always
if str(task.get("required", "True")) != "False" and task_name != "sink-task":
    sink_task_depends_on.append(task_id)

for task_name, task in tasks.items():
    if task_name == "sink-task":
        # sink-task will be created below at the end of the ordered dict,
        # so that it can depend on all other tasks.
        continue
    add_task(task_name, task)

# GitHub branch protection for pull requests needs us to name explicit
# required tasks - which doesn't suffice when using a dynamic task graph.
# To work around this we declare a sink task that depends on all the other
# tasks completing, and checks if they have succeeded. We can then
# make the sink task the sole required task for pull requests.
sink_task = tasks.get("sink-task")
if sink_task:
    logger.info("Scheduling sink-task")
    sink_task["command"] += " {}".format(" ".join(sink_task_depends_on))
    task_id_map["sink-task"] = create_tc_task(
        event, sink_task, taskgroup_id, sink_task_depends_on)
else:
    logger.info("sink-task is not scheduled")

return task_id_map
def create_tasks(queue, task_id_map):
    for (task_id, task_data) in task_id_map.values():
        queue.createTask(task_id, task_data)
def get_event(queue, event_path):
    if event_path is not None:
        try:
            with open(event_path) as f:
                event_str = f.read()
        except OSError:
            logger.error("Missing event file at path %s" % event_path)
            raise
    elif "TASK_EVENT" in os.environ:
        event_str = os.environ["TASK_EVENT"]
    else:
        event_str = fetch_event_data(queue)
    if not event_str:
        raise ValueError("Can't find GitHub event definition; for local testing pass --event-path")
    try:
        return json.loads(event_str)
    except ValueError:
        logger.error("Event was not valid JSON")
        raise
def get_parser():
    parser = argparse.ArgumentParser()
    parser.add_argument("--event-path",
                        help="Path to file containing serialized GitHub event")
    parser.add_argument("--dry-run", action="store_true",
                        help="Don't actually create the tasks, just output the tasks that "
                        "would be created")
    parser.add_argument("--tasks-path",
                        help="Path to file in which to write payload for all scheduled tasks")
    return parser