// NOTE(review): this span is a fragment of a larger method (the enclosing
// definition starts before this excerpt).  Statement boundaries were lost in
// extraction -- several lines below contain multiple collapsed statements and
// the fragment does not parse as-is; confirm against the original file.

// Restrict the full session structure to the target session plus its optional
// ancestor, then resolve the session's info and effective parent.
val selected_sessions =
full_sessions.selection(Selection(sessions = session :: session_ancestor.toList)) val info = selected_sessions(session) val ancestor = session_ancestor orElse info.parent
// When requirements relative to an explicit ancestor are demanded, synthesize
// an auxiliary session covering the theories the ancestor does not provide.
val (session1, infos1) = if (session_requirements && ancestor.isDefined) { val deps = Sessions.deps(selected_sessions, progress = progress) val base = deps(session)
// Predicate: is a theory already loaded by the ancestor session?  It is an
// error if the ancestor (transitively) imports the target session itself.
val ancestor_loaded =
deps.get(ancestor.get) match { case Some(ancestor_base) if !selected_sessions.imports_requirements(List(ancestor.get)).contains(session) =>
ancestor_base.loaded_theories.defined _ case _ =>
error("Bad ancestor " + quote(ancestor.get) + " for session " + quote(session))
}
// Theories of the target session that the ancestor does not already load and
// that are not qualified as belonging to the target session itself.
val required_theories = for {
thy <- base.loaded_theories.keys if !ancestor_loaded(thy) && selected_sessions.theory_qualifier(thy) != session
} yield thy
// Nothing missing: the ancestor suffices.  Otherwise build a synthetic
// "<session>_requirements(<ancestor>)" session entry for the gap.
if (required_theories.isEmpty) (ancestor.get, Nil) else { val other_name = info.name + "_requirements(" + ancestor.get + ")"
// Side effect: ensure the Isabelle tmp directory exists, since the synthetic
// session is placed under $ISABELLE_TMP_PREFIX below.
Isabelle_System.isabelle_tmp_prefix()
val session_entry =
Session_Entry(
pos = info.pos,
name = other_name,
groups = info.groups,
parent = ancestor,
description = "Required theory imports from other sessions",
imports = info.deps,
theories = List((Nil, required_theories.map(thy => ((thy, Position.none), false)))))
val session_info =
Info.make(session_entry,
dir = Path.explode("$ISABELLE_TMP_PREFIX"),
options = info.options,
chapter = info.chapter)
// NOTE(review): the following definition appears spliced in from another
// class (it refers to `errors` and `this`, which are not visible here) --
// confirm against the original file.
def check_errors: Background = if (errors.isEmpty) this else error(cat_lines(errors))
}
/* source dependencies */
object Deps {
  /**
   * Load source dependencies for all sessions of the given structure.
   *
   * @param sessions_structure the session structure to analyze
   * @param progress           progress reporting (interruptible)
   * @param inlined_files      whether to include inlined (loaded) files
   * @param list_files         whether to list all session files
   */
  def load(sessions_structure: Structure,
    progress: Progress = new Progress,
    inlined_files: Boolean = false,
    list_files: Boolean = false
  ): Deps = {
    // Memoized digests of files already hashed during this load, keyed by
    // canonical JFile, so files shared between sessions are hashed once.
    var cache_sources = Map.empty[JFile, SHA1.Digest]

    // Digest the given paths, silently skipping paths that are not regular
    // files (unless already cached); results are memoized in cache_sources.
    def check_sources(paths: List[Path]): List[(Path, SHA1.Digest)] = {
      for {
        path <- paths
        file = path.file if cache_sources.isDefinedAt(file) || file.isFile
      } yield {
        cache_sources.get(file) match {
          case Some(digest) => (path, digest)
          case None =>
            val digest = SHA1.digest(file)
            cache_sources = cache_sources + (file -> digest)
            (path, digest)
        }
      }
    }
// Fold over all sessions in import-topological order, accumulating a map
// from session name to its computed base (seeded with the bootstrap entry).
val session_bases =
sessions_structure.imports_topological_order.foldLeft(Map(Base.bootstrap.session_entry)) { case (session_bases, session_name) =>
progress.expose_interrupt()
// NOTE(review): statement boundaries collapsed by extraction; the `try`
// below has no matching `catch`/`finally` visible in this excerpt.
val info = sessions_structure(session_name) try { val deps_base = info.deps_base(session_bases) val session_background =
Background(base = deps_base, sessions_structure = sessions_structure) val resources = new Resources(session_background)
// Sessions (other than this one) that provide theories transitively
// imported by this session's theories.
val required_sessions =
dependencies.loaded_theories.all_preds(dependencies.theories.map(_.theory))
.map(theory => sessions_structure.theory_qualifier(theory))
.filter(name => name != info.name && sessions_structure.defined(name))
// Minimal acyclic subgraph of the imports graph spanning those sessions:
// restrict to predecessors, close transitively, restrict to the required
// sessions, then take the transitive reduction.
val required_subgraph =
sessions_structure.imports_graph
.restrict(sessions_structure.imports_graph.all_preds(required_sessions).toSet)
.transitive_closure
.restrict(required_sessions.toSet)
.transitive_reduction_acyclic
// NOTE(review): the graph-display code below (session_node/node) appears
// spliced in from a separate method of the original file -- confirm there.
val graph0 =
required_subgraph.topological_order.foldLeft(Graph_Display.empty_graph) { case (g, session) => val a = session_node(session) val bs = required_subgraph.imm_preds(session).toList.map(session_node)
bs.foldLeft((a :: bs).foldLeft(g)(_.default_node(_, Nil)))(_.add_edge(_, a))
}
// Add one node per theory entry, with edges from each of its imports.
dependencies.entries.foldLeft(graph0) { case (g, entry) => val a = node(entry.name) val bs = entry.header.imports.map(node).filterNot(_ == a)
bs.foldLeft((a :: bs).foldLeft(g)(_.default_node(_, Nil)))(_.add_edge(_, a))
}
}
// Loaded files known from the parent base plus this session's own files.
val known_loaded_files = deps_base.known_loaded_files ++ loaded_files

// Report theory imports whose qualifying session is not among the
// (transitive) import requirements declared in the ROOT entry.
val import_errors = {
  val known_sessions =
    sessions_structure.imports_requirements(List(session_name)).toSet
  for {
    name <- dependencies.theories
    qualifier = sessions_structure.theory_qualifier(name) if !known_sessions(qualifier)
  } yield "Bad import of theory " + quote(name.toString) + ": need to include sessions " + quote(qualifier) + " in ROOT"
}

// Check that every "document_theories" entry refers to a theory that is
// actually available: not redundantly from this session, and properly
// imported when it comes from another session.
val document_errors =
  info.document_theories.flatMap({ case (thy, pos) =>
    val build_hierarchy =
      if (sessions_structure.build_graph.defined(session_name)) {
        sessions_structure.build_hierarchy(session_name)
      }
      else Nil
    known_theories.get(thy).map(_.name) match {
      case None => err("Unknown document theory")
      case Some(name) =>
        val qualifier = sessions_structure.theory_qualifier(name)
        if (proper_session_theories.contains(name)) {
          err("Redundant document theory from this session:")
        }
        else if (
          !build_hierarchy.contains(qualifier) &&
          !dependencies.theories.contains(name)
        ) {
          err("Document theory from other session not imported properly:")
        }
        else None
    }
  })
val document_theories =
  info.document_theories.map({ case (thy, _) => known_theories(thy).name })

// Theory files must reside inside one of the session's declared directories;
// also detect incoherent theory file imports (same theory via two paths).
val dir_errors = {
  val ok = info.dirs.map(_.canonical_file).toSet
  val bad =
    (for {
      name <- proper_session_theories.iterator
      path = Path.explode(name.master_dir) if !ok(path.canonical_file)
      path1 = File.perhaps_relative_path(info.dir.canonical, path)
    } yield (path1, name)).toList
  val bad_dirs = (for { (path1, _) <- bad } yield path1.toString).distinct.sorted
  val errs1 =
    for { (path1, name) <- bad }
    yield "Implicit use of directory " + path1 + " for theory " + quote(name.toString)
  val errs2 =
    if (bad_dirs.isEmpty) Nil
    else List("Implicit use of session directories: " + commas(bad_dirs))
  val errs3 = for (p <- info.dirs if !p.is_dir) yield "No such directory: " + p
  val errs4 =
    (for {
      name <- proper_session_theories.iterator
      name1 <- resources.find_theory_node(name.theory) if name.node != name1.node
    } yield {
      "Incoherent theory file import:\n " + quote(name.node) + " vs. \n " + quote(name1.node)
    }).toList
  errs1 ::: errs2 ::: errs3 ::: errs4
}
// Plain existence checks for session files, plus a case-insensitivity check
// (important when the repository is used on case-insensitive file-systems).
val sources_errors =
  for (p <- session_files if !p.is_file) yield "No such file: " + p
val path_errors =
  try { Path.check_case_insensitive(session_files ::: imported_files); Nil }
  catch { case ERROR(msg) => List(msg) }

// Per-group theory lists with session options applied; illegal theory names
// are rejected outright.
val theories =
  entry.theories.map({ case (opts, thys) =>
    (session_options ++ opts,
      thys.map({ case ((thy, pos), _) =>
        val thy_name = Thy_Header.import_name(thy)
        if (illegal_theory(thy_name)) {
          error("Illegal theory name " + quote(thy_name) + Position.here(pos))
        }
        else (thy, pos)
      }))
  })

// Global theories must carry unqualified (short) names.
val global_theories =
  for { (_, thys) <- entry.theories; ((thy, pos), global) <- thys if global }
  yield {
    val thy_name = Path.explode(thy).file_name
    if (Long_Name.is_qualified(thy_name)) {
      error("Bad qualified name for global theory " +
        quote(thy_name) + Position.here(pos))
    }
    else thy_name
  }

val conditions = Conditions.make(theories.iterator.map(_._1)).toList
val document_files =
  entry.document_files.map({ case (s1, s2) => (Path.explode(s1), Path.explode(s2)) })
val export_files =
  entry.export_files.map({ case (dir, prune, pats) => (Path.explode(dir), prune, pats) })
/* Add a chapter definition; a second definition with the same name is an error. */
def + (entry: Chapter_Def): Chapter_Defs =
  get(entry.name) match {
    case Some(old_entry) =>
      error("Duplicate chapter definition " + quote(entry.name) +
        Position.here(old_entry.pos) + Position.here(entry.pos))
    case None => new Chapter_Defs(entry :: rev_list)
  }
}
/* parsers for ROOT file entries */
// NOTE(review): whitespace between modifiers and keywords ("privateobject",
// "privateval") was lost in the source as received and is restored here; the
// binding "private val session_entry: ... = {" introducing the SESSION parser
// block (referenced by parse_root below) is likewise reconstructed.
private object Parsers extends Options.Parsers {
  // optional group list: "(g1 g2 ...)", defaulting to Nil
  private val groups: Parser[List[String]] =
    ($$$("(") ~! (rep1(name) <~ $$$(")")) ^^ { case _ ~ x => x }) | success(Nil)

  // optional description text, defaulting to ""
  private val description: Parser[String] =
    ($$$(DESCRIPTION) ~! text ^^ { case _ ~ x => x }) | success("")

  private val chapter_def: Parser[Chapter_Def] =
    command(CHAPTER_DEFINITION) ~! (position(chapter_name) ~ groups ~ description) ^^
      { case _ ~ ((a, pos) ~ b ~ c) => Chapter_Def(pos, a, b, c) }

  private val chapter_entry: Parser[Chapter_Entry] =
    command(CHAPTER) ~! chapter_name ^^ { case _ ~ a => Chapter_Entry(a) }

  // arguments of EXPORT_FILES: target dir, prune level, export patterns
  private val export_files_args: Parser[(String, Int, List[String])] =
    in_path_parens("export") ~ prune ~ rep1(embedded) ^^ { case x ~ y ~ z => (x, y, z) }

  // parser for a complete SESSION entry of a ROOT file
  private val session_entry: Parser[Session_Entry] = {
    val document_theories =
      $$$(DOCUMENT_THEORIES) ~! rep1(position(name)) ^^ { case _ ~ x => x }

    val document_files =
      $$$(DOCUMENT_FILES) ~! (in_path_parens("document") ~ rep1(path)) ^^
        { case _ ~ (x ~ y) => y.map((x, _)) }

    val export_files =
      $$$(EXPORT_FILES) ~! export_files_args ^^ { case _ ~ x => x }

    val export_classpath =
      $$$(EXPORT_CLASSPATH) ~! (rep1(embedded) | success(List("*:classpath/*.jar"))) ^^
        { case _ ~ x => x }

    command(SESSION) ~!
      (position(session_name) ~ groups ~ in_path(".") ~
        ($$$("=") ~!
          (opt(session_name ~! $$$("+") ^^ { case x ~ _ => x }) ~ description ~
            (($$$(OPTIONS) ~! options ^^ { case _ ~ x => x }) | success(Nil)) ~
            (($$$(SESSIONS) ~! rep1(session_name) ^^ { case _ ~ x => x }) | success(Nil)) ~
            (($$$(DIRECTORIES) ~! rep1(path) ^^ { case _ ~ x => x }) | success(Nil)) ~
            rep(theories) ~
            (opt(document_theories) ^^ (x => x.getOrElse(Nil))) ~
            (rep(document_files) ^^ (x => x.flatten)) ~
            rep(export_files) ~
            opt(export_classpath)))) ^^
      { case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i ~ j ~ k ~ l ~ m))) =>
          Session_Entry(pos = pos, name = a, groups = b, path = c, parent = d, description = e,
            options = f, imports = g, directories = h, theories = i, document_theories = j,
            document_files = k, export_files = l, export_classpath = m.getOrElse(Nil)) }
  }

  // Parse a complete ROOT file into its chapter/session entries.
  def parse_root(path: Path): List[Entry] = {
    val toks = Token.explode(root_syntax.keywords, File.read(path))
    val start = Token.Pos.file(path.implode)
    val parser: Parser[Entry] = chapter_def | chapter_entry | session_entry
    parse_all(rep(parser), Token.reader(toks, start)) match {
      case Success(result, _) => result
      case bad => error(bad.toString)
    }
  }

  // Parse an "export_files" argument specification from a plain string.
  def parse_exports(str: String, start: Token.Pos): (String, Int, List[String]) = {
    val toks = Token.explode(root_syntax.keywords, str)
    parse_all(export_files_args, Token.reader(toks, start)) match {
      case Success(result, _) => result
      case bad => error(bad.toString)
    }
  }
}
/* Load the single ROOT file of dir, if present. */
def load_root(select: Boolean, dir: Path): List[Root_File] = {
  val root_file = dir + ROOT
  if (!root_file.is_file) Nil
  else List(Root_File(root_file, select))
}
/* Load all session directories listed in the ROOTS catalog of dir, if present. */
def load_roots(select: Boolean, dir: Path): List[Root_File] = {
  val catalog = dir + ROOTS
  if (!catalog.is_file) Nil
  else {
    for {
      entry <- parse_roots(catalog)
      dir1 =
        try { check_session_dir(dir + Path.explode(entry)) }
        catch {
          case ERROR(msg) =>
            error(msg + "\nThe error(s) above occurred in session catalog " + catalog.toString)
        }
      res <- load_dir(select, dir1)
    } yield res
  }
}
// Collect ROOT files from all given directories (selected or not).
val raw_roots: List[Root_File] =
  for {
    (select, dir) <- directories(dirs, select_dirs)
    root <- load_dir(select, check_session_dir(dir))
  } yield root

// De-duplicate ROOT files by canonical key, merging the "select" flag of
// later occurrences into the first one, while preserving first-seen order.
var next_root = 0
var seen_roots = Map.empty[JFile, (Root_File, Int)]
for (root <- raw_roots) {
  seen_roots.get(root.key) match {
    case None =>
      seen_roots += (root.key -> (root, next_root))
      next_root += 1
    case Some((root0, next0)) =>
      // keep the original position index, but OR the selection flags
      val root1 = root0.copy(select = root0.select || root.select)
      seen_roots += (root0.key -> (root1, next0))
  }
}
seen_roots.valuesIterator.toList.sortBy(_._2).map(_._1)
}
/* Isabelle tool wrapper */
// Command-line wrapper: "isabelle sessions" explores the session structure.
// NOTE(review): this excerpt is truncated -- the triple-quoted usage text
// below is never closed within this view, and the option handlers plus the
// session-selection code between the usage text and the final output loop
// are missing; confirm against the original file.
val isabelle_tool = Isabelle_Tool("sessions", "explore structure of Isabelle sessions",
Scala_Project.here,
{ args => var base_sessions: List[String] = Nil var select_dirs: List[Path] = Nil var requirements = false var exclude_session_groups: List[String] = Nil var all_sessions = false var build_graph = false var dirs: List[Path] = Nil var session_groups: List[String] = Nil var exclude_sessions: List[String] = Nil
// usage message and option declarations (string literal unterminated here)
val getopts = Getopts("""
Usage: isabelle sessions [OPTIONS] [SESSIONS ...]
Options are:
-B NAME include session NAME and all descendants
-D DIR include session directory and select its sessions
-R refer to requirements of selected sessions
-X NAME exclude sessions from group NAME and all descendants
-a select all sessions
-b follow session build dependencies (default: source imports)
-d DIR include session directory
-g NAME select session group NAME
-x NAME exclude session NAME and all descendants
val order = if (build_graph) sessions_structure.build_topological_order else sessions_structure.imports_topological_order for (name <- order) Output.writeln(name, stdout = true)
})
}
Dauer der Verarbeitung: 0,15 Sekunden (vorverarbeitet)

Die Informationen auf dieser Webseite wurden nach bestem Wissen sorgfältig
zusammengestellt. Es wird jedoch weder Vollständigkeit noch Richtigkeit
noch Qualität der bereitgestellten Informationen zugesichert.

Bemerkung: Die farbliche Syntaxdarstellung ist noch experimentell.