From 9603d2c164b57213fee7c60354bf270fab6bab7f Mon Sep 17 00:00:00 2001
From: Tim Daly
Date: Mon, 27 May 2019 17:44:52 -0400
Subject: [PATCH] multind.sty
Goal: Axiom function reference guide
This is used for multiple index tables in a book
---
books/multind.sty | 65 +
changelog | 2 +
patch | 3613 +++++++++++++++++++++++++++++++++++++++-
src/axiom-website/patches.html | 2 +
4 files changed, 3678 insertions(+), 4 deletions(-)
create mode 100644 books/multind.sty
diff --git a/books/multind.sty b/books/multind.sty
new file mode 100644
index 0000000..ef91c28
--- /dev/null
+++ b/books/multind.sty
@@ -0,0 +1,65 @@
+% indexes document style option for producing multiple indexes
+% for use with the modified book style, CHbook.sty
+% Written by F.W. Long, Version 1.1, 12 August 1991.
+
+% Modified by F.W. Long, Version 1.1a, 29 August 1991
+% to get the index heading correctly spaced.
+
+% Modified by F.W. Long, Version 1.1b, 31 August 1991
+% to remove the abbreviation \ix (which should be in the document, not here).
+
+% Modified \makeindex and \index commands to allow multiple indexes
+% in both cases the first parameter is the index name.
+% They now work more like \@starttoc and \addcontentsline.
+% \index is no longer defined inside \makeindex but determines
+% whether the appropriate file is defined before writing to it.
+
+\def\makeindex#1{\begingroup
+ \makeatletter
+ \if@filesw \expandafter\newwrite\csname #1@idxfile\endcsname
+ \expandafter\immediate\openout \csname #1@idxfile\endcsname #1.idx\relax
+ \typeout{Writing index file #1.idx }\fi \endgroup}
+
+\def\index#1{\@bsphack\begingroup
+ \def\protect##1{\string##1\space}\@sanitize
+ \@wrindex{#1}}
+
+% \@wrindex now checks that the appropriate file is defined.
+
+\def\@wrindex#1#2{\let\thepage\relax
+ \xdef\@gtempa{\@ifundefined{#1@idxfile}{}{\expandafter
+ \write\csname #1@idxfile\endcsname{\string
+ \indexentry{#2}{\thepage}}}}\endgroup\@gtempa
+ \if@nobreak \ifvmode\nobreak\fi\fi\@esphack}
+
+% Modified \printindex command to allow multiple indexes.
+% This now takes over much of the work of \theindex.
+% Again, the first parameter is the index name.
+% The second parameter is the index title (as printed).
+
+\newif\if@restonecol
+\def\printindex#1#2{\@restonecoltrue\if@twocolumn\@restonecolfalse\fi
+ \columnseprule \z@ \columnsep 35pt
+ \newpage \twocolumn[{\Large\bf #2 \vskip4ex}]
+ \markright{\uppercase{#2}}
+ \addcontentsline{toc}{section}{#2}
+ \@input{#1.ind}}
+
+% The following index commands are taken from book.sty.
+% \theindex is modified to not start a chapter.
+
+\def\theindex{\parindent\z@
+ \parskip\z@ plus .3pt\relax\let\item\@idxitem}
+\def\@idxitem{\par\hangindent 40pt}
+\def\subitem{\par\hangindent 40pt \hspace*{20pt}}
+\def\subsubitem{\par\hangindent 40pt \hspace*{30pt}}
+\def\endtheindex{\if@restonecol\onecolumn\else\clearpage\fi}
+\def\indexspace{\par \vskip 10pt plus 5pt minus 3pt\relax}
+
+% the command \ix allows an abbreviation for the general index
+
+%\def\ix#1{#1\index{general}{#1}}
+
+% define the \see command from makeidx.sty
+
+\def\see#1#2{{\em see\/} #1}
diff --git a/changelog b/changelog
index 6964ba3..f118734 100644
--- a/changelog
+++ b/changelog
@@ -1,3 +1,5 @@
+20190527 tpd src/axiom-website/patches.html 20190527.01.tpd.patch
+20190527 tpd books/multind.sty multiple index files in a book
20190526 tpd src/axiom-website/patches.html 20190526.01.tpd.patch
20190526 tpd src/axiom-website/index.html add names to credit list
20190526 tpd src/axiom-website/community.html add names to credit list
diff --git a/patch b/patch
index 9c264b5..8f4e9d3 100644
--- a/patch
+++ b/patch
@@ -1,6 +1,3611 @@
-books/Newsletter.September85
+books/bookvolbib added references
-Goal: Axiom History
+Goal: Proving Axiom Sane
-Eugene Surowitz kindly provided a pdf of the final
-version of the Scratchpad September newsletter.
+\index{Abadi, Martin}
+\index{Cardelli, Luca}
+\index{Pierce, Benjamin}
+\index{Plotkin, Gordon}
+\begin{chunk}{axiom.bib}
+@inproceedings{Abad89,
+ author = "Abadi, Martin and Cardelli, Luca and Pierce, Benjamin
+ and Plotkin, Gordon",
+ title = {{Dynamic Typing in a Statically Typed Language}},
+ booktitle = "16th Principles of Programming Languages",
+ publisher = "ACM",
+ pages = "213-227",
+ year = "1989",
+ abstract =
+ "Statically typed programming languages allow earlier error
+ checking, better enforcement of disciplined programming styles,
+ and generation of more efficient object code than languages where
+ all type consistency checks are performed at run time. However,
+ even in statically typed languages, there is often the need to
+ deal with data whose type cannot be determined at compile time. To
+ handle such situations safely, we propose to add a type Dynamic
+ whose values are pairs of a value $v$ and a type tag T where $v$
+ has the type denoted by T. Instances of Dynamic are built with an
+ explicit tagging construct and inspected with a type safe typecase
+ construct.
+
+ This paper explores the syntax, operational semantics, and
+ denotational semantics of a simple language including the type
+ Dynamic. We give examples of how dynamically typed values can be
+ used in programming. Then we discuss an operational semantics for
+ our language and obtain a soundness theorem. We present two
+ formulations of the denotational semantics of this language and
+ relate them to the operational semantics. Finally, we consider the
+ implications of polymorphism and some implementation issues.",
+ paper = "Abad89.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Ait-Kaci, Hassan}
+\begin{chunk}{axiom.bib}
+@book{Aitk99,
+ author = "Ait-Kaci, Hassan",
+ title = {{Warren's Abstract Machine: A Tutorial Reconstruction}},
+ publisher = "MIT Press",
+ isbn = "0-262-51058-8",
+ year = "1999",
+ link = "\url{http://wambook.sourceforge.net/wambook.pdf}",
+ paper = "Aitk99.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Bagnara, Roberto}
+\index{Bagnara, Abramo}
+\index{Biselli, Fabio}
+\index{Chiari, Michele}
+\index{Gori, Roberta}
+\begin{chunk}{axiom.bib}
+@misc{Bagn19,
+ author = "Bagnara, Roberto and Bagnara, Abramo and Biselli, Fabio
+ and Chiari, Michele and Gori, Roberta",
+ title = {{Correct Approximation of IEEE 754 Floating-Point
+ Arithmetic for Program Verification}},
+ year = "2019",
+ link = "\url{https://arxiv.org/abs/1903.06119}",
+ abstract =
+ "Verification of programs using floating-point arithmetic is
+ challenging on several accounts. One of the difficulties of
+ reasoning about such programs is due to the peculiarities of
+ floating-point arithmetic: rounding errors, infinities,
+ non-numeric objects (NaNs), signed zeros, denormal numbers,
+ different rounding modes... One possibility to reason about
+ floating-point arithmetic is to model a program computation path
+ by means of a set of ternary constraints of the form $z=x op y$
+ and use constraint propagation techniques to infer new information
+ on the variables' possible values. In this setting, we define and
+ prove the correctness of algorithms to precisely bound the value
+ of one of the variables $x$, $y$, or $z$, starting from the bounds
+ known for the other two. We do this for each of the operations and
+ for each rounding mode defined by the IEEE 754 binary
+ floating-point standard, even in the case the rounding mode in
+ effect is only partially known. This is the first time that such
+ so-called filtering algorithms are defined and their correctness
+ is formally proved. This is an important slab for paving the way
+ to formal verification of programs that use floating-point
+ arithmetics.",
+ paper = "Bagn19.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Bakel, Steffan van}
+\begin{chunk}{axiom.bib}
+@article{Bake93,
+ author = "Bakel, Steffan van",
+ title = {{Principal Type Schemes for the Strict Type Assignment System}},
+ journal = "J. Logic and Computation",
+ volume = "3",
+ number = "6",
+ pages = "643-670",
+ year = "1993",
+ abstract =
+ "We study the strict type assignment system, a restriction on the
+ intersection type discipline and prove that it has the principal
+ type property. W define, for a term $M$, the principal pair (of
+ basis and type). We specify three operations on pairs, and prove
+ that all pairs deducible for $M$ can be obtained from the
+ principal one by these operations, and that these map deducible
+ pairs to deducible pairs.",
+ paper = "Bake93.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Boyer, Robert S.}
+\index{Moore, J Strother}
+\begin{chunk}{axiom.bib}
+@inbook{Boye72,
+ author = "Boyer, Robert S. and Moore, J Strother",
+ title = {{The Sharing of Structure in Theorem Proving Programs}},
+ booktitle = "Machine Intelligence 7",
+ pages = "110-116",
+ year = "1972",
+ abstract =
+ "We describe how clauses in resolution programs can be represented
+ and used without applying substitutions or cons-ing lists of
+ literals. The amount of space required by our representation of a
+ clause is independent of the number of literals in the clause and
+ the depth of function nesting. We introduce the concept of the
+ value of an expression in a binding environment which we use to
+ standardize clauses apart and share the structure of parents in
+ representing the resolvent. We present unification and resolution
+ algorithms for our representation. Some data comparing our
+ representation to more conventional ones is given.",
+ paper = "Boye72.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Clark, K.L.}
+\index{Tarnlund, S.-A.}
+\begin{chunk}{axiom.bib}
+@book{Clar82,
+ author = "Clark, K.L. and Tarnlund, S.-A.",
+ title = {{Logic Programming}},
+ publisher = "Academic Press",
+ year = "1982",
+ isbn = "0-12-175520-7"
+}
+
+\end{chunk}
+
+\index{Daly, Timothy}
+\begin{chunk}{axiom.bib}
+@misc{Daly18a,
+ author = "Daly, Timothy",
+ title = {{Proving Axiom Sane Talk}},
+ comment = "International Conference on Mathematical Software",
+ journal = "LNCS",
+ volume = "10931",
+ year = "2018",
+ paper = "Daly18a.pdf"
+}
+
+\end{chunk}
+
+\index{Dzamonja, Mirna}
+\begin{chunk}{axiom.bib}
+@misc{Dzam18,
+ author = "Dzamonja, Mirna",
+ title =
+ {{A New Foundational Crisis in Mathematics, Is it really happening?}},
+ link = "\url{https://arxiv.org/pdf/1802.06221.pdf}",
+ year = "2018",
+ abstract =
+ "The article reconsiders the position of the foundations of
+ mathematics after the discovery of HoTT. Discussion that this
+ discovery has generated in the community of mathematicians,
+ philosophers and computer scientists might indicate a new crisis
+ in the foundation of mathematics. By examining the mathematical
+ facts behind HoTT and their relation with the existing
+ foundations, we conclude that the present crisis is not one. We
+ reiterate a pluralist vision of the foundations of mathematics.
+
+ The article contains a short survey of the mathematical and
+ historical background needed to understand the main tenets of the
+ foundational issues.",
+ paper = "Dzam18.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Giannini, Paola}
+\begin{chunk}{axiom.bib}
+@techreport{Gian85,
+ author = "Giannini, Paola",
+ title = {{Type Checking and Type Deduction Techniques for
+ Polymorphic Programming Languages}},
+ type = "technical report",
+ institution = "Carnegie Mellon University",
+ number = "CMU-CS-85-187",
+ year = "1985",
+ abstract =
+ "In this paper we present some of the syntactic issues that arise
+ in polymorphic programming languages. In particular we examine
+ type checking and deduction in two different polymorphic type
+ structures: the parametric lambda-calculus (with let construct)
+ and the polymorphic or second-order lambda-calculus. In both
+ approaches the behavior of types is formalized with type inference
+ rules. Examples of programming languages following those
+ approaches are presented and some of their specific problems
+ studied.",
+ paper = "Gian85.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\begin{chunk}{axiom.bib}
+@article{Harp92,
+ author = "Harper, Robert",
+ title = {{Constructing Type Systems over an Operational Semantics}},
+ journal = "J. Symbolic Computation",
+ volume = "14",
+ pages = "71-84",
+ year = "1992",
+ abstract =
+ "Type theories in the sense of Martin-Lof and the NuPRL system are
+ based on taking as primitive a type-free programming language
+ given by an operational semantics, and defining types as partial
+ equivalence relations on the set of closed terms. The construction
+ of a type system is based on a general form of inductive
+ definition that may either be taken as acceptable in its own
+ right, or further explicated in terms of other patterns of
+ induction. One such account, based on a general theory of
+ inductively defined relations, was given by Allen. An alternative
+ account, based on an essentially set theoretic argument, is
+ presented.",
+ paper = "Harp92.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\begin{chunk}{axiom.bib}
+@misc{lion137,
+ author = "Unknown",
+ title = {{Thoughts in Free Time}},
+ link = "\url{https://lion137.blogspot.com/2019/02/fundamental-algorithms-polyomial-gcd.html}",
+ comment = "\url{https://github.com/lion137/Fundamental_Algorithsms}",
+ year = "2019"
+}
+
+\end{chunk}
+
+\index{Norrish, Michael}
+\index{Slind, Konrad}
+\begin{chunk}{axiom.bib}
+@article{Norr02,
+ author = "Norrish, Michael and Slind, Konrad",
+ title = {{A Thread of HOL Development}},
+ journal = "Computer Journal",
+ volume = "45",
+ number = "1",
+ pages = "37-45",
+ year = "2002",
+ abstract =
+ "The HOL system is a mechanized proof assistant for higher order
+ logic that has been under continuous development since the
+ mid-1980s, by an ever-changing group of developers and external
+ contributors. we give a brief overview of various implementations
+ of the HOL logic before focusing on the evolution of certain
+ important features available in a recent implementation. We also
+ illustrate how the module system of Standard ML provided security
+ and modularity in the construction of the HOL kernel, as well as
+ serving in a separate capacity as a useful representation medium
+ for persistent, hierarchical logical theories.",
+ paper = "Norr02.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Paulson, Lawrence C.}
+\begin{chunk}{axiom.bib}
+@inbook{Paul90b,
+ author = "Paulson, Lawrence C.",
+ title = {{Designing a Theorem Prover}},
+ booktitle = "Handbook of Logic in Computer Science, Volume 2",
+ publisher = "Oxford University Press",
+ pages = "415-475",
+ year = "1992",
+ paper = "Paul90b.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Robinson, J.A.}
+\index{Sibert, E.E.}
+\begin{chunk}{axiom.bib}
+@techreport{Robi80,
+ author = "Robinson, J.A. and Sibert, E.E.",
+ title = {{Loglisp: An Alternative to Prolog}},
+ type = "technical report",
+ institution = "University of Syracuse",
+ number = "80-7",
+ year = "1980",
+ paper = "Robi80.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Demers, Alan}
+\index{Donahue, James}
+\begin{chunk}{axiom.bib}
+@inproceedings{Deme80,
+ author = "Demers, Alan and Donahue, James",
+ title = {{Type Completeness as a Language Principle}},
+ booktitle = "POPL 80",
+ publisher = "ACM",
+ pages = "234-244",
+ year = "1980",
+ abstract =
+ "The problem of Von Neumann languages is that their changeable
+ parts have so little expressive power -- John Backus",
+ paper = "Deme80.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Jammer, Max}
+\begin{chunk}{axiom.bib}
+@book{Jamm66,
+ author = "Jammer, Max",
+ title = {{The Conceptual Development of Quantum Mechanics}},
+ year = "1996",
+ publisher = "McGraw-Hill"
+}
+
+\end{chunk}
+
+\index{Mackie, Ian}
+\index{Pinto, Jorge Sousa}
+\begin{chunk}{axiom.bib}
+@article{Mack02,
+ author = "Mackie, Ian and Pinto, Jorge Sousa",
+ title = {{Encoding Linear Logic with Interaction Combinators}},
+ journal = "Information and Computation",
+ volume = "176",
+ pages = "153-186",
+ year = "2002",
+ abstract =
+ "The purpose of this paper is to demonstrate how Lafont's
+ interaction combinators, a system of three symbols and six
+ interaction rules, can be used to encode linear
+ logic. Specifically, we give a translation of the multiplicative,
+ exponential, and additive fragments of linear logic together with
+ a strategy for cut-elimination which can be faithfully
+ simulated. Finally, we show briefly how this encoding can be used
+ for evaluating $\lambda$-terms. In addition to offering a very
+ simple, perhaps the simplest, system of rewriting for linear logic
+ and the $\lambda$-calculus, the interaction net implementation
+ that we present has been shown by experimental testing to offer a
+ good level of sharing in terms of the number of cut-elimination
+ steps (resp. $\beta$-reduction steps). In particular it performs
+ better than all extant finite systems of interaction nets.",
+ paper = "Mack02.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\begin{chunk}{axiom.bib}
+@inproceedings{Oisd18,
+ author = "Anonymous",
+ title = {{Solving Rings in Agda}},
+ booktitle = "Proc. ACM Program. Lang.",
+ publisher = "ACM",
+ year = "2018",
+ abstract =
+ "We present a new library which automates the construction of
+ equivalence proofs between polynomials over commutative rings and
+ semirings in the programming language Agda [Norell and Chapman
+ 2008]. It is significantly faster than Agda's existing solver. We
+ use reflection to provide a simple interface to the solver, and
+ demonstrate how to use the constructed proofs to provide
+ step-by-step solutions.",
+ paper = "Oisd18.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Harvey, David}
+\index{van der Hoeven, Joris}
+\begin{chunk}{axiom.bib}
+@misc{Harv19,
+ author = "Harvey, David and van der Hoeven, Joris",
+ title = {{Integer Multiplication in Time O(n log n)}},
+ link = "\url{https://hal.archives-ouvertes.fr/hal-02070778/document}",
+ year = "2019",
+ abstract =
+ "We present an algorithm that computes the product of two
+ n-bit integers in O(n log n) bit operations",
+ paper = "Harv19.pdf"
+}
+
+\end{chunk}
+
+\index{Dunfield, Joshua}
+\index{Krishnaswami, Neelakantan R.}
+\begin{chunk}{axiom.bib}
+@misc{Dunf13,
+ author = "Dunfield, Joshua and Krishnaswami, Neelakantan R.",
+ title = {{Complete and Easy Bidirectional Typechecking for Higher-Rank
+ Polymorphism}},
+ link = "\url{https://arxiv.org/pdf/1306.6032.pdf}",
+ year = "2013",
+ abstract =
+ "Bidirectional typechecking, in which terms either synthesize a
+ type or are checked against a known type, has become popular for
+ its scalability (unlike Damas-Milner type inference, bidirectional
+ typing remains decidable even for very expressive type systems),
+ its error reporting, and its relative ease of
+ implementation. Following design principles from proof theory,
+ bidirectional typing can be applied to many type constructs. The
+ principles underlying a bidirectional approach to polymorphism,
+ however, are less obvious. We give a declarative, bidirectional
+ account of higher-rank polymorphism, grounded in proof theory;
+ this calculus enjoys many properties such as $\eta$-reduction and
+ predictability of annotations. We give an algorithm for
+ implementing the declarative system; our algorithm is remarkably
+ simple and well-behaved, despite being both sound and complete.",
+ paper = "Dunf13.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Parisse, Bernard}
+\begin{chunk}{axiom.bib}
+@misc{Pari19,
+ author = "Parisse, Bernard",
+ title = {{Computing Huge Groebner Basis like Cyclic10 over
+ $\mathbb{Q}$ with Giac}},
+ link = "\url{https://hal.archives-ouvertes.fr/hal-02081648}",
+ year = "2019",
+ abstract =
+ "We present a short description on how to fine-tune the
+ modular algorithm implemented in the Giac computer algebra system
+ to reconstruct large Groebner basis over $\mathbb{Q}$. The
+ classical cyclic10 benchmark will serve as example.",
+ paper = "Pari19.pdf"
+}
+
+\end{chunk}
+
+\index{Denes, Maxime}
+\index{Mortberg, Anders}
+\index{Siles, Vincent}
+\begin{chunk}{axiom.bib}
+@misc{Dene19,
+ author = "Denes, Maxime and Mortberg, Anders and Siles, Vincent",
+ title = {{A Refinement-based Approach to Computational Algebra in COQ}},
+ year = "2019",
+ link = "\url{www.cse.chalmers.se/~mortberg/papers/coqeal.pdf}",
+ abstract =
+ "We describe a step-by-step approach to the implementation and
+ formal verification of efficient algebraic algorithms. Formal
+ specifications are expressed on rich data types which are suitable
+ for deriving essential theoretical properties. These
+ specifications are then refined to concrete implementations on
+ more efficient data structures and linked to their abstract
+ counterparts. We illustrate this methodology on key applications:
+ matrix rank computation, Winograd's fast matrix product,
+ Karatsuba's polynomial multiplication, and the gcd of multivariate
+ polynomials.",
+ paper = "Dene19.pdf",
+ keywords = "printed",
+}
+
+\end{chunk}
+
+\index{Ly, Kim Quyen}
+\begin{chunk}{axiom.bib}
+@misc{Lyxx15,
+ author = "Ly, Kim Quyen",
+ title = {{Formalization in Coq of Polynomial Interpretations on
+ Rationals}},
+ year = "2015",
+ link = "\url{https://www.di.ens.fr/~quyen/publication/ly10.pdf}",
+ paper = "Lyxx15.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kovacs, Laura}
+\index{Voronkov, Andrei}
+\begin{chunk}{axiom.bib}
+@misc{Kova13,
+ author = "Kovacs, Laura and Voronkov, Andrei",
+ title = {{First-Order Theorem Proving and Vampire}},
+ year = "2013",
+ link = "\url{http://www.cse.chalmers.se/~laurako/pub/CAV13_Kovacs.pdf}",
+ abstract =
+ "In this paper we give a short introduction in first-order theorem
+ proving and the use of the theorem prover Vampire. We discuss the
+ superposition calculus and explain the key concepts of saturation
+ and redundancy elimination, present saturation algorithms and
+ preprocessing, and demonstrate how these concepts are implemented
+ in Vampire. Further, we also cover more recent topics and features
+ of Vampire designed for advanced applications, including
+ satisfiability checking, theory reasoning, interpolation,
+ consequence elimination, and program analysis.",
+ paper = "Kova13.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kotelnikov, Evgenii}
+\index{Kovacs, Laura}
+\index{Reger, Giles}
+\index{Voronkov, Andrei}
+\begin{chunk}{axiom.bib}
+@inproceedings{Kote16,
+ author = "Kotelnikov, Evgenii and Kovacs, Laura and Reger, Giles and
+ Voronkov, Andrei",
+ title = {{The Vampire and the FOOL}},
+ booktitle = "SIGPLAN Conf. on Certified Programs and Proofs",
+ year = "2016",
+ publisher = "ACM",
+ pages = "37-48",
+ abstract =
+ "This paper presents new features recently implemented in the
+ theorem prover Vampire, namely support for first-order logic with
+ a first class boolean sort (FOOL) and polymorphic arrays. In
+ addition to having a first class boolean sort, FOOL also contains
+ if-then-else and let-in expressions. We argue that presented
+ extensions facilitate reasoning-based program analysis, both by
+ increasing the expressivity of first-order reasoners and by gains
+ in efficiency.",
+ paper = "Kote16.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Storjohann, Arne}
+\begin{chunk}{axiom.bib}
+@inproceedings{Stor97,
+ author = "Storjohann, Arne",
+ title = {{A Solution to the extended GCD problem with applications}},
+ booktitle = "ISSAC '97",
+ publisher = "ACM",
+ year = "1997",
+ paper = "Stor97.pdf",
+ keywords = "printed"
+
+}
+
+\end{chunk}
+
+\index{Corless, Robert}
+\index{Postma, Erik}
+\index{Stoutemyer, David}
+\begin{chunk}{axiom.bib}
+@inproceedings{Corl11,
+ author = "Corless, Robert and Postma, Erik and Stoutemyer, David",
+ title = {{GCD of Multivariate Approximate Polynomials using
+ Beautification with the Subtractive Algorithm}},
+ booktitle = "Int. Workshop on Symbolic-Numeric Computation",
+ publisher = "ACM",
+ year = "2011",
+ paper = "Corl11.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Cheng, Howard}
+\index{Labahn, George}
+\index{Zhou, Wei}
+\begin{chunk}{axiom.bib}
+@article{Chen08,
+ author = "Cheng, Howard and Labahn, George and Zhou, Wei",
+ title = {{Computing Polynomial LCD and GCD in Lagrange Basis}},
+ journal = "Communications in Computer Algebra",
+ volume = "42",
+ number = "3",
+ pages = "129-130",
+ year = "2008",
+ abstract =
+ "We discuss the verification of mathematical software solving
+ polynomial systems symbolically by way of triangular
+ decomposition. Standard verification techniques are highly
+ resource consuming and apply only to polynomial systems which are
+ easy to solve. We exhibit a new approach which manipulates
+ constructible sets represented by regular systems. We provide
+ comparative benchmarks of different verification procedures
+ applied to four solvers on a large set of well-known polynomial
+ systems. Our experimental results illustrate the high efficiency
+ of our new approach. In particular, we are able to verify
+ triangular decomposition of polynomial systems which are not easy
+ to solve.",
+ paper = "Chen08.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Cheng, Howard}
+\index{Labahn, George}
+\begin{chunk}{axiom.bib}
+@inproceedings{Chen06,
+ author = "Cheng, Howard and Labahn, George",
+ title = {{On Computing Polynomial GCDs in Alternate Bases}},
+ booktitle = "ISSAC '06",
+ publisher = "ACM",
+ year = "2006",
+ pages = "47-54",
+ abstract =
+ "In this paper, we examine the problem of computing the greatest
+ common divisor (GCD) of univariate polynomials represented in
+ different bases. When the polynomials are represented in Newton
+ basis or a basis of orthogonal polynomials, we show that the
+ well-known Sylvester matrix can be generalized. We give
+ fraction-free and modular algorithms to directly compute the GCD
+ in the alternate basis. These algorithms are suitable for
+ computation in domains where growth of coefficients in
+ intermediate computations are a central concern. In the cases of
+ Newton basis and bases using certain orthogonal polynomials, we
+ also show that the standard subresultant algorithm can be applied
+ easily. If the degrees of the input polynomials is at most $n$ and
+ the degree of the GCD is at least $n/2$, our algorithms outperform
+ the corresponding algorithms using the standard power basis.",
+ paper = "Chen06.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Chen, Changbo}
+\index{Maza, Marc Moreno}
+\begin{chunk}{axiom.bib}
+@article{Chen15,
+ author = "Chen, Changbo and Maza, Marc Moreno",
+ title = {{Simplification of Cylindrical Algebraic Formulas}},
+ journal = "LNCS",
+ volume = "9301",
+ pages = "119-134",
+ comment = "Int. Workshop on Computer Algebra in Scientific Computing",
+ paper = "Chen15.pdf"
+}
+
+\end{chunk}
+
+\index{Chen, Changbo}
+\index{Covanov, Svyatoslav}
+\index{Mansouri, Farnam}
+\index{Maza, Marc Moreno}
+\index{Xie, Ning}
+\index{Xie, Yuzhen}
+\begin{chunk}{axiom.bib}
+@article{Chen14,
+ author = "Chen, Changbo and Covanov, Svyatoslav and Mansouri, Farnam
+ and Maza, Marc Moreno and Xie, Ning and Xie, Yuzhen",
+ title = {{Basic Polynomial Algebra Subprograms}},
+ journal = "Communications in Computer Algebra",
+ volume = "48",
+ number = "3/4",
+ pages = "197-201",
+ year = "2014",
+ paper = "Chen14.pdf"
+}
+
+\end{chunk}
+
+\index{Gleich, David}
+\begin{chunk}{axiom.bib}
+@misc{Glei05,
+ author = "Gleich, David",
+ title = {{Finite Calculus: A Tutorial for Solving Nasty Sums}},
+ link = "\url{}",
+ year = "2005",
+ abstract =
+ "In this tutorial, I will first explain the need for finite
+ calculus using an example sum I think is difficult to solve. Next,
+ I will show where this sum actually occurs and why it is
+ important. Following that, I will present all the mathematics
+ behind finite calculus and a series of theorems to make it helpful
+ before concluding with a set of examples to show that it really is
+ useful.",
+ paper = "Glei05.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Majewski, Bohdan}
+\index{Havas, George}
+\begin{chunk}{axiom.bib}
+@article{Maje94,
+ author = "Majewski, Bohdan and Havas, George",
+ title = {{The Complexity of Greatest Common Divisor Computations}},
+ journal = "LNCS",
+ volume = "877",
+ pages = "184-193",
+ year = "1994",
+ abstract =
+ "We study the complexity of expressing the greatest common divisor
+ of $n$ positive numbers as a linear combination of the
+ numbers. We prove the NP-completeness of finding an optimal set of
+ multipliers with respect to either of $L_0$ metric or the
+ $L_\infty$ norm. We present and analyze a new method for
+ expressing the gcd of $n$ numbers as their linear combination and
+ give an upper bound on the size of the largest multiplier
+ produced by this method, which is optimal.",
+ paper = "Maje94.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Shallit, Jeffrey}
+\index{Sorenson, Jonathan}
+\begin{chunk}{axiom.bib}
+@article{Shal94,
+ author = "Shallit, Jeffrey and Sorenson, Jonathan",
+ title = {{Analysis of a Left-Shift Binary GCD Algorithm}},
+ journal = "Journal of Symbolic Computation",
+ volume = "17",
+ number = "6",
+ pages = "473-486",
+ year = "1994",
+ abstract =
+ "We introduce a new left-shift binary algorithm, LSBGCD, for
+ computing the greatest common divisor of two integers, and we
+ provide an analysis of the worst-case behavior of the
+ algorithm. The analysis depends on a theorem of Ramharter about
+ the extremal behavior of certain continuants.",
+ paper = "Shal94.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kozen, Dexter}
+\index{Landau, Susan}
+\index{Zippel, Richard}
+\begin{chunk}{axiom.bib}
+@article{Koze94,
+ author = "Kozen, Dexter and Landau, Susan and Zippel, Richard",
+ title = {{Decomposition of Algebraic Functions}},
+ journal = "LNCS",
+ volume = "877",
+ pages = "80-92",
+ year = "1994",
+ abstract =
+ "Functional decomposition -- whether a function $f(x)$ can be
+ written as a composition of functions $g(h(x))$ in a nontrivial
+ way -- is an important primitive in symbolic computation
+ systems. The problem of univariate polynomial decomposition was
+ shown to have an efficient solution by Kozen and Landau.
+ Dickerson and von zur Gathen gave algorithms for certain
+ multivariate cases. Zippel showed how to decompose rational
+ functions. In this paper, we address the issue of decomposition of
+ algebraic functions. We show that the problem is related to
+ univariate resultants in algebraic function fields, and in fact
+ can be reformulated as a problem of resultant decomposition. We
+ characterize all decompositions of a given algebraic function up
+ to isomorphism, and give an exponential time algorithm for finding
+ a nontrivial one if it exists. The algorithm involves genus
+ calculations and constructing transcendental generators of fields
+ of genus zero.",
+ paper = "Koze94.pdf"
+}
+
+\end{chunk}
+
+\index{Lipton, Richard J.}
+\begin{chunk}{axiom.bib}
+@article{Lipt94,
+ author = "Lipton, Richard J.",
+ title = {{Straight-Line Complexity and Integer Factorization}},
+ journal = "LNCS",
+ volume = "877",
+ pages = "71-79",
+ year = "1994",
+ abstract =
+ "We show that if polynomials with many rational roots have
+ polynomial length straight-line complexity, then integer
+ factorization is 'easy'",
+ paper = "Lipt94.pdf"
+}
+
+\end{chunk}
+
+\index{Chen, Changbo}
+\index{Maza, Marc Moreno}
+\begin{chunk}{axiom.bib}
+@article{Chen16,
+ author = "Chen, Changbo and Maza, Marc Moreno",
+ title = {{Quantifier Elimination by Cylindrical Algebraic
+ Decomposition based on Regular Chains}},
+ journal = "Journal of Symbolic Computation",
+ volume = "75",
+ pages = "74-93",
+ year = "2016",
+ abstract =
+ "A quantifier elimination algorithm by cylindrical algebraic
+ decomposition based on regular chains is presented. The main idea
+ is to refine a complex cylindrical tree until the signs of
+ polynomials appearing in the tree are sufficient to distinguish
+ the true and false cells. We report an implementation of our
+ algorithm in the RegularChains library in MAPLE and illustrate its
+   effectiveness by examples.",
+ paper = "Chen16.pdf"
+}
+
+\end{chunk}
+
+\index{Kotelnikov, Evgenii}
+\begin{chunk}{axiom.bib}
+@phdthesis{Kote18,
+ author = "Kotelnikov, Evgenii",
+ title = {{Automated Theorem Proving with Extensions of First-Order Logic}},
+ school = "Chalmers",
+ year = "2018",
+ abstract =
+ "Automated theorem provers are computer programs that check
+ whether a logical conjecture follows from a set of logical
+ statements. The conjecture and the statements are expressed in the
+ language of some formal logic, such as first-order logic. Theorem
+ provers for first-order logic have been used for automation in
+ proof assistants, verification of programs, static analysis of
+ networks, and other purposes. However, the efficient usage of
+ these provers remains challenging. One of the challenges is the
+ complexity of translating domain problems to first-order
+ logic. Not only can such translation be cumbersome due to semantic
+ differences between the domain and the logic, but it might
+ inadvertently result in problems that provers cannot easily handle.
+
+ The work presented in the thesis addresses this challenge by
+ developing an extension of first-order logic named FOOL. FOOL
+ contains syntactical features of programming languages and more
+ expressive logics, is friendly for translation of problems from
+   various domains, and can be efficiently supported by existing
+ theorem provers. We describe the syntax and semantics of FOOL and
+ present a simple translation from FOOL to plain first-order
+ logic. We describe an efficient clausal normal form transformation
+ algorithm for FOOL and based on it implement a support for FOOL in
+ the Vampire theorem prover. We illustrate the efficient use of
+ FOOL for program verification by describing a concise encoding of
+ next state relations of imperative programs in FOOL. We show a
+ usage of features of FOOL in problems of static analysis of
+ networks. We demonstrate the efficiency of automated theorem
+ proving in FOOL with an extensive set of experiments. In these
+ experiments we compare the performance of Vampire on a large
+ collection of problems from various sources translated to FOOL and
+ ordinary first-order logic. Finally, we fix the syntax for FOOL in
+ TPTP, the standard language of first-order theorem provers.",
+ paper = "Kote18.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Dewar, Michael}
+\begin{chunk}{axiom.bib}
+@inproceedings{Dewa92,
+ author = "Dewar, Michael",
+ title = {{Using Computer Algebra to Select Numerical Algorithms}},
+ booktitle = "ISSAC '92",
+ publisher = "ACM",
+ year = "1992",
+ pages = "1-8",
+ isbn = "0-89791-489-9",
+ abstract =
+ "Many real-life problems require a combination of both symbolic
+ and numerical methods for their solution. This has led to the
+ development of integrated, interactive symbolic / numeric packages
+ which use a computer algebra system for the former and a standard
+ subroutine library for the later. These systems may also be viewed
+ as simplified front-ends to the numerical library. To use these
+ packages, however, a user must be able to select which of the many
+ available routines is the most appropriate for his or her problem,
+ which contrasts with the 'black-box' style interfaces available in
+ computer algebra systems. This paper describes how a computer
+ algebra system can be used to make this decision, thus providing a
+ much-simplified and orthogonal interface.",
+ paper = "Dewa92.pdf"
+}
+
+\end{chunk}
+
+\index{Dupee, Brian J.}
+\index{Davenport, James H.}
+\begin{chunk}{axiom.bib}
+@article{Dupe96,
+ author = "Dupee, Brian J. and Davenport, James H.",
+ title = {{An Intelligent Interface to Numerical Routines}},
+ journal = "LNCS",
+ number = "1128",
+ pages = "252-262",
+ year = "1996",
+ abstract =
+ "Links from Computer Algebra Systems to Numerical Libraries have
+ been increasingly made available. However, they remain, like the
+ numerical routines which comprise the libraries, difficult to use
+ by a novice and there is little help in choosing the appropriate
+ routine for any given problem, should there be a choice.
+
+ Computer Algebra Systems use generic names for each problem
+ area. For example, 'integrate' (or 'int') is used for integration
+ of a function, whatever method the code may use. Numeric
+ interfaces still use different names for each method together with
+ a variety of extra parameters, some of which may be
+ optional. Ideally, we should extend the generic name structure to
+ cover numerical routines. This would then, necessarily, require
+ algorithms for making an assessment of the efficacy of different
+ methods where such a choice exists.
+
+ This paper considers the link to the NAG Fortran Library from
+ version 2.0 of Axiom and shows how we can build on this to extend
+ and simplify the interface using an expert system for choosing and
+ using the numerical routines.",
+ paper = "Dupe96.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Davenport, James H.}
+\index{Dewar, Michael C.}
+\index{Richardson, Michael G.}
+\begin{chunk}{axiom.bib}
+@inbook{Dave92f,
+ author = "Davenport, James H. and Dewar, Michael C. and
+ Richardson, Michael G.",
+ title = {{Symbolic and Numeric Computation: the Example of IRENA}},
+ booktitle = "Symbolic and Numerical Computation for Artificial Intelligence",
+ pages = "347-362",
+ year = "1992",
+ publisher = "Academic Press",
+
+ abstract =
+ "Historically symbolic and numeric computation have pursued
+ different lines of evolution, have been written in different
+ languages and generally seen to be competitive rather than
+ complementary techniques. Even when both were used to solve a
+ problem ad hoc methods were used to transfer the data between
+ them.
+
+ We first discuss the reasons for this dichotomy, and then present
+ IRENA, a system being developed by the authors to present an
+ integrated environment with all the facilities of Reduce combined
+ with the functionality of the NAG FORTRAN library.
+
+ Not only does IRENA allow the Reduce user to make calls to the NAG
+ Library interactively, it also converts a natural input
+ representation to the required unnatural FORTRAN one and
+ vice-versa on output, which results in a much more intuitive
+ interface. Many parameters have default values and so need not be
+   supplied by the user.",
+ paper = "Dave92f.pdf"
+}
+
+\end{chunk}
+
+\index{Dewar, Mike}
+\index{Carlisle, David}
+\begin{chunk}{axiom.bib}
+@inproceedings{Dewa01,
+ author = "Dewar, Mike and Carlisle, David",
+  title = {{Mathematical Software: The Next Generation?}},
+ booktitle = "Int. Workshop on Mathematical Knowledge Management",
+ link = "\url{https://www.emis.de/proceedings/MKM2001/printed/dewar.pdf}",
+ publisher = "RISC",
+ year = "2001",
+ paper = "Dewa01.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Bodnar, Gabor}
+\index{Kaltenbacher, Barbara}
+\index{Pau, Petru}
+\index{Schicho, Josef}
+\begin{chunk}{axiom.bib}
+@article{Bodn01,
+ author = "Bodnar, Gabor and Kaltenbacher, Barbara and Pau, Petru and
+ Schicho, Josef",
+ title = {{Exact Real Computation in Computer Algebra}},
+ journal = "LNCS",
+ volume = "2630",
+ pages = "279-292",
+ year = "2001",
+ abstract =
+ "Exact real computation allows many of the advantages of numerical
+ computation (e.g. high performance) to be accessed also in
+ symbolic computation, providing validated results. In this paper
+ we present our approach to build a transparent and easy to use
+ connection between the two worlds, using this paradigm. The main
+ discussed topics are representation of exact real objects,
+ operations on exact real matrices, polynomial greatest common
+ divisor and root computation. Some of these problems are
+ ill-posed; we use regularization methods to solve them.",
+ paper = "Bodn01.pdf"
+}
+
+\end{chunk}
+
+\index{Barthe, G.}
+\index{Elbers, H.}
+\begin{chunk}{axiom.bib}
+@misc{Bart96,
+ author = "Barthe, G. and Elbers, H.",
+ title = {{Towards Lean Proof Checking}},
+ year = "1996",
+ abstract =
+ "Logical formal systems are inefficient at computations. In order
+ to increase their efficiency, we aim to extend these systems with
+ computational power. In this paper, we suggest a general, powerful
+ syntax, called oracle types, to extend type theories with
+ computational power; the resulting systems, which combine the
+ logical abilities of logical formal systems and the computational
+ power of term rewriting systems, provide a suitable environment
+ for theorem proving. As a practical application, we present an
+ extension of the theorem prover Lego with oracle types and
+ illustrate the use of this new system in performing algebraic
+ computations. Our implementation of oracle types is very flexible
+ and allows rewriting to be performed either inside Lego or by
+ Reduce, an efficient symbolic computation system. In our view, the
+ main novelty of our approach is to combine a sound theoretical
+ foundation with an efficient implementation. Besides, our work
+ provides the first attempt to combine symbolic computation systems
+ with theorem provers such as Coq and Lego, which are based on
+ intensional type theories.",
+ paper = "Bart96.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Bostan, Alin}
+\index{Schost, Eric}
+\begin{chunk}{axiom.bib}
+@article{Bost13,
+ author = "Bostan, Alin and Schost, Eric",
+ title = {{A Simple and Fast Algorithm for Computing Exponentials
+ of Power Series}},
+ journal = "Information Processing Letters",
+ volume = "13",
+ pages = "754-756",
+ year = "2013",
+ abstract =
+ "As was initially shown by Brent, exponentials of truncated power
+ series can be computed using a constant number of polynomial
+ multiplications. This note gives a relatively simple algorithm
+ with a low constant factor",
+ paper = "Bost13.pdf"
+}
+
+\end{chunk}
+
+\index{Piskac, Ruzica}
+\begin{chunk}{axiom.bib}
+@inproceedings{Pisk15,
+ author = "Piskac, Ruzica",
+  title = {{From Decision Procedures to Synthesis Procedures}},
+ booktitle = "Symp. on Symbolic and Numeric Algorithms for
+ Scientific Computing",
+ publisher = "ACM",
+ year = "2015",
+ abstract =
+ "Software synthesis is a technique for automatically generating
+ code from a given specification. The goal of software synthesis is
+ to make software development easier while increasing both the
+ productivity of the programmer and the correctness of the produced
+ code. In this paper we present an approach to synthesis that
+ relies on the use of automated reasoning and decision
+ procedures. First we describe how to generalize decision
+ procedures into predictable and complete synthesis
+ procedures. Here completeness means that the procedure is
+ guaranteed to find code that satisfies the given specification. We
+ illustrate the process of turning a decision procedure into a
+ synthesis procedure using linear integer arithmetic as an example.
+
+ However, writing a complete specification can be a tedious task,
+ sometimes even harder than writing the code itself. To overcome
+ this problem, ideally the user could provide a few input-output
+ examples, and then the code should be automatically derived. We
+ outline how to broaden usability and applications of current
+ software synthesis techniques. We conclude with an outlook on
+ possible future research directions and applications of synthesis
+ procedures.",
+ paper = "Pisk15.pdf"
+}
+
+\end{chunk}
+
+\index{Havas, George}
+\index{Majewski, Bohdan}
+\index{Matthews, K.R.}
+\begin{chunk}{axiom.bib}
+@techreport{Hava95,
+ author = "Havas, George and Majewski, Bohdan and Matthews, K.R.",
+ title = {{Extended GCD Algorithms}},
+ type = "technical report",
+ institution = "University of Queensland",
+ number = "TR0302",
+ year = "1995",
+ abstract =
+ "Extended gcd calculation has a long history and plays an
+ important role in computational number theory and linear
+ algebra. Recent results have shown that finding optimal
+ multipliers in extended gcd calculations is difficult. We study
+ algorithms for finding good multipliers and present new algorithms
+ with improved performance. We present a well-performing algorithm
+ which is based on lattice basis reduction methods and may be
+ formally analyzed. We also give a relatively fast algorithm with
+ moderate performance.",
+ paper = "Hava95.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Ritt, J.F.}
+\begin{chunk}{axiom.bib}
+@article{Ritt25,
+ author = "Ritt, J.F.",
+ title = {{Elementary Functions and their Inverses}},
+ journal = "Transactions of the American Mathematical Society",
+ volume = "27",
+ pages = "68-90",
+ year = "1925",
+ paper = "Ritt25.pdf"
+}
+
+\end{chunk}
+
+\index{Risch, Robert H.}
+\begin{chunk}{axiom.bib}
+@article{Risc76,
+ author = "Risch, Robert H.",
+ title = {{Implicitly Elementary Integrals}},
+ journal = "Proc. Amer. Math.",
+ volume = "57",
+ number = "1",
+ pages = "1-7",
+ year = "1976",
+ paper = "Risc76.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Risch, Robert H.}
+\begin{chunk}{axiom.bib}
+@article{Risc79,
+ author = "Risch, Robert H.",
+ title = {{Algebraic Properties of the Elementary Functions of Analysis}},
+ journal = "American Journal of Mathematics",
+ volume = "101",
+ number = "4",
+ pages = "743-759",
+ abstract =
+ "The elementary functions of a complex variable $z$ are those
+ functions built up from the rational functions of $z$ by
+ exponentiation, taking logarithms, and algebraic operations. The
+ purpose of this paper is first, to prove a 'structure theorem'
+ which shows that if an algebraic relation holds among a set of
+ elementary functions, then they must satisfy an algebraic relation
+ of a special kind. Then we make four applications of this theorem,
+ obtaining both new and old results which are described here
+ briefly (and imprecisely).
+ \begin{enumerate}
+ \item An algorithm is given for telling when two elementary
+ expressions define the same function.
+ \item A characterization is derived of those ordinary differential
+ equations having elementary solutions
+ \item The four basic functions of elementary calculus -- exp, log,
+  tan, tan$^{-1}$ -- are shown to be 'irredundant'
+ \item A characterization is given of elementary functions
+ possessing elementary inverses.",
+ paper = "Risc79.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Moses, Joel}
+\begin{chunk}{axiom.bib}
+@article{Mose72,
+ author = "Moses, Joel",
+ title = {{Toward a General Theory of Special Functions}},
+ journal = "Communications of the ACM",
+ volume = "15",
+ number = "7",
+ pages = "550-554",
+ year = "1972",
+ abstract =
+ "A list of a number of natural developments for the field of
+ algebraic manipulation is given. Then the prospects for a general
+ theory of functions defined by ordinary differential equations are
+ discussed. The claim is made that recent developments in
+ mathematics indicate that it should be possible to algorithmically
+ generate many properties of solutions to differential
+ equations. Such a theory is preferable to a less general effort to
+ make algebraic manipulation systems knowledgeable about the usual
+ special functions (e.g. exponential, hypergeometric).",
+ paper = "Mose72.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Muller, Jean-Michel}
+\begin{chunk}{axiom.bib}
+@book{Mull16,
+ author = "Muller, Jean-Michel",
+ title = {{Elementary Functions: Algorithms and Implementation}},
+ isbn = "978-1-4899-7981-0",
+ publisher = "Birkhauser",
+ year = "2016",
+ paper = "Mull16.pdf"
+}
+
+\end{chunk}
+
+\index{Fitt, A.D.}
+\index{Hoare, G.T.Q}
+\begin{chunk}{axiom.bib}
+@article{Fitt93,
+ author = "Fitt, A.D. and Hoare, G.T.Q",
+ title = {{The Closed-Form Integration of Arbitrary Functions}},
+ journal = "The Mathematical Gazette",
+ volume = "77",
+ number = "479",
+ pages = "227-236",
+  year = "1993",
+ paper = "Fitt93.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Schorre, D.V.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Scho64,
+ author = "Schorre, D.V.",
+ title = {{META II: A Syntax-Oriented Compiler Writing Language}},
+ booktitle = "19th National Conference of the ACM",
+ publisher = "ACM",
+ year = "1964",
+ abstract =
+ "META II is a compiler writing language which consists of syntax
+ equations resembling Backus normal form and into which
+ instructions to output assembly language commands are
+ inserted. Compilers have been written in this language for VALGOL
+ I and VALGOL II. The former is a simple algebraic language
+   designed for the purpose of illustrating META II. The latter
+   contains a fairly large subset of ALGOL 60.
+
+ The method of writing compilers which is given in detail in the
+   paper may be explained briefly as follows. Each syntax equation is
+ translated into a recursive subroutine which tests the input
+ string for a particular phrase structure, and deletes it if
+ found. Backup is avoided by the extensive use of factoring in the
+ syntax equations. For each source language, an interpreter is
+ written and programs are compiled into that interpretive language.
+
+ META II is not intended as a standard language which everyone will
+ use to write compilers. Rather, it is an example of a simple
+ working language which can give one a good start in designing a
+ compiler-writing compiler suited to his own needs. Indeed, the
+ META II compiler is written in its own language, thus lending
+ itself to modification.",
+ paper = "Scho64.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Jenks, Richard D.}
+\begin{chunk}{axiom.bib}
+@techreport{Jenk70,
+ author = "Jenks, Richard D.",
+ title = {{META/LISP: An interactive translator writing system}},
+ type = "research report",
+ number = "RC2968",
+ year = "1970",
+ institution = "IBM Research",
+ abstract =
+ "META/LISP is a general purpose translator writing system for IBM
+ System/360 currently running on TSS, CP/CMS, and OS/360. The input
+ to the system is a source program which simultaneously describes
+ 1) the syntax of some input data to be translated and
+ 2) algorithms which operate on the input data and a pushdown stack
+ to accomplish the desired translation; the output of the system is
+ a compiled program for translating that input data. In particular
+ when the input data are statements of a higher-level language to
+ be translated into assembly language, META/LISP serves as a
+ compiler-compiler. META/LISP uses the top-down syntax-directed
+ approach which makes the system extremely attractive for the
+ design and implementation of experimental languages; using
+ META/LISP such compilers are easy to write, easy to check out, and
+ - most importantly - easy to modify interactively. The appendices
+ which follow a rather complete description of the system including
+ a self-description of the META/LISP compiler.",
+ paper = "Jenk70.pdf",
+ keywords = "axiomref, printed"
+}
+
+\end{chunk}
+
+\index{Havas, George}
+\index{Majewski, Bohdan S.}
+\index{Matthews, Keith R.}
+\begin{chunk}{axiom.bib}
+@article{Hava98,
+ author = "Havas, George and Majewski, Bohdan S. and Matthews, Keith R.",
+ title = {{Extended GCD and Hermite Normal Form Algorithms via
+ Lattice Basis Reduction}},
+ journal = "Experimental Mathematics",
+ volume = "7",
+ number = "2",
+ pages = "125-136",
+ year = "1998",
+ abstract =
+ "Extended gcd calculation has a long history and plays an
+ important role in computational number theory and linear
+ algebra. Recent results have shown that finding optimal
+ multipliers in extended gcd calculations is difficult. We present
+ an algorithm which uses lattice basis reduction to produce small
+ integer multipliers $x_1,\ldots,x_m$ for the equation
+ $s=gcd(x_1,\ldots,x_m)-x_1s_1+\ldots+x_ms_m$ where
+ $s_1,\ldots,s_m$ are given integers. The method generalises to
+ produce small unimodular transformation matrices for computing the
+ hermite normal form of an integer matrix.",
+ paper = "Hava98.pdf"
+}
+
+\end{chunk}
+
+\index{Havas, George}
+\index{Majewski, Bohdan}
+\begin{chunk}{axiom.bib}
+@techreport{Hava97,
+ author = "Havas, George and Majewski, Bohdan",
+ title = {{Extended GCD Algorithms}},
+ type = "technical report",
+ institution = "University of Queensland",
+ number = "TR0325",
+ year = "1997",
+ abstract =
+   "Given an integer vector of $n$ positive numbers
+ $a=\vert a_i \vert^n_{i=1}$ the extended gcd problem asks for an
+ integer vector $x$ of length $n$ such that
+ \[xa^T=\sum_{i=1}^n x_ia_i = gcd(a_1,a_2,\ldots,a_n)\]
+
+ For many applications it is vital that some measure of $x$,
+ $\norm{x}$ is small. We have proved, however, that if we choose
+ either the max norm or the zero matric the question of finding
+ $x$ such that $\norm{x}$ is smaller than some positive constant
+ $K$ is NP-complete. We conjecture that the questions remains
+ NP-complete for other norms.
+
+   In the light of these results we have proposed two approximation
+ algorithms. Their respective complexities are
+ $O(n^2 log(max_i\{a_i\}))$ and $O(n^4 log(max_i\{a_i\}))$.
+ Theoretical analysis of the algorithms leads
+ to unsatisfactory bounds on the quality of the solution. Thus here
+ we undertake a practical study of the methods, where their
+ performance is matched against optimal solutions.",
+ paper = "Hava97.pdf"
+}
+
+\end{chunk}
+
+\index{Cheng, Eugenia}
+\begin{chunk}{axiom.bib}
+@misc{Chen04,
+ author = "Cheng, Eugenia",
+ title = {{How to write proofs: A quick guide}},
+ link = "\url{http://cheng.staff.shef.ac.uk/proofguide/proofguide.pdf}",
+ year = "2004",
+ paper = "Chen04.pdf",
+ keywords = "printed,DONE"
+}
+
+\end{chunk}
+
+\index{Warren, David H.D.}
+\index{Pereira, Luis M.}
+\begin{chunk}{axiom.bib}
+@misc{Warr77,
+ author = "Warren, David H.D. and Pereira, Luis M.",
+ title = {{Prolog -- The Language and its Implementation Compared
+ with Lisp}},
+ year = "1977",
+ link =
+ "\url{http://www.public.imtbs-tsp.eu/~gibson/Teaching/Teaching-ReadingMaterial/WarrenPereiraPereira77.pdf}",
+ abstract =
+ "Prolog is a simple but powerful programming language founded on
+ symbolic logic. The basic computational mechanism is a pattern
+ matching process (``unification'') operating on general record
+   structures (``terms of logic''). We briefly review the language
+ and compare it especially with pure Lisp. The remainder of the
+ paper discusses techniques for implementing Prolog efficiently; in
+ particular we describe how to compile the patterns involved in the
+ matching process. These techniques are as incorporated in our
+ DECsystem-10 Prolog compiler (written in Prolog). The code it
+ generates is comparable in speed with that produced by existing
+ DEC10 Lisp compilers. We argue that pattern matching is a better
+ method for expressing operations on structured data than
+ conventional selectors and constructors -- both for the user and
+ for the implementor.",
+ paper = "Warr77.pdf"
+}
+
+\end{chunk}
+
+\index{Day, Martin V.}
+\begin{chunk}{axiom.bib}
+@book{Dayx16,
+ author = "Day, Martin V.",
+ title = {{An Introduction to Proofs and the Mathematical Vernacular}},
+ year = "2016",
+ publisher = "Virginia Tech",
+ link = "\url{www.math.vt.edu/people/day/ProofsBook/IPaMV.pdf}",
+ paper = "Dayx16.pdf"
+}
+
+\end{chunk}
+
+\index{Fourer, Robert}
+\index{Gay, David M.}
+\index{Kernighan, Brian W.}
+\begin{chunk}{axiom.bib}
+@misc{Four03,
+ author = "Fourer, Robert and Gay, David M. and Kernighan, Brian W.",
+ title = {{AMPL Reference}},
+ link = "\url{https://ampl.com/BOOK/CHAPTERS/24-refman.pdf}",
+ year = "2003",
+ paper = "Four03.pdf"
+}
+
+\end{chunk}
+
+\index{Hammack, Richard}
+\begin{chunk}{axiom.bib}
+@book{Hamm18,
+ author = "Hammack, Richard",
+ title = {{Book of Proof}},
+ publisher = "Hammack, Richard",
+ year = "2018",
+ paper = "Hamm18.pdf"
+}
+
+\end{chunk}
+
+\index{Armstrong, J.L.}
+\index{Birding, S.R.}
+\index{Williams, M.C.}
+\begin{chunk}{axiom.bib}
+@inbook{Arms92,
+ author = "Armstrong, J.L. and Birding, S.R. and Williams, M.C.",
+ title = {{Use of Prolog for Developing a New Programming Language}},
+ booktitle = "The Practical Application of Prolog",
+ year = "1992",
+ publisher = "Institute of Electrical Engineers, London",
+ abstract =
+ "This paper describes how Prolog was used for the development of a
+ new concurrent real-time symbolic programming language called
+ Erlang.
+
+ Erlang was developed by first building a prototype in Prolog --
+ the prototype was used by a user group to test their reactions to
+ the language. As time passed many features were added (and
+ removed) from the interpreter and eventually the language reached
+ a level of maturity where it was decided to try it out on a
+ significant problem.
+
+ About 3 years and some 20,000 lines of Erlang later, performance
+ became an issue -- we wrote Prolog cross compilers from Erlang to
+ various concurrent logic programming languages followed by a
+ direct implementation of Erlang itself. The direct implementation
+ of Erlang was loosely based on the WAM and made by writing a
+   Prolog compiler from Erlang to a new abstract machine and an
+ emulator for the abstract machine in 'C'. The instruction set for
+ the abstract machine was first prototyped in Prolog -- finally the
+ compiler was re-written in Erlang, thus totally removing any
+ dependency on Prolog.
+
+ This paper describes some of the key events which lay between the
+ simple prototype and the current version of the language.",
+ paper = "Arms92.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Pirog, Maciej}
+\index{Gibbons, Jeremy}
+\begin{chunk}{axiom.bib}
+@misc{Piroxx,
+ author = "Pirog, Maciej and Gibbons, Jeremy",
+ title = {{Extended Abstract: A Functional Derivation of the Warren
+ Abstract Machine}},
+ link = "\url{http://www.cs.ox.ac.uk/jeremy.gibbons/publications/wam.pdf}",
+ year = "unknown",
+ abstract =
+ "Based on Danvy et al.'s functional correspondence, we give a
+ further example of gradual refinement of an interpreter into a
+ known, low-level abstract machine underlying real-world compilers,
+ by deriving an abstract model of the Warren Abstract Machine from
+ a simple resolution-based Prolog interpreter. We show that other
+ well-known functional programming techniques (namely, explicit
+ laziness and semi-persistent data structures) can help to develop
+ abstract machines without detailed examination of the semantics
+ realised by the interpreter.",
+ paper = "Piroxx.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Russinoff, David M.}
+\begin{chunk}{axiom.bib}
+@article{Russ92,
+ author = "Russinoff, David M.",
+ title = {{A Verified Prolog Compiler for the Warren Abstract Machine}},
+ journal = "Journal of Logic Programming",
+ volume = "13",
+ number = "4",
+ pages = "367-412",
+ year = "1992",
+ abstract =
+ "We extend the theory of Prolog to provide a framework for the
+ study of Prolog compilation technology. For this purpose, we first
+ demonstrate the semantic equivalence of two Prolog interpreters: a
+ conventional SLD-refutation procedure and one that employs
+ Warren's ``last call'' optimization. Next, we formally define the
+ Warren Abstract Machine (WAM) and its instruction set and present
+ a Prolog compiler for the WAM. Finally, we prove that the WAM
+ execution of a compiled Prolog program produces the same result as
+ the interpretation of its source.",
+ paper = "Russ92.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Bohrer, Brandon}
+\index{Crary, Karl}
+\begin{chunk}{axiom.bib}
+@misc{Bohr16,
+ author = "Bohrer, Brandon and Crary, Karl",
+ title = {{A Proof-Producing Verified Prolog Compiler}},
+ year = "2016",
+ link = "\url{www.cs.cmu.edu/~bbohrer/pub/twam-iclp2016-long.pdf}",
+ abstract =
+ "We have designed and implemented a verified compiler for a
+ dialect of Prolog. Our compiler is verified using proof-producing
+   compilation: every compiled program is accompanied with a formal
+ proof that it is equivalent to a particular source program. Our
+ formal proofs take the form of type information for our new
+ verifying abstract machine which we call the TWAM, whose type
+ system natively understands logic programs specified in the
+ logical framework LF. We present a soundness metatheorem for the
+ TWAM showing that well-typed TWAM programs are sound proof-search
+ procedures. In doing so, we reduce our trusted computing base from
+ the entire compiler to the TWAM typechecker.",
+ paper = "Bohr16.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Tarau, Paul}
+\begin{chunk}{axiom.bib}
+@misc{Tara16,
+ author = "Tarau, Paul",
+ title = {{A Hitchhiker's Guide to Reinventing a Prolog Machine}},
+ year = "2016",
+ abstract =
+ "We take a fresh, ``clean-room'' look at implementing Prolog by
+ deriving its translation to an executable representation and its
+ execution algorithm from a simple Horn Clause meta-interpreter.
+ The resulting design has some interesting properties:
+ \begin{itemize}
+ \item the heap representation of terms and the abstract machine
+ instruction encodings are the same.
+ \item no dedicated code area is used as the code is placed
+ directly on the heap.
+ \item unification and indexing operations are orthogonal
+ \item filtering of matching clauses happens without building new
+ structures on the heap
+ \item variables in function and predicate symbol positions are
+ handled with no performance penalty
+ \item a simple English-like syntax is used as an intermediate
+ representation for clauses and goals
+ \item the same English-like syntax can be used by programmers
+ directly as an alternative to classic Prolog syntax
+ \item solutions of (multiple) logic engines are exposed as answer
+ streams that can be combined through typical functional
+ programming patterns
+   \item performance of a basic interpreter implementing our design is
+ within a factor of 2 of a highly optimized WAM-based system
+ \end{itemize}
+
+ To help placing our design on the fairly rich map of Prolog
+ systems, we discuss similarities to existing Prolog abstract
+ machines, with emphasis on separating necessary commonalities from
+ arbitrary implementation choices.",
+ paper = "Tara16.pdf"
+}
+
+\end{chunk}
+
+\index{Felleisen, Matthias}
+\begin{chunk}{axiom.bib}
+@techreport{Fell85,
+ author = "Felleisen, Matthias",
+ title = {{Transliterating Prolog into Scheme}},
+ type = "technical report",
+ number = "182",
+ institution = "University of Indiana",
+ year = "1985",
+ paper = "Fell85.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Wang, Ke}
+\begin{chunk}{axiom.bib}
+@misc{Wang19,
+ author = "Wang, Ke",
+ title = {{Learning Scalable and Precise Representation of Program
+ Semantics}},
+ year = "2019",
+ abstract =
+ "Neural program embedding has shown potential in aiding the
+ analysis of large-scale, complicated software. Newly proposed deep
+ neural architectures pride themselves on learning program
+ semantics rather than superficial syntactic features. However, by
+ considering the source code only, the vast majority of neural
+ networks do not capture a deep, precise representation of program
+ semantics. In this paper, we present DYPRO, a novel deep neural
+ network that learns from program execution traces. Compared to the
+ prior dynamic models, not only is DYPRO capable of generalizing
+ across multiple executions for learning a program's dynamic
+ semantics in its entirety, but DYPRO is also more efficient when
+ dealing with programs yielding long execution traces. For
+ evaluation, we task DYPRO with semantic classification
+ (i.e. categorizing programs based on their semantics) and compared
+ it against two prominent static models: Gated Graph Neural Network
+ and TreeLSTM. We find that DYPRO achieves the highest prediction
+   accuracy among all models. To further reveal the capacity of all
+ aforementioned deep neural architectures, we examine if the models
+ can learn to detect deeper semantic properties of a program. In
+ particular given a task of recognizing loop invariants, we show
+ DYPRO beats all static models by a wide margin.",
+ paper = "Wang19.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Muller, Dennis}
+\index{Kohlhase, Michael}
+\index{Rabe, Florian}
+\begin{chunk}{axiom.bib}
+@article{Mull18,
+ author = "Muller, Dennis and Kohlhase, Michael and Rabe, Florian",
+ title = {{Automatically Finding Theory Morphisms for Knowledge
+ Management}},
+ journal = "LNCS",
+ volume = "11006",
+ year = "2018",
+ abstract =
+ "We present a method for finding morphisms between formal
+ theories, both within as well as across libraries based on
+ different logical foundations. As they induce new theorems in the
+ target theory for any of the source theory, theory morphisms are
+ high-value elements of a modular formal library. Usually, theory
+ morphisms are manually encoded, but this practice requires authors
+ who are familiar with source and target theories at the same time,
+ which limits the scalability of the manual approach.
+
+ To remedy this problem, we have developed a morphism finder
+ algorithm that automates theory morphism discovery. In this paper we
+ present an implementation in the MMT system and show specific use
+ cases. We focus on an application of theory discovery, where a user
+ can check whether a (part of a) formal theory already exists in
+ some library, potentially avoiding duplication of work or
+ suggesting an opportunity for refactoring.",
+ paper = "Mull18.pdf"
+}
+
+\end{chunk}
+
+\index{Cohl, Howard S.}
+\index{Greiner-Petter, Andre}
+\index{Schubotz, Moritz}
+\begin{chunk}{axiom.bib}
+@article{Cohl18,
+ author = "Cohl, Howard S. and Greiner-Petter, Andre and Schubotz, Moritz",
+ title = {{Automated Symbolic and Numerical Testing of DLMF Formulae
+ Using Computer Algebra Systems}},
+ journal = "LNCS",
+ volume = "11006",
+ year = "2018",
+ abstract =
+ "We have developed an automated procedure for symbolic and
+ numerical testing of formulae extracted from the National
+ Institute of Standards and Technology (NIST) Digital Library of
+ Mathematical Functions (DLMF). For the NIST Digital Repository of
+ Mathematical Formulae, we have developed conversion tools from
+ semantic Latex to Computer Algebra System (CAS) MAPLE which relies
+ on Youssef's part-of-math tagger. We convert a test data subset of
+ 4,078 semantic Latex DLMF formulae extracted from the DLMF to the
+ native CAS representation and then apply an automated scheme for
+ symbolic and numerical testing and verification. Our framework is
+ implemented using Java and MAPLE. We describe in detail the
+ conversion process which is required so that the CAS is able to
+ correctly interpret the mathematical representation of the
+ formulae. We describe the improvement of the effectiveness of our
+ automated scheme through incremental enhancements (making more
+ precise) of the mathematical semantics markup of the formulae.",
+ paper = "Cohl18.pdf"
+}
+
+\end{chunk}
+
+\index{Carette, Jacques}
+\index{Farmer, William M.}
+\index{Sharoda, Yasmine}
+\begin{chunk}{axiom.bib}
+@article{Care18,
+ author = "Carette, Jacques and Farmer, William M. and Sharoda, Yasmine",
+ title = {{Biform Theories: Project Description}},
+ journal = "LNCS",
+ volume = "11006",
+ year = "2018",
+ abstract =
+ "A biform theory is a combination of an axiomatic theory and an
+ algorithmic theory that supports the integration of reasoning and
+ computation. These are ideal for specifying and reasoning about
+ algorithms that manipulate mathematical expressions. However,
+ formalizing biform theories is challenging as it requires the
+ means to express statements about the interplay of what these
+ algorithms do and what their actions mean mathematically. This
+ paper describes a project to develop a methodology for expressing,
+ manipulating, managing, and generating mathematical knowledge as a
+ network of biform theories. It is a subproject of MathScheme, a
+ long-term project at McMaster University to produce a framework
+ for integrating formal deduction and symbolic computation.",
+ paper = "Care18.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Carette, Jacques}
+\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
+@article{Care17,
+ author = "Carette, Jacques and Farmer, William M.",
+ title = {{Formalizing Mathematical Knowledge as a Biform Theory
+ Graph: A Case Study}},
+ journal = "LNCS",
+ volume = "10383",
+ year = "2017",
+ abstract =
+ "A biform theory is a combination of an axiomatic theory and an
+ algorithmic theory that supports the integration of reasoning and
+ computation. These are ideal for formalizing algorithms that
+ manipulate mathematical expressions. A theory graph is a network
+ of theories connected by meaning-preserving theory morphisms that
+ map the formulae of one theory to the formulas of another
+ theory. Theory graphs are in turn well suited for formalizing
+ mathematical knowledge at the most convenient level of abstraction
+ using the most convenient vocabulary. We are interested in the
+ problem of whether a body of mathematical knowledge can be
+ effectively formalized as a theory graph of biform theories. As a
+ test case, we look at the graph of theories encoding natural
+ number arithmetic. We used two different formalisms to do this,
+ which we describe and compare. The first is realized in
+ CTT$_{uqe}$, a version of Church's type theory with quotation and
+ evaluation, and the second is realized in Agda, a dependently
+ typed programming language.",
+ paper = "Care17.pdf"
+}
+
+\end{chunk}
+
+\index{Chojecki, Przemyslaw}
+\begin{chunk}{axiom.bib}
+@article{Choj17,
+ author = "Chojecki, Przemyslaw",
+ title = {{DeepAlgebra -- An Outline of a Program}},
+ journal = "LNCS",
+ volume = "10383",
+ year = "2017",
+ abstract =
+ "We outline a program in the area of formalization of mathematics
+ to automate theorem proving in algebra and algebraic geometry. We
+ propose a construction of a dictionary between automated theorem
+ provers and (La)Tex exploiting syntactic parsers. We describe its
+ application to a repository of human-written facts and definitions
+ in algebraic geometry (The Stacks Project). We use deep learning
+ techniques.",
+ paper = "Choj17.pdf"
+}
+
+\end{chunk}
+
+\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
+@article{Farm17,
+ author = "Farmer, William M.",
+ title = {{Theory Morphisms in Church's Type Theory with Quotation
+ and Evaluation}},
+ journal = "LNCS",
+ volume = "10383",
+ year = "2017",
+ abstract =
+ "CTT$_{qe}$ is a version of Church's type theory with global
+ quotation and evaluation operators that is engineered to reason
+ about the interplay of syntax and semantics and to formalize
+ syntax-level mathematical algorithms. CTT$_{uqe}$ is a variant of
+ CTT$_{qe}$ that admits undefined expressions, partial functions,
+ and multiple base types of individuals. It is better suited than
+ CTT$_{qe}$ as a logic for building networks of theories connected
+ by theory morphisms. This paper presents the syntax and semantics
+ of CTT$_{uqe}$, defines a notion of a theory morphism from one
+ CTT$_{uqe}$ theory to another, and gives two simple examples
+ involving monoids that illustrate the use of theory morphisms in
+ CTT$_{qe}$.",
+ paper = "Farm17.pdf"
+}
+
+\end{chunk}
+
+\index{Muller, Dennis}
+\index{Gauthier, Thibault}
+\index{Kaliszyk, Cezary}
+\index{Kohlhase, Michael}
+\index{Rabe, Florian}
+\begin{chunk}{axiom.bib}
+@article{Mull17,
+ author = "Muller, Dennis and Gauthier, Thibault and Kaliszyk, Cezary
+ and Kohlhase, Michael and Rabe, Florian",
+ title = {{Classification of Alignments Between Concepts of Formal
+ Mathematical Systems}},
+ journal = "LNCS",
+ volume = "10383",
+ year = "2017",
+ abstract =
+ "Mathematical knowledge is publicly available in dozens of
+ different formats and languages, ranging from informal
+ (e.g. Wikipedia) to formal corpora (e.g. Mizar). Despite an
+ enormous amount of overlap between these corpora, only few
+ machine-actionable connections exist. We speak of alignment if the
+ same concept occurs in different libraries, possibly with slightly
+ different names, notations, or formal definitions. Leveraging
+ these alignments creates a huge potential for knowledge sharing
+ and transfer, e.g. integrating theorem provers or reusing
+ services across systems. Notably, even imperfect alignments,
+ i.e. concepts that are very similar rather than identical, can
+ often play very important roles. Specifically, in machine learning
+ techniques for theorem proving and in automation techniques that
+ use these, they allow learning-reasoning base automation for
+ theorem provers to take inspiration from proofs from different
+ formal proof libraries or semi-formal libraries even if the latter
+ is based on a different mathematical foundation. We present a
+ classification of alignments and design a simple format for
+ describing alignments, as well as an infrastructure for sharing
+ them. We propose these as a centralized standard for the
+ community. Finally, we present an initial collection of
+ approximately 12000 alignments from the different kinds of
+ mathematical corpora, including proof assistant libraries and
+ semi-formal corpora as a public resource.",
+ paper = "Mull17.pdf"
+}
+
+\end{chunk}
+
+\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
+@article{Farm16,
+ author = "Farmer, William M.",
+ title = {{Incorporating Quotation and Evaluation into Church's Type
+ Theory: Syntax and Semantics}},
+ journal = "LNCS",
+ volume = "9791",
+ year = "2016",
+ abstract =
+ "CTT$_{qe}$ is a version of Church's type theory that includes
+ quotation and evaluation operators that are similar to quote and
+ eval in the Lisp programming language. With quotation and
+ evaluation it is possible to reason in CTT$_{qe}$ about the
+ interplay of the syntax and semantics of expressions and, as a
+ result, to formalize syntax-based mathematical algorithms. We
+ present the syntax and semantics of CTT$_{qe}$ and give several
+ examples that illustrate the usefulness of having quotation and
+ evaluation in CTT$_{qe}$. We do not give a proof system for
+ CTT$_{qe}$ but we do sketch what a proof system could look like.",
+ paper = "Farm16.pdf"
+}
+
+\end{chunk}
+
+\index{Blanchette, Jasmin Christian}
+\index{Haslbeck, Maximilian}
+\index{Matichuk, Daniel}
+\index{Nipkow, Tobias}
+\begin{chunk}{axiom.bib}
+@article{Blan15,
+ author = "Blanchette, Jasmin Christian and Haslbeck, Maximilian and
+ Matichuk, Daniel and Nipkow, Tobias",
+ title = {{Mining the Archive of Formal Proofs}},
+ journal = "LNCS",
+ volume = "9150",
+ year = "2015",
+ abstract =
+ "The Archive of Formal Proofs is a vast collection of
+ computer-checked proofs developed using the proof assistant
+ Isabelle. We perform an in-depth analysis of the archive, looking
+ at various properties of the proof developments, including size,
+ dependencies, and proof style. This gives some insights into the
+ nature of formal proofs.",
+ paper = "Blan15.pdf"
+}
+
+\end{chunk}
+
+\index{Horozal, Fulya}
+\index{Rabe, Florian}
+\begin{chunk}{axiom.bib}
+@article{Horo15,
+ author = "Horozal, Fulya and Rabe, Florian",
+ title = {{Formal Logic Definitions for Interchange Languages}},
+ journal = "LNCS",
+ volume = "9150",
+ year = "2015",
+ abstract =
+ "System integration often requires standardized interchange
+ languages, via which systems can exchange mathematical
+ knowledge. Major examples are the MathML-based markup languages
+ and TPTP. However, these languages standardize only the syntax of
+ the exchanged knowledge, which is insufficient when the involved
+ logics are complex or numerous. Logical frameworks, on the other
+ hand, allow representing the logics themselves (and are thus aware
+ of the semantics), but they abstract from the concrete syntax.
+
+ Maybe surprisingly, until recently, state-of-the-art logical
+ frameworks were not quite able to adequately represent logics
+ commonly used in formal systems. Using a recent extension of the
+ logical framework LF, we show how to give concise formal
+ definitions of the logics used in TPTP. We can also formally
+ define translations and combinations between the various TPTP
+ logics. This allows us to build semantics-aware tool support such
+ as type-checking TPTP content.
+
+ While our presentation focuses on the current TPTP logics, our
+ approach can be easily extended to other logics and interchange
+ languages. In particular, our logic representations can be used
+ with both TPTP and MathML. Thus, a single definition of the
+ semantics can be used with either interchange syntax.",
+ paper = "Horo15.pdf"
+}
+
+\end{chunk}
+
+\index{Rabe, Florian}
+\begin{chunk}{axiom.bib}
+@article{Rabe15,
+ author = "Rabe, Florian",
+ title = {{Generic Literals}},
+ journal = "LNCS",
+ volume = "9150",
+ year = "2015",
+ abstract =
+ "MMT is a formal framework that combines the flexibility of
+ knowledge representation languages like OPENMATH with the formal
+ rigor of logical frameworks like LF. It systematically abstracts
+ from theoretical and practical aspects of individual formal
+ languages and tries to develop as many solutions as possible
+ generically.
+
+ In this work, we allow MMT theories to declare user-defined
+ literals, which makes literals as user-extensible as operators,
+ axioms, and notations. This is particularly important for
+ framework languages, which must be able to represent any choice of
+ literals. Theoretically, our literals are introduced by importing
+ a model that defines the denotations of some types and function
+ symbols. Practically, MMT is coupled with a programming language,
+ in which these models are defined.
+
+ Our results are implemented in the MMT system. In particular,
+ literals and computation on them are integrated with the parser
+ and type checker.",
+ paper = "Rabe15.pdf"
+}
+
+\end{chunk}
+
+\index{Carette, Jacques}
+\index{Farmer, William M.}
+\index{Kohlhase, Michael}
+\begin{chunk}{axiom.bib}
+@article{Care14,
+ author = "Carette, Jacques and Farmer, William M. and Kohlhase, Michael",
+ title = {{Realms: A Structure for Consolidating Knowledge about
+ Mathematical Theories}},
+ journal = "LNCS",
+ volume = "8543",
+ year = "2014",
+ abstract =
+ "Since there are different ways of axiomatizing and developing a
+ mathematical theory, knowledge about such a theory may reside in
+ many places and in many forms within a library of formalized
+ mathematics. We introduce the notion of a realm as a structure for
+ consolidating knowledge about a mathematical theory. A realm
+ contains several axiomatizations of a theory that are separately
+ developed. Views interconnect these developments and establish
+ that the axiomatizations are equivalent in the sense of being
+ mutually interpretable. A realm also contains an external interface
+ that is convenient for users of the library who want to apply the
+ concepts and facts of the theory without delving into the details
+ of how the concepts and facts were developed. We illustrate the
+ utility of realms through a series of examples. We also give an
+ outline of the mechanisms that are needed to create and maintain
+ realms.",
+ paper = "Care14.pdf"
+}
+
+\end{chunk}
+
+\index{Bradford, Russell}
+\index{Davenport, James H.}
+\index{England, Matthew}
+\index{Wilson, David}
+\begin{chunk}{axiom.bib}
+@article{Brad13a,
+ author = "Bradford, Russell and Davenport, James H. and England, Matthew
+ and Wilson, David",
+ title = {{Optimising Problem Formulation for Cylindrical Algebraic
+ Decomposition}},
+ journal = "LNCS",
+ volume = "7961",
+ year = "2013",
+ abstract =
+ "Cylindrical Algebraic Decomposition (CAD) is an important tool
+ for the study of real algebraic geometry with many applications
+ both within mathematics and elsewhere. It is known to have doubly
+ exponential complexity in the number of variables in the worst
+ case, but the actual computation time can vary greatly. It is
+ possible to offer different formulations for a given problem
+ leading to great differences in tractability. In this paper we
+ suggest a new measure for CAD complexity which takes into account
+ the real geometry of the problem. This leads to new heuristics for
+ choosing: the variable ordering for a CAD problem, a designated
+ equational constraint, and formulations for truth-table invariant
+ CADs (TTICADs). We then consider the possibility of using Groebner
+ bases to precondition TTICAD and when such formulations constitute
+ the creation of a new problem.",
+ paper = "Brad13a.pdf"
+}
+
+\end{chunk}
+
+\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
+@article{Farm13a,
+ author = "Farmer, William M.",
+ title = {{The Formalization of Syntax-Based Mathematical Algorithms
+ Using Quotation and Evaluation}},
+ journal = "LNCS",
+ volume = "7961",
+ year = "2013",
+ abstract =
+ "Algorithms like those for differentiating functional expressions
+ manipulate the syntactic structure of mathematical expressions in
+ a mathematically meaningful way. A formalization of such an
+ algorithm should include a specification of its computational
+ behavior, a specification of its mathematical meaning, and a
+ mechanism for applying the algorithm to actual
+ expressions. Achieving these goals requires the ability to
+ integrate reasoning about the syntax of the expressions with
+ reasoning about what the expressions mean. A syntax framework is a
+ mathematical structure that is an abstract model for a syntax
+ reasoning system. It contains a mapping of expressions to
+ syntactic values that represent the syntactic structures of the
+ expressions; a language for reasoning about syntactic values; a
+ quotation mechanism to refer to the syntactic value of an
+ expression; and an evaluation mechanism to refer to the value of
+ the expression represented by a syntactic value. We present and
+ compare two approaches, based on instances of a syntax framework,
+ to formalize a syntax-based mathematical algorithm in a formal
+ theory $T$. In the first approach the syntactic values for the
+ expressions manipulated by the algorithm are members of an
+ inductive type in $T$, but quotation and evaluation are functions
+ defined in the metatheory of $T$. In the second approach every
+ expression in $T$ is represented by a syntactic value, and
+ quotation and evaluation are operators in $T$ itself.",
+ paper = "Farm13a.pdf"
+}
+
+\end{chunk}
+
+\index{Heras, Jonathan}
+\index{Komendantskaya, Ekaterina}
+\begin{chunk}{axiom.bib}
+@article{Hera13,
+ author = "Heras, Jonathan and Komendantskaya, Ekaterina",
+ title = {{ML4PG in Computer Algebra Verification}},
+ journal = "LNCS",
+ volume = "7961",
+ year = "2013",
+ abstract =
+ "ML4PG is a machine-learning extension that provides statistical
+ proof hints during the process of Coq/SSReflect proof
+ development. In this paper, we use ML4PG to find proof patterns in
+ the CoqEAL library -- a library that was devised to verify the
+ correctness of Computer Algebra algorithms. In particular, we use
+ ML4PG to help us in the formalisation of an efficient algorithm to
+ compute the inverse of triangular matrices.",
+ paper = "Hera13.pdf"
+}
+
+\end{chunk}
+
+\index{Kohlhase, Michael}
+\index{Mance, Felix}
+\index{Rabe, Florian}
+\begin{chunk}{axiom.bib}
+@article{Kohl13,
+ author = "Kohlhase, Michael and Mance, Felix and Rabe, Florian",
+ title = {{A Universal Machine for Biform Theory Graphs}},
+ journal = "LNCS",
+ volume = "7961",
+ year = "2013",
+ abstract =
+ "Broadly speaking, there are two kinds of semantics-aware
+ assistant systems for mathematics: proof assistants express the
+ semantic in logic and emphasize deduction, and computer algebra
+ systems express the semantics in programming languages and
+ emphasize computation. Combining the complementary strengths of
+ both approaches while mending their complementary weaknesses has
+ been an important goal of the mechanized mathematics community for
+ some time.
+
+ We pick up on the idea of biform theories and interpret it in the
+ MMT/OMDOC framework which introduced the foundations-as-theories
+ approach, and can thus represent both logics and programming
+ languages as theories. This yields a formal, modular framework of
+ biform theory graphs which mixes specifications and implementations
+ sharing the module system and typing information.
+
+ We present automated knowledge management work flows that
+ interface to existing specification/programming tools and enable
+ an OPENMATH Machine, that operationalizes biform theories,
+ evaluating expressions by exhaustively applying the
+ implementations of the respective operators. We evaluate the new
+ biform framework by adding implementations to the OPENMATH
+ standard content dictionaries.",
+ paper = "Kohl13.pdf"
+}
+
+\end{chunk}
+
+\index{Mahboubi, Assia}
+\begin{chunk}{axiom.bib}
+@article{Mahb13,
+ author = "Mahboubi, Assia",
+ title = {{The Rooster and the Butterflies}},
+ journal = "LNCS",
+ volume = "7961",
+ year = "2013",
+ abstract =
+ "This paper describes a machine-checked proof of the Jordan-Holder
+ theorem for finite groups. This purpose of this description is to
+ discuss the representation of the elementary concepts of finite
+ group theory inside type theory. The design choices underlying
+ these representations were crucial to the successful formalization
+ of a complete proof of the Odd Order Theorem in the Coq system.",
+ paper = "Mahb13.pdf"
+}
+
+\end{chunk}
+
+\index{Tankink, Carst}
+\index{Kaliszyk, Cezary}
+\index{Urban, Josef}
+\index{Geuvers, Herman}
+\begin{chunk}{axiom.bib}
+@article{Tank13,
+ author = "Tankink, Carst and Kaliszyk, Cezary and Urban, Josef and
+ Geuvers, Herman",
+ title = {{Formal Mathematics on Display: A Wiki for Flyspeck}},
+ journal = "LNCS",
+ volume = "7961",
+ year = "2013",
+ abstract =
+ "The AGORA system is a prototype ``Wiki for Formal Mathematics'',
+ with an aim to support developing and documenting large
+ formalizations of mathematics in a proof assistant. The functions
+ implemented in AGORA include in-browser editing, strong AI/ATP
+ proof advice, verification, and HTML rendering. The HTML rendering
+ contains hyperlinks and provides on-demand explanation of the
+ proof state for each proof step. In the present paper we show the
+ prototype Flyspeck Wiki as an instance of AGORA for HOL Light
+ formalizations. The wiki can be used for formalizations of
+ mathematics and for writing informal wiki pages about
+ mathematics. Such informal pages may contain islands of formal
+ text, which is used here for providing an initial cross-linking
+ between Hales's informal Flyspeck book, and the formal Flyspeck
+ development.
+
+ The AGORA platform intends to address distributed wiki-style
+ collaboration on large formalization projects, in particular both
+ the aspect of immediate editing, verification and rendering of
+ formal code, and the aspect of gradual and mutual refactoring and
+ correspondence of the initial informal text and its
+ formalization. Here, we highlight these features with the Flyspeck
+ Wiki.",
+ paper = "Tank13.pdf"
+}
+
+\end{chunk}
+
+\index{Asperti, Andrea}
+\index{Ricciotti, Wilmer}
+\begin{chunk}{axiom.bib}
+@article{Aspe12a,
+ author = "Asperti, Andrea and Ricciotti, Wilmer",
+ title = {{A Web Interface for Matita}},
+ journal = "LNCS",
+ volume = "7362",
+ year = "2012",
+ paper = "Aspe12a.pdf"
+}
+
+\end{chunk}
+
+\index{Alama, Jesse}
+\index{Mamane, Lionel}
+\index{Urban, Josef}
+\begin{chunk}{axiom.bib}
+@article{Alam12,
+ author = "Alama, Jesse and Mamane, Lionel and Urban, Josef",
+ title = {{Dependencies in Formal Mathematics: Applications and
+ Extration for Coq and Mizar}},
+ journal = "LNCS",
+ volume = "7362",
+ year = "2012",
+ abstract =
+ "Two methods for extracting detailed formal dependencies from the
+ Coq and Mizar system are presented and compared. The methods are
+ used for dependency extraction from two large mathematical
+ repositories: the Coq Repository at Nijmegen and the Mizar
+ Mathematical Library. Several applications of the detailed
+ dependency analysis are described and proposed. Motivated by the
+ different applications, we discuss the various kinds of
+ dependencies that we are interested in, and the suitability of
+ various dependency extraction methods.",
+ paper = "Alam12.pdf"
+}
+
+\end{chunk}
+
+\index{Hetzl, Stefan}
+\begin{chunk}{axiom.bib}
+@article{Hetz12,
+ author = "Hetzl, Stefan",
+ title = {{Project Presentation: Algorithmic Structuring and
+ Compression of Proofs (ASCOP)}},
+ journal = "LNCS",
+ volume = "7362",
+ year = "2012",
+ abstract =
+ "Computer-generated proofs are typically analytic, i.e. they
+ essentially consist only of formulas which are present in the
+ theorem that is shown. In contrast, mathematical proofs written by
+ humans almost never are: they are highly structured due to the use
+ of lemmas.
+
+ The ASCOP project aims at developing algorithms and software which
+ structure and abbreviate analytic proofs by computing useful
+ lemmas. These algorithms will be based on recent groundbreaking
+ results establishing a new connection between proof theory and
+ formal language theory. This connection allows the application of
+ efficient algorithms based on formal grammars to structure and
+ compress proofs.",
+ paper = "Hetz12.pdf"
+}
+
+\end{chunk}
+
+\begin{chunk}{axiom.bib}
+@misc{Gapt19,
+ author = "Unknown",
+ title = {{GAPT: General Architecture for Proof Theory}},
+ year = "2019",
+ link = "\url{https://www.logic.at/gapt/downloads/gapt-user-manual.pdf}",
+ paper = "Gapt19.pdf"
+}
+
+\end{chunk}
+
+\index{Horozal, Fulya}
+\index{Kohlhase, Michael}
+\index{Rabe, Florian}
+\begin{chunk}{axiom.bib}
+@article{Horo12,
+ author = "Horozal, Fulya and Kohlhase, Michael and Rabe, Florian",
+ title = {{Extending MKM Formats at the Statement Level}},
+ journal = "LNCS",
+ volume = "7362",
+ year = "2012",
+ abstract =
+ "Successful representation and markup languages find a good
+ balance between giving the user freedom of expression, enforcing
+ the fundamental semantic invariants of the modeling framework, and
+ allowing machine support for the underlying semantic
+ structures. MKM formats maintain strong invariants while trying to
+ be foundationally unconstrained, which makes the induced design
+ problem particularly challenging.
+
+ In this situation, it is standard practice to define a minimal
+ core language together with a scripting/macro facility for
+ syntactic extensions that map into the core language. In practice,
+ such extension facilities are either fully unconstrained (making
+ invariants and machine support difficult) or limited to the object
+ level (keeping the statement and theory levels fixed).
+
+ In this paper we develop a general methodology for extending MKM
+ representation formats at the statement level. We show the utility
+ (and indeed necessity) of statement-level extensions by
+ redesigning the OMDoc format into a minimal, regular core language
+ (strict OMDoc) and an extension (pragmatic OMDoc) that maps into
+ strict OMDoc.",
+ paper = "Horo12.pdf"
+}
+
+\end{chunk}
+
+\index{Iancu, Mihnea}
+\index{Rabe, Florian}
+\begin{chunk}{axiom.bib}
+@article{Ianc12,
+ author = "Iancu, Mihnea and Rabe, Florian",
+ title = {{Management of Change in Declarative Languages}},
+ journal = "LNCS",
+ volume = "7362",
+ year = "2012",
+ abstract =
+ "Due to the high degree of interconnectedness of formal
+ mathematical statements and theories, human authors often have
+ difficulties anticipating and tracking the effects of a change in
+ large bodies of symbolic mathematical knowledge. Therefore, the
+ automation of change management is desirable. But while computers
+ can in principle detect and propagate changes automatically, this
+ process must take the semantics of the underlying mathematical
+ formalism into account. Therefore, concrete management of change
+ solutions are difficult to realize.
+
+ The MMT language was designed as a generic declarative language
+ that captures universal structural features while avoiding a
+ commitment to a particular formalism. Therefore, it provides a
+ promising framework for the systematic study of changes in
+ declarative languages. We leverage this framework by providing a
+ generic change management solution at the MMT level, which can be
+ instantiated for arbitrary specific languages.",
+ paper = "Ianc12.pdf"
+}
+
+\end{chunk}
+
+\index{Wilson, David J.}
+\index{Bradford, Russell J.}
+\index{Davenport, James H.}
+\begin{chunk}{axiom.bib}
+@article{Wils12,
+ author = "Wilson, David J. and Bradford, Russell J. and
+ Davenport, James H.",
+ title = {{Speeding Up Cylindrical Algebraic Decomposition by
+ Groebner Bases}},
+ journal = "LNCS",
+ volume = "7362",
+ year = "2012",
+ abstract =
+ "Groebner Bases and Cylindrical Algebraic Decomposition are
+ generally thought of as two, rather different, methods of looking
+ at systems of equations and, in the case of Cylindrical Algebraic
+ Decomposition, inequalities. However, even for a mixed system of
+ equalities and inequalities, it is possible to apply Groebner
+ bases to the (conjoined) equalities before invoking CAD. We see
+ that this is, quite often but not always, a beneficial
+ preconditioning of the CAD problem.
+
+ It is also possible to precondition the (conjoined) inequalities
+ with respect to the equalities, and this can also be useful in
+ many cases.",
+ paper = "Wils12.pdf"
+}
+
+\end{chunk}
+
+\index{Horozal, Fulya}
+\index{Iacob, Alin}
+\index{Jucovschi, Constantin}
+\index{Kohlhase, Michael}
+\index{Rabe, Florian}
+\begin{chunk}{axiom.bib}
+@article{Horo11,
+ author = "Horozal, Fulya and Iacob, Alin and Jucovschi, Constantin
+ and Kohlhase, Michael and Rabe, Florian",
+ title = {{Combining Source, Content, Presentation, Narration, and
+ Relational Presentation}},
+ journal = "LNCS",
+ volume = "6824",
+ year = "2011",
+ abstract =
+ "In this paper, we try to bridge the gap between different
+ dimensions / incarnations of mathematical knowledge: MKM
+ representation formats (content), their human-oriented languages
+ (source, presentation), their narrative linearizations
+ (narration), and relational presentations used in the semantic
+ web. The central idea is to transport solutions from software
+ engineering to MKM regarding the parallel interlinked maintenance
+ of the different incarnations. We show how the integration of
+ these incarnations can be utilized to enrich the authoring and
+ viewing processes, and we evaluate our infrastructure on the LATIN
+ Logic Atlas, a modular library of logic formalizations, and a set
+ of computer science lecture notes written in STEX -- a modular,
+ semantic variant of LATEX.",
+ paper = "Horo11.pdf"
+}
+
+\end{chunk}
+
+\index{Krebbers, Robbert}
+\index{Spitters, Bas}
+\begin{chunk}{axiom.bib}
+@article{Kreb11,
+ author = "Krebbers, Robbert and Spitters, Bas",
+ title = {{Computer Certified Efficient Exact Reals in Coq}},
+ journal = "LNCS",
+ volume = "6824",
+ year = "2011",
+ abstract =
+ "Floating point operations are fast, but require continuous effort
+ on the part of the user in order to ensure that the results are
+ correct. This burden can be shifted away from the user by
+ providing a library of exact analysis in which the computer
+ handles the error estimates. We provide an implementation of the
+ exact real numbers in the Coq proof assistant. This improves on
+ the earlier Coq-implementation by O'Connor in two ways: we use
+ dyadic rationals built from the machine integers and we optimize
+ computation of power series by using approximate
+ division. Moreover, we use type classes for clean mathematical
+ interfaces. This appears to be the first time that type classes
+ are used in heavy computation. We obtain over a 100 times speed up
+ of the basic operations and indications for improving the Coq system.",
+ paper = "Kreb11.pdf"
+}
+
+\end{chunk}
+
+\index{Rabe, Florian}
+\index{Kohlhase, Michael}
+\index{Coen, Claudio Sacerdoti}
+\begin{chunk}{axiom.bib}
+@article{Rabe11,
+ author = "Rabe, Florian and Kohlhase, Michael and Coen, Claudio Sacerdoti",
+ title = {{A Foundational View on Integration Problems}},
+ journal = "LNCS",
+ volume = "6824",
+ year = "2011",
+ abstract =
+ "The integration of reasoning and computation services across
+ system and language boundaries is a challenging problem of
+ computer science. In this paper, we use integration for the
+ scenario where we have two systems that we integrate by moving
+ problems and solutions between them. While this scenario is often
+ approached from an engineering perspective, we take a foundational
+ view. Based on the generic declarative language MMT, we develop a
+ theoretical framework for system integration using theories and
+ partial theory morphisms. Because MMT permits representations of
+ the meta-logical foundations themselves, this includes integration
+ across logics. We discuss safe and unsafe integration schemes and
+ devise a general form of safe integration.",
+ paper = "Rabe11.pdf"
+}
+
+\end{chunk}
+
+\index{Asperti, Andrea}
+\index{Coen, Claudio Sacerdoti}
+\begin{chunk}{axiom.bib}
+@article{Aspe10a,
+ author = "Asperti, Andrea and Coen, Claudio Sacerdoti",
+ title = {{Some Considerations on the Usability of Interactive Provers}},
+ journal = "LNCS",
+ volume = "6167",
+ year = "2010",
+ abstract =
+ "In spite of the remarkable achievements recently obtained in the
+ field of mechanization of formal reasoning, the overall usability
+ of interactive provers does not seem to be sensibly improved since
+ the advent of the ``second generation'' of systems, in the mid of
+ the eighties. We try to analyze the reasons of such a slow
+ progress, pointing out the main problems and suggesting some
+ possible research directions.",
+ paper = "Aspe10a.pdf",
+ keywords = "DONE"
+}
+
+\end{chunk}
+
+\index{Calmet, Jacques}
+\index{Campbell, John A.}
+\begin{chunk}{axiom.bib}
+@article{Calm10,
+ author = "Calmet, Jacques and Campbell, John A.",
+ title = {{A Revisited Perspective on Symbolic Mathematical Computing
+ and Artificial Intelligence}},
+ journal = "LNCS",
+ volume = "6167",
+ year = "2010",
+ abstract =
+ "We provide a perspective on the current state and possible future
+ of links between symbolic mathematical computing and artificial
+ intelligence, on the occasion of the 10th biennial conference
+ (AISMC, later AISC) devoted to those connections. It follows a
+ similar perspective expressed for the first such conference in 1992
+ and then revised and expanded 5 years later. Issues related to the
+ computational management of mathematical knowledge are
+ highlighted.",
+ paper = "Calm10.pdf"
+}
+
+\end{chunk}
+
+\index{Carette, Jacques}
+\index{Sexton, Alan P.}
+\index{Sorge, Volker}
+\index{Watt, Stephen M.}
+\begin{chunk}{axiom.bib}
+@article{Care10a,
+ author = "Carette, Jacques and Sexton, Alan P. and Sorge, Volker and
+ Watt, Stephen M.",
+ title = {{Symbolic Domain Decomposition}},
+ journal = "LNCS",
+ volume = "6167",
+ year = "2010",
+ abstract =
+ "Decomposing the domain of a function into parts has many uses in
+ mathematics. A domain may naturally be a union of pieces, a
+ function may be defined by cases, or different boundary conditions
+ may hold on different regions. For any particular problem the
+ domain can be given explicitly, but when dealing with a family of
+ problems given in terms of symbolic parameters, matters become
+ more difficult. This article shows how hybrid sets, that is
+ multisets allowing negative multiplicity, may be used to express
+ symbolic domain decompositions in an efficient, elegant and
+ uniform way, simplifying both computation and reasoning. We apply
+ this theory to the arithmetic of piecewise functions and symbolic
+ matrices and show how certain operations may be reduced from
+ exponential to linear complexity.",
+ paper = "Care10a.pdf"
+}
+
+\end{chunk}
+
+\index{Cohen, Cyril}
+\index{Mahboubi, Assia}
+\begin{chunk}{axiom.bib}
+@article{Cohe10,
+ author = "Cohen, Cyril and Mahboubi, Assia",
+ title = {{A Formal Quantifier Elimination for Algebraically Closed Fields}},
+ journal = "LNCS",
+ volume = "6167",
+ year = "2010",
+ abstract =
+ "We prove formally that the first order theory of algebraically
+ closed fields enjoys quantifier elimination, and hence is
+ decidable. This proof is organized in two modular parts. We first
+ reify the first order theory of rings and prove that quantifier
+ elimination leads to decidability. Then we implement an algorithm
+ which constructs a quantifier free formula from any first order
+ formula in the theory of ring. If the underlying ring is in fact
+ an algebraically closed field, we prove that the two formulas have
+ the same semantic. The algorithm producing the quantifier free
+ formula is programmed in continuation passing style, which leads
+ to both a concise program and an elegant proof of semantics
+ correctness.",
+ paper = "Cohe10.pdf"
+}
+
+\end{chunk}
+
+\index{Dominguez, Cesar}
+\index{Rubio, Julio}
+\begin{chunk}{axiom.bib}
+@article{Domi10,
+ author = "Dominguez, Cesar and Rubio, Julio",
+ title = {{Computing in Coq with Infinite Algebraic Data Structures}},
+ journal = "LNCS",
+ volume = "6167",
+ year = "2010",
+ abstract =
+ "Computational content encoded into constructive type theory
+ proofs can be used to make computing experiments over concrete
+ data structures. In this paper, we explore this possibility when
+ working in Coq with chain complexes of infinite type (that is to
+ say, generated by infinite sets) as a part of the formalization of
+ a hierarchy of homological algebra structures.",
+ paper = "Domi10.pdf"
+}
+
+\end{chunk}
+
+\index{Kerber, Manfred}
+\begin{chunk}{axiom.bib}
+@article{Kerb10,
+ author = "Kerber, Manfred",
+ title = {{Proofs, Proofs, Proofs, and Proofs}},
+ journal = "LNCS",
+ volume = "6167",
+ year = "2010",
+ abstract =
+ "In logic there is a clear concept of what constitutes a proof and
+ what not. A proof is essentially defined as a finite sequence of
+ formulae which are either axioms or derived by proof rules from
+ formulae earlier in the sequence. Sociologically, however, it is
+ more difficult to say what should constitute a proof and what
+ not. In this paper we will look at different forms of proofs and
+ try to clarify the concept of proof in the wider meaning of the
+ term. This has implications on how proofs should be represented
+ formally.",
+ paper = "Kerb10.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Nederpelt, R.}
+\index{Kamareddine, F.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Nede01,
+ author = "Nederpelt, R. and Kamareddine, F.",
+ title = {{An Abstract Syntax for a Formal Language of Mathematics}},
+ booktitle = "4th int. Tbilisi Symp. on Language, Logic, and Computation",
+ publisher = "unknown",
+ year = "2001",
+ abstract =
+ "This paper provides an abstract syntax for a formal language of
+ mathematics. We call our language Weak Type Theory (abbreviated
+ WTT). WTT will be as faithful as possible to the mathematician's
+ language yet will be formal and will not allow ambiguities. WTT
+ can be used as an intermediary between the natural language of
+ the mathematician and the formal language of the logician. As far
+ as we know, this is the first extensive formalization of an
+ abstract syntax of a formal language of mathematics.",
+ paper = "Nede01.pdf"
+}
+
+\end{chunk}
+
+\index{Barbeau, Edward J.}
+\begin{chunk}{axiom.bib}
+@book{Barb00,
+ author = "Barbeau, Edward J.",
+ title = {{Mathematical Fallacies, Flaws, and Flimflam}},
+ publisher = "American Mathematical Society",
+ year = "2000",
+ paper = "Barb00.pdf"
+}
+
+\end{chunk}
+
+\index{Baker, Josef B.}
+\index{Sexton, Alan P.}
+\index{Sorge, Volker}
+\begin{chunk}{axiom.bib}
+@article{Bake09,
+ author = "Baker, Josef B. and Sexton, Alan P. and Sorge, Volker",
+ title = {{A Linear Grammar Approach to Mathematical Formula
+ Recognition from PDF}},
+ journal = "LNCS",
+ volume = "5625",
+ year = "2009",
+ abstract =
+ "Many approaches have been proposed over the years for the
+ recognition of mathematical formulae from scanned documents. More
+ recently a need has arisen to recognise formulae from PDF
+ documents. Here we can avoid ambiguities introduced by traditional
+ OCR approaches and instead extract perfect knowledge of the
+ characters used in formulae directly from the document. This can
+ be exploited by formula recognition techniques to achieve correct
+ results and high performance.
+
+ In this paper we revisit an old grammatical approach to formula
+ recognition, that of Anderson from 1968, and assess its
+ applicability with respect to data extracted from PDF
+ documents. We identify some problems of the original method when
+ applied to common mathematical expressions and show how they can
+ be overcome. The simplicity of the original method leads to a very
+ efficient recognition technique that not only is very simple to
+ implement but also yields results of high accuracy for the
+ recognition of mathematical formulae from PDF documents.",
+ paper = "Bake09.pdf"
+}
+
+\end{chunk}
+
+\index{Biha, Sidi Ould}
+\begin{chunk}{axiom.bib}
+@article{Biha09,
+ author = "Biha, Sidi Ould",
+ title = {{Finite Group Representation Theory with Coq}},
+ journal = "LNCS",
+ volume = "5625",
+ year = "2009",
+ abstract =
+ "Representation theory is a branch of algebra that allows the
+ study of groups through linear applications, i.e. matrices. Thus
+ problems in abstract groups can be reduced to problems on
+ matrices. Representation theory is the basis for character
+ theory. In this paper we present a formalization of finite groups
+ representation theory in the Coq system that includes a
+ formalization of Maschke's theorem on reducible finite group algebra.",
+ paper = "Biha09.pdf"
+}
+
+\end{chunk}
+
+\index{Calmet, Jacques}
+\begin{chunk}{axiom.bib}
+@article{Calm09,
+ author = "Calmet, Jacques",
+ title = {{Abstraction-Based Information Technology: A Framework for
+ Open Mechanized Reasoning}},
+ journal = "LNCS",
+ volume = "5625",
+ year = "2009",
+ abstract =
+ "OMRS (Open Mechanized Reasoning Systems) was designed for
+ Automated Theorem Proving and then extended to Computer
+ Algebra. These are the two domains at the heart of the Calculemus
+ approach. An obvious question is to assess whether such an
+ approach can be extended to new domains either within AI or
+ outside of AI. There have been several attempts to turn the world
+ into a computational system. This talk stays away from such
+ general attempts and introduces a framework that is fully set
+ within AI. It extends the basic concepts of OMRS to diverse fields
+ ranging from information technology to sociology through law as
+ illustrated by examples. The main motivation is to claim that
+ whatever the selected approach, Artificial Intelligence is gaining
+ enough strength and power to reach new frontiers and to turn
+ challenges that are not a priori of a purely computational nature
+ into AI domains.",
+ paper = "Calm09.pdf"
+}
+
+\end{chunk}
+
+\index{Calude, Cristian S.}
+\index{Muller, Christine}
+\begin{chunk}{axiom.bib}
+@article{Calu09,
+ author = "Calude, Cristian S. and Muller, Christine",
+ title = {{Formal Proof: Reconciling Correctness and Understanding}},
+ journal = "LNCS",
+ volume = "5625",
+ year = "2009",
+ abstract =
+ "Hilbert's concept of formal proof is an ideal of rigour for
+ mathematics which has important applications in mathematical
+ logic, but seems irrelevant for the practice of mathematics. The
+ advent, in the last twenty years, of proof assistants was followed
+ by an impressive record of deep mathematical theorems formally
+ proved. Formal proof is practically achievable. With formal proof,
+ correctness reaches a standard that no pen-and-paper proof can
+ match, but an essential component of mathematics -- the insight
+ and understanding -- seems to be in short supply. So, what makes a
+ proof understandable? To answer this question we first suggest a
+ list of symptoms of understanding. We then propose a vision of an
+ environment in which users can write and check formal proofs as
+ well as query them with reference to the symptoms of
+ understanding. In this way, the environment reconciles the main
+ features of proof: correctness and understanding.",
+ paper = "Calu09.pdf"
+}
+
+\end{chunk}
+
+\index{Davenport, James H.}
+\index{Kohlhase, Michael}
+\begin{chunk}{axiom.bib}
+@article{Dave09,
+ author = "Davenport, James H. and Kohlhase, Michael",
+ title = {{Unifying Math Ontologies: A Tale of Two Standards}},
+ journal = "LNCS",
+ volume = "5625",
+ year = "2009",
+ abstract =
+ "One of the fundamental and seemingly simple aims of mathematical
+ knowledge management (MKM) is to develop and standardize formats
+ that allow to ``represent the meaning of the objects of
+ mathematics''. The open formats OpenMath and MathML address this,
+ but differ subtly in syntax, rigor, and structural viewpoints
+ (notably over calculus). To avoid fragmentation and smooth out
+ interoperability obstacles, effort is under way to align them into
+ a joint format OpenMath/MathML 3. We illustrate the issues that
+ come up in such an alignment by looking at three main areas: bound
+ variables and conditions, calculus (which relates to the previous)
+ and ``lifted'' n-ary operators.",
+ paper = "Dave09.pdf"
+}
+
+\end{chunk}
+
+\index{Heras, Jonathan}
+\index{Pascual, Vico}
+\index{Rubio, Julio}
+\begin{chunk}{axiom.bib}
+@article{Hera09,
+ author = "Heras, Jonathan and Pascual, Vico and Rubio, Julio",
+ title = {{Using Open Mathematical Documents to Interface Computer
+ Algebra and Proof Assistant Systems}},
+ journal = "LNCS",
+ volume = "5625",
+ year = "2009",
+ abstract =
+ "Mathematical Knowledge can be encoded by means of Open
+ Mathematical Documents (OMDoc) to interface both Computer Algebra
+ and Proof Assistant systems. In this paper, we show how a unique
+ OMDoc structure can be used to dynamically generate, both a
+ Graphical User Interface for a Computer Algebra system and a
+ script for a Proof Assistant. This generic approach has been made
+ concrete through a first prototype interfacing the Kenzo Computer
+ Algebra system with the ACL2 Theorem Prover, both based on the
+ Common Lisp programming language. An OMDoc repository has been
+ developed allowing the user to customize the application in an
+ easy way.",
+ paper = "Hera09.pdf"
+}
+
+\end{chunk}
+
+\index{Rich, Albert D.}
+\index{Jeffrey, David J.}
+\begin{chunk}{axiom.bib}
+@article{Rich09,
+ author = "Rich, Albert D. and Jeffrey, David J.",
+ title = {{A Knowledge Repository for Indefinite Integration
+ Based on Transformation Rules}},
+ journal = "LNCS",
+ volume = "5625",
+ year = "2009",
+ abstract = "
+ Taking the specific problem domain of indefinite integration, we
+ describe the on-going development of a repository of mathematical
+ knowledge based on transformation rules. It is important that the
+ repository be not confused with a look-up table. The database of
+ transformation rules is at present encoded in Mathematica, but this is
+ only one convenient form of the repository, and it could be readily
+ translated into other formats. The principles upon which the set of
+ rules is compiled is described. One important principle is
+ minimality. The benefits of the approach are illustrated with
+ examples, and with the results of comparisons with other approaches.",
+ paper = "Rich09.pdf"
+}
+
+\end{chunk}
+
+\index{Watt, Stephen M.}
+\begin{chunk}{axiom.bib}
+@article{Watt09,
+ author = "Watt, Stephen M.",
+ title = {{Algorithms for the Functional Decomposition of Laurent
+ Polynomials}},
+ journal = "LNCS",
+ volume = "5625",
+ year = "2009",
+ abstract =
+ "Recent work has detailed the conditions under which univariate
+ Laurent polynomials have functional decompositions. This paper
+ presents algorithms to compute such univariate Laurent polynomial
+ decompositions efficiently and gives their multivariate
+ generalizations.
+
+ One application of functional decomposition of Laurent polynomials
+ is the functional decomposition of so-called ``symbolic
+ polynomials''. These are polynomial-like objects whose exponents
+ are themselves integer-valued polynomials rather than
+ integers. The algebraic independence of $X$, $X^n$, $X^{n^2/2}$,
+ etc., and some elementary results on integer-valued polynomials
+ allow problems with symbolic polynomials to be reduced to problems
+ with multivariate Laurent polynomials. Hence we are interested in
+ the functional decomposition of these objects.",
+ paper = "Watt09.pdf"
+}
+
+\end{chunk}
+
+\index{Aspinall, David}
+\index{Denney, Ewen}
+\index{Luth, Christoph}
+\begin{chunk}{axiom.bib}
+@article{Aspi08,
+ author = "Aspinall, David and Denney, Ewen and Luth, Christoph",
+ title = {{A Tactic Language for Hiproofs}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "We introduce and study a tactic language, Hitac, for constructing
+ hierarchical proofs, known as hiproofs. The idea of hiproofs is to
+ superimpose a labelled hierarchical nesting on an ordinary proof
+ tree. The labels and nesting are used to describe the organisation
+ of the proof, typically relating to its construction process. This
+ can be useful for understanding and navigating the proof. Tactics
+ in our language construct hiproof structure together with an
+ underlying proof tree. We provide both a big-step and a small-step
+ operational semantics for evaluating tactic expressions. The
+ big-step semantics captures the intended meaning, whereas the
+ small-step semantics hints at possible implementations and
+ provides a unified notion of proof state. We prove that these
+ notions are equivalent and construct valid proofs.",
+ paper = "Aspi08.pdf"
+}
+
+\end{chunk}
+
+\index{Backeljauw, Franky}
+\index{Becuwe, Stefan}
+\index{Cuyt, Annie}
+\begin{chunk}{axiom.bib}
+@article{Back08,
+ author = "Backeljauw, Franky and Becuwe, Stefan and Cuyt, Annie",
+ title = {{Validated Evaluation of Special Mathematical Functions}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "Because of the importance of special functions, several books and
+ a large collection of papers have been devoted to the numerical
+ computation of these functions, the most well-known being the
+ Abramowitz and Stegun handbook. But up to this date, no
+ environment offers routines for the provable correct evaluation of
+ these special functions.
+
+ We point out how series and limit-periodic continued fraction
+ representation of the functions can be helpful in this
+ respect. Our scalable precision technique is mainly based on the
+ use of sharpened a priori truncation and round-off error upper
+ bounds, in case of real arguments. The implementation is validated
+ in the sense that it returns a sharp interval enclosure for the
+ requested function evaluation, at the same cost as the evaluation.",
+ paper = "Back08.pdf"
+}
+
+\end{chunk}
+
+\index{Bouche, Thierry}
+\begin{chunk}{axiom.bib}
+@article{Bouc08,
+ author = "Bouche, Thierry",
+ title = {{Digital Mathematics Libraries: The Good, the Bad, the Ugly}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "The mathematicians' Digital mathematics library (DML), which is
+ not to be confused with libraries of mathematical objects
+ represented in some digital format, is the generous idea that all
+ mathematics ever published should end up in digital form so that
+ it would be more easily referenced, accessible, usable. This
+ concept was formulated at the very beginning of this century, and
+ yielded a lot of international activity that culminated around
+ years 2002-2005. While it is estimated that a substantial part of
+ the existing math literature is already available in some digital
+ format, nothing looking like one digital mathematics library has
+ emerged, but a multiplicity of competing electronic offers, with
+ unique standards, features, business models, access policies,
+ etc. -- even though the contents themselves overlap somewhat,
+ while leaving wide areas untouched. The millennium's appealing
+ idea has become a new Tower of Babel.
+
+ It is not obvious how much of the traditional library functions we
+ should give up while going digital. The point of view shared by
+ many mathematicians is that we should be able to find a reasonable
+ archiving policy fitting all stakeholders, allowing to translate
+ the essential features of the past library system -- which is the
+ central infrastructure of all math departments worldwide -- in the
+ digital paradigm, while enhancing overall performances thanks to
+ dedicated information technology.
+
+ The vision of this library is rather straightforward: a third
+ party to the academic publishing system, preserving, indexing, and
+ keeping current its digital collections through a distributed
+ network of partners curating the physical holdings, and a
+ centralized access facility making use of innovative mining and
+ interlinking techniques for easy navigation and discovery.
+
+ However, the fragmentation level is so high that the hope of a
+ unique portal providing seamless access to everything relevant to
+ mathematical research seems now completely out of reach.
+ Nevertheless, we have lessons to learn from each one of the
+ already numerous projects running. One of them is that there are
+ too many items to deal with, and too many different initial choices
+ over metadata sets and formats: it won't be possible to find a
+ nontrivial greatest common divisor coping with everything already
+ available, and manual upgrading is highly improbable.
+
+ This is where future management techniques for loosely formalised
+ mathematical knowledge could provide a new impetus by at last
+ enabling a minimum set of features across projects borders through
+ automated procedures. We can imagine e.g. math-aware OCR on
+ scanned pages, concurrently with interpreters of electronic
+ sources of born digital texts, both producing searchable full
+ texts in a compatible semistructured format. The challenge is
+ ultimately to take advantage of the high formalisation of
+ mathematical texts rather than merely ignoring it!
+
+ With these considerations in mind, the talk will focus on
+ achievements, limitations, and failures of existing digital
+ mathematics libraries, taking the NUMDAM and CEDRAM programs as
+ principal examples, hence the speaker himself is the target.",
+ paper = "Bouc08.pdf"
+}
+
+\end{chunk}
+
+\index{Dominguez, Cesar}
+\begin{chunk}{axiom.bib}
+@article{Domi08,
+ author = "Dominguez, Cesar",
+ title = {{Formalizing in Coq Hidden Algebras to Specify Symbolic
+ Computation Systems}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "This work is an attempt to formalize, using the Coq proof
+ assistant, the algebraic specification of the data structures
+ appearing in two symbolic computation systems for algebraic
+ topology called EAT and Kenzo. The specification of these
+ structures have been obtained through an operation, called imp
+ operation, between different specification frameworks as standard
+ algebraic specifications and hidden specifications. Reusing
+ previous Coq implementations of universal algebra and category
+ theory we have proposed a Coq formalization of the imp operation,
+ extending the representation to the particular hidden algebras
+ which take part in this operation.",
+ paper = "Domi08.pdf"
+}
+
+\end{chunk}
+
+\index{Kohlhase, Michael}
+\index{Muller, Christine}
+\index{Rabe, Florian}
+\begin{chunk}{axiom.bib}
+@article{Kohl08a,
+ author = "Kohlhase, Michael and Muller, Christine and Rabe, Florian",
+ title = {{Notations for Living Mathematical Documents}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "Notations are central for understanding mathematical
+ discourse. Readers would like to read notations that transport the
+ meaning well and prefer notations that are familiar to
+ them. Therefore, authors optimize the choice of notations with
+ respect to these two criteria, while at the same time trying to
+ remain consistent over the document and their own prior
+ publications. In print media where notations are fixed at
+ publication time, this is an over-constrained problem. In living
+ documents notations can be adapted at reading time, taking reader
+ preferences into account.
+
+ We present a representational infrastructure for notations in
+ living mathematical documents. Mathematical notations can be
+ defined declaratively. Author and reader can extensionally define
+ the set of available notation definitions at arbitrary document
+ levels, and they can guide the notation selection function via
+ intensional annotations.
+
+ We give an abstract specification of notation definitions and the
+ flexible rendering algorithms and show their coverage on
+ paradigmatic examples. We show how to use this framework to render
+ OPENMATH and Content-MathML to Presentation-MathML, but the
+ approach extends to arbitrary content and presentation formats. We
+ discuss prototypical implementations of all aspects of the
+ rendering pipeline.",
+ paper = "Kohl08a.pdf"
+}
+
+\end{chunk}
+
+\index{Stratford, Jonathan}
+\index{Davenport, James H.}
+\begin{chunk}{axiom.bib}
+@article{Stra08,
+ author = "Stratford, Jonathan and Davenport, James H.",
+ title = {{Unit Knowledge Management}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "In 9, various observations on the handling of (physical) units in
+ OpenMath were made. In this paper, we update those observations,
+ and make some comments based on a working unit converter that,
+ because of its OpenMath-based design, is modular, extensible, and
+ reflective. We also note that some of the issues in an effective
+ converter, such as the rules governing abbreviations, being more
+ linguistic than mathematical, do not lend themselves to easy
+ expression in OpenMath.",
+ paper = "Stra08.pdf"
+}
+
+\end{chunk}
+
+\index{Andres, Mirian}
+\index{Lamban, Laureano}
+\index{Rubio, Julio}
+\begin{chunk}{axiom.bib}
+@article{Andr07,
+ author = "Andres, Mirian and Lamban, Laureano and Rubio, Julio",
+ title = {{Executing in Common Lisp, Proving in ACL2}},
+ journal = "LNCS",
+ volume = "4573",
+ year = "2007",
+ abstract =
+ "In this paper, an approach to integrate an already-written Common
+ Lisp program for algebraic manipulation with ACL2 proofs of
+ properties of that program is presented. We report on a particular
+ property called ``cancellation theorem'', which has been proved in
+ ACL2, and could be applied to several problems in the field of
+ Computational Algebraic Topology.",
+ paper = "Andr07.pdf"
+}
+
+\end{chunk}
+
+\index{Davenport, James H.}
+\begin{chunk}{axiom.bib}
+@article{Dave07a,
+ author = "Davenport, James H.",
+ title = {{What Might ``Understand a Function'' Mean?}},
+ journal = "LNCS",
+ volume = "4573",
+ year = "2007",
+ abstract =
+ "Many functions in classical mathematics are largely defined in
+ terms of their derivatives, so Bessel's function is ``the''
+ solution of Bessel's equation, etc. For definiteness, we need to
+ add other properties, such as initial values, branch cuts,
+ etc. What actually makes up ``the definition'' of a function in
+ computer algebra? The answer turns out to be a combination of
+ arithmetic and analytic properties.",
+ paper = "Dave07a.pdf"
+}
+
+\end{chunk}
+
+\index{Asperti, Andrea}
+\index{Geuvers, Herman}
+\index{Loeb, Iris}
+\index{Mamane, Lionel Elie}
+\index{Coen, Claudio Sacerdoti}
+\begin{chunk}{axiom.bib}
+@article{Aspe06,
+ author = "Asperti, Andrea and Geuvers, Herman and Loeb, Iris and
+ Mamane, Lionel Elie and Coen, Claudio Sacerdoti",
+ title = {{An Interactive Algebra Course with Formalised Proofs and
+ Definitions}},
+ journal = "LNCS",
+ volume = "4108",
+ year = "2006",
+ abstract =
+ "We describe a case-study of the application of web technology to
+ create web-based didactic material out of a repository of formal
+ mathematics, using the structure of an existing course. The paper
+ discusses the difficulties related to associating notation to a
+ formula, the embedding of formal notions into a document (the
+ ``view''), and the rendering of proofs.",
+ paper = "Aspe06.pdf"
+}
+
+\end{chunk}
+
+\index{Cairns, Paul}
+\index{Gow, Jeremy}
+\begin{chunk}{axiom.bib}
+@article{Cair06,
+ author = "Cairns, Paul and Gow, Jeremy",
+ title = {{Literate Proving: Presenting and Documenting Formal Proofs}},
+ journal = "LNCS",
+ volume = "4108",
+ year = "2006",
+ abstract =
+ "Literate proving is the analogue for literate programming in the
+ mathematical realm. That is, the goal of literate proving is for
+ humans to produce clear expositions of formal mathematics that
+ could even be enjoyable for people to read whilst remaining
+ faithful representations of the actual proofs. This paper
+ describes maze, a generic literate proving system. Authors markup
+ formal proof files, such as Mizar files, with arbitrary XML and
+ use maze to obtain the selected extracts and transform them for
+ presentation, e.g. as Latex. To aid its use, maze has built in
+ transformations that include pretty printing and proof sketching
+ for inclusion in latex documents. These transformations challenge
+ the concept of faithfulness in literate proving but it is argued
+ that this should be a distinguishing feature of literate proving
+ from literate programming.",
+ paper = "Cair06.pdf"
+}
+
+\end{chunk}
+
+\index{Raja, Amar}
+\index{Rayner, Matthew}
+\index{Sexton, Alan}
+\index{Sorge, Volker}
+\begin{chunk}{axiom.bib}
+@article{Raja06,
+ author = "Raja, Amar and Rayner, Matthew and Sexton, Alan and
+ Sorge, Volker",
+ title = {{Towards a Parser for Mathematical Formula Recognition}},
+ journal = "LNCS",
+ volume = "4108",
+ year = "2006",
+ abstract =
+ "For the transfer of mathematical knowledge from paper to
+ electronic form, the reliable automatic analysis and understanding
+ of mathematical texts is crucial. A robust system for this task
+ needs to combine low level character recognition with higher level
+ structural analysis of mathematical formulas. We present progress
+ towards this goal by extending a database-driven optical character
+ recognition system for mathematics with two high level analysis
+ features. One extends and enhances the traditional approach of
+ projection profile cutting. The second aims at integrating the
+ recognition process with graph grammar rewriting by giving support
+ to the interactive construction and validation of grammar
+ rules. Both approaches can be successfully employed to enhance the
+ capabilities of our system to recognise and reconstruct compound
+ mathematical expressions.",
+ paper = "Raja06.pdf"
+}
+
+\end{chunk}
+
+\index{Adams, Andrew A.}
+\index{Davenport, James H.}
+\begin{chunk}{axiom.bib}
+@article{Adam04,
+ author = "Adams, Andrew A. and Davenport, James H.",
+ title = {{Copyright Issues for MKM}},
+ journal = "LNCS",
+ volume = "3119",
+ year = "2004",
+ abstract =
+ "We present an overview of the current situation and recent and
+ expected future developments in areas of copyright law and
+ economics relevant to Mathematical Knowledge Management.",
+ paper = "Adam04.pdf"
+}
+
+\end{chunk}
+
+\index{Carlisle, David}
+\index{Dewar, Mike}
+\begin{chunk}{axiom.bib}
+@article{Carl03,
+ author = "Carlisle, David and Dewar, Mike",
+ title = {{NAG Library Documentation}},
+ journal = "LNCS",
+ volume = "2594",
+ year = "2003",
+ abstract =
+ "This paper describes the management and evolution of a large
+ collection of 1200 documents detailing the functionality in NAG
+ Library products.
+
+ This provides a case study addressing many of the issues which
+ concern the ``MKM'' project, involving conversion of legacy
+ formats (SGML and Latex) to XML, and inferring semantic content
+ from mainly presentational mathematical expressions.",
+ paper = "Carl03.pdf"
+}
+
+\end{chunk}
+
+\index{Davenport, James H.}
+\begin{chunk}{axiom.bib}
+@article{Dave03,
+ author = "Davenport, James H.",
+ title = {{MKM from Book to Computer: A Case Study}},
+ journal = "LNCS",
+ volume = "2594",
+ year = "2003",
+ abstract =
+ "[2] is one of the great mathematical knowledge
+ repositories. Nevertheless, it was written for a different era,
+ and for human readership. In this paper, we describe the sorts of
+ knowledge in one chapter (elementary transcendental functions) and
+ the difficulties in making this sort of knowledge formal. This
+ makes us ask questions about the nature of a Mathematical
+ Knowledge Repository, and whether a database is enough, or whether
+ more ``intelligence'' is required.",
+ paper = "Dave03.pdf"
+}
+
+\end{chunk}
+
+\index{Wiedijk, Freek}
+\begin{chunk}{axiom.bib}
+@article{Wied03b,
+ author = "Wiedijk, Freek",
+ title = {{Comparing Mathematical Provers}},
+ journal = "LNCS",
+ volume = "2594",
+ year = "2003",
+ abstract =
+ "We compare fifteen systems for the formalization of mathematics
+ with a computer. We present several tables that list various
+ properties of these programs. The three main dimensions on which
+ we compare these systems are: the size of their library, the
+ strength of their logic and their level of automation.",
+ paper = "Wied03b.pdf"
+}
+
+\end{chunk}
+
+\index{Deplagne, Eric}
+\index{Kirchner, Claude}
+\begin{chunk}{axiom.bib}
+@article{Depl02,
+ author = "Deplagne, Eric and Kirchner, Claude",
+ title = {{Deduction versus Computation: The Case of Induction}},
+ journal = "LNCS",
+ volume = "2385",
+ year = "2002",
+ abstract =
+ "The fundamental difference and the essential complementarity
+ between computation and deduction are central in computer algebra,
+ automated deduction, proof assistants and in frameworks making
+ them cooperating. In this work we show that the fundamental proof
+ method of induction can be understood and implemented as either
+ computation or deduction.
+
+ Inductive proofs can be built either explicitly by making use of
+ an induction principle or implicitly by using the so-called
+ induction by rewriting and inductionless induction methods. When
+ mechanizing proof construction, explicit induction is used in
+ proof assistants and implicit induction is used in rewrite based
+ automated theorem provers. The two approaches are clearly
+ complementary but up to now there was no framework able to
+ encompass and to understand uniformly the two methods. In this
+ work, we propose such an approach based on the general notion of
+ deduction modulo. We extend slightly the original version of the
+ deduction modulo framework and we provide modularity properties
+ for it. We show how this applies to a uniform understanding of the
+ so called induction by rewriting method and how this relates
+ directly to the general use of the induction principle.",
+ paper = "Depl02.pdf"
+}
+
+\end{chunk}
+
+\index{Meier, Andreas}
+\index{Sorge, Volker}
+\index{Colton, Simon}
+\begin{chunk}{axiom.bib}
+@article{Meie02,
+ author = "Meier, Andreas and Sorge, Volker and Colton, Simon",
+ title = {{Employing Theory Formation to Guide Proof Planning}},
+ journal = "LNCS",
+ volume = "2385",
+ year = "2002",
+ abstract =
+ "The invention of suitable concepts to characterise mathematical
+ structures is one of the most challenging tasks for both human
+ mathematicians and automated theorem provers alike. We present an
+ approach where automatic concept formation is used to guide
+ non-isomorphism proofs in the residue class domain. The main idea
+ behind the proof is to automatically identify discriminants for
+ two given structures to show that they are not
+ isomorphic. Suitable discriminants are generated by a theory
+ formation system; the overall proof is constructed by a proof
+ planner with the additional support of traditional automated
+ theorem provers and a computer algebra system.",
+ paper = "Meie02.pdf"
+}
+
+\end{chunk}
+
+\index{Sturm, Thomas}
+\begin{chunk}{axiom.bib}
+@article{Stur02,
+ author = "Sturm, Thomas",
+ title = {{Integration of Quantifier Elimination with Constraint
+ Logic Programming}},
+ journal = "LNCS",
+ volume = "2385",
+ year = "2002",
+ abstract =
+ "We examine the potential of an extension of constraint logic
+ programming, where the admissible constraints are arbitrary
+ first-order formulas over some domain. Constraint solving is
+ realized by effective quantifier elimination. The arithmetic is
+ always exact. We describe the conceptual advantages of our approach
+ and the capabilities of the current implementation
+ CLP(RL). Supported domains are $\mathbb{R}$, $\mathbb{C}$, and
+ $\mathbb{Q}_p$. For our discussion here we restrict to $\mathbb{R}$.",
+ paper = "Stur02.pdf"
+}
+
+\end{chunk}
+
+\index{Rabe, Florian}
+\begin{chunk}{axiom.bib}
+@article{Rabe12,
+ author = "Rabe, Florian",
+ title = {{A Query Language for Formal Mathematical Libraries}},
+ journal = "LNCS",
+ volume = "7362",
+ year = "2012",
+ abstract =
+ "One of the most promising applications of mathematical knowledge
+ management is search: Even if we restrict attention to the tiny
+ fragment of mathematics that has been formalized, the amount
+ exceeds the comprehension of an individual human.
+
+ Based on the generic representation language MMT, we introduce the
+ mathematical query language QMT: It combines simplicity,
+ expressivity, and scalability while avoiding a commitment to a
+ particular logical formalism. QMT can integrate various search
+ paradigms such as unification, semantic web, or XQuery style
+ queries, and QMT queries can span different mathematical
+ libraries.
+
+ We have implemented QMT as a part of the MMT API. This combination
+ provides a scalable indexing and query engine that can be readily
+ applied to any library of mathematical knowledge. While our focus
+ here is on libraries that are available in a content markup
+ language, QMT naturally extends to presentation and narration
+ markup languages.",
+ paper = "Rabe12.pdf"
+}
+
+\end{chunk}
+
+\index{Amin, Nada}
+\index{Rompf, Tiark}
+\begin{chunk}{axiom.bib}
+@inproceedings{Amin18,
+ author = "Amin, Nada and Rompf, Tiark",
+ title = {{Collapsing Towers of Interpreters}},
+ booktitle = "Principles of Programming Languages",
+ year = "2018",
+ publisher = "ACM",
+ abstract =
+ "Given a tower of interpreters, i.e., a sequence of multiple
+ interpreters interpreting one another as input programs, we aim to
+ collapse this tower into a compiler that removes all interpretive
+ overhead and runs in a single pass. In the real world, a use case
+ might be Python code executed by an x86 runtime, on a CPU emulated
+ in a JavaScript VM, running on an ARM CPU. Collapsing such a tower
+ can not only exponentially improve runtime performance, but also
+ enable the use of base language tools for interpreted programs,
+ e.g. for analysis and verification. In this paper, we lay the
+ foundations in an idealized but realistic setting.
+
+ We present a multi-level lambda calculus that features staging
+ constructs and stage polymorphism: based on runtime parameters, an
+ evaluator either executes source code (thereby acting as an
+ interpreter) or generates code (thereby acting as a compiler). We
+ identify stage polymorphism, a programming model from the domain
+ of high-performance program generators, as the key mechanism to
+ make such interpreters compose in a collapsible way.
+
+ We present Pink, a meta-circular Lisp-like evaluator on top of
+ this calculus, and demonstrate that we can collapse arbitrarily
+ many levels of self-interpretation, including levels with
+ semantics modifications. We discuss several examples: compiling
+ regular expressions through an interpreter to base code, building
+ program transformers from modified interpreters, and others. We
+ develop these ideas further to include reflection and reification,
+ culminating in Purple, a reflective language inspired by Brown,
+ Blond, and Black, which realizes a conceptually infinite tower,
+ where every aspect of the semantics can change
+ dynamically. Addressing an open challenge, we show how user
+ programs can be compiled and recompiled under user-modified
+ semantics.",
+ paper = "Amin18.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Ritt, Joseph Fels}
+\begin{chunk}{axiom.bib}
+@book{Ritt48,
+ author = "Ritt, Joseph Fels",
+ title = {{Integration in Finite Terms}},
+ publisher = "Columbia University Press, New York",
+ year = "1948"
+}
+
+\end{chunk}
+
+\index{Conrad, Brian}
+\begin{chunk}{axiom.bib}
+@misc{Conr05,
+ author = "Conrad, Brian",
+ title = {{Impossibility Theorems for Elementary Integration}},
+ year = "2005",
+ link =
+ "\url{http://www2.maths.ox.ac.uk/cmi/library/academy/LectureNotes05/Conrad.pdf}",
+ abstract =
+ "Liouville proved that certain integrals, most famously
+ $\int{e^{-x^2}}~dx$, cannot be expressed in elementary terms. We
+ explain how to give precise meaning to the notion of integration
+ ``in elementary terms'', and we formulate Liouville's theorem
+ that characterizes the possible form of elementary
+ antiderivatives. Using this theorem, we deduce a practical
+ criterion for proving such impossibility results in special cases.
+
+ This criterion is illustrated for the Gaussian integral
+ $\int{e^{-x^2}}~dx$ from probability theory, the logarithmic
+ integral $\int{}~dt/\log(t)$ from the study of primes, and
+ elliptic integrals. Our exposition is aimed at students who are
+ familiar with calculus and elementary abstract algebra (at the
+ level of polynomial rings $F(t)$ over a field $F$).",
+ paper = "Conr05.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kiss, Csongor}
+\index{Eisenbach, Susan}
+\index{Field, Tony}
+\index{Jones, Simon Peyton}
+\begin{chunk}{axiom.bib}
+@inproceedings{Kiss18,
+ author = "Kiss, Csongor and Eisenbach, Susan and Field, Tony and
+ Jones, Simon Peyton",
+ title = {{Higher-order Type-level Programming in Haskell}},
+ booktitle = "Proc. ACM Programming Languages",
+ year = "2018",
+ publisher = "ACM",
+ abstract =
+ "Type family applications in Haskell must be fully saturated. This
+ means that all type-level functions have to be first-order,
+ leading to code that is both messy and long winded. In this paper
+ we detail an extension to GHC that removes this restriction. We
+ augment Haskell's existing type arrow, $\rightarrow$, with an
+ unmatchable arrow $\twoheadrightarrow$, that supports partial
+ application of type families without compromising soundness. A
+ soundness proof is provided. We show how the techniques described
+ can lead to substantial code-size reduction (circa 80\%) in the
+ type-level logic of commonly-used type-level libraries whilst
+ simultaneously improving code quality and readability.",
+ paper = "Kiss18.pdf"
+}
+
+\end{chunk}
+
+\index{Ostebee, Arnold}
+\index{Zorn, Paul}
+\begin{chunk}{axiom.bib}
+@article{Oste93,
+ author = "Ostebee, Arnold and Zorn, Paul",
+ title = {{Telegraphic Reviews}},
+ journal = "The American Mathematical Monthly",
+ volume = "100",
+ number = "8",
+ pages = "812-817",
+ year = "1993",
+ paper = "Oste93.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
diff --git a/src/axiom-website/patches.html b/src/axiom-website/patches.html
index 2ed6302..ba0e657 100644
--- a/src/axiom-website/patches.html
+++ b/src/axiom-website/patches.html
@@ -5996,6 +5996,8 @@ src/input/groebtest.input test Parisse paper examples

books/Newsletter.September85.pdf updated

20190526.01.tpd.patch
books/bookheader.tex add names to credit list

+20190527.01.tpd.patch
+books/multind.sty multiple index files in a book

--
1.9.1