From 0bb66a74fc5f23ebaf759e06d737d37f93df8aad Mon Sep 17 00:00:00 2001
From: Tim Daly
Date: Sat, 12 Oct 2019 07:24:36 -0400
Subject: [PATCH] books/bookvolbib added references
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Goal: Proving Axiom Sane
\index{Benoit, Alexandre}
\index{Chyzak, Frederic}
\index{Darrasse, Alexis}
\index{Gregoire, Thomas}
\index{Koutschan, Christoph}
\index{Mezzarobba, Marc}
\index{Salvy, Bruno}
\begin{chunk}{axiom.bib}
@misc{DDMF19,
author = "Benoit, Alexandre and Chyzak, Frederic and Darrasse, Alexis
and Gregoire, Thomas and Koutschan, Christoph and
Mezzarobba, Marc and Salvy, Bruno",
title = {{Digital Dictionary of Mathematical Functions}},
year = "2019",
link = "\url{http://ddmf.msr-inria.inria.fr/1.9.1/ddmf}",
abstract =
"Interactive site on Mathematical Functions with properties,
truncated expansions, numerical evaluations, plots, and more. The
functions currently presented are elementary functions with
special functions of a single variable. More functions --- special
functions with parameters, orthogonal polynomials, sequences ---
will be added as the project advances.",
paper = "DDMF19.pdf",
keywords = "axiomref"
}
\end{chunk}
\index{Stoutemyer, David R.}
\begin{chunk}{axiom.bib}
@misc{Stou12a,
author = "Stoutemyer, David R.",
title = {{Can the Eureqa Symbolic Regression Program, Computer
Algebra and Numerical Analysis help each other?}},
link = "\url{https://arxiv.org/pdf/1203.1023.pdf}",
year = "2012",
abstract =
"The free Eureqa program has recently received extensive press
praise. A representative quote is
\begin{quote}
There are very clever 'thinking machines' in existence today,
such as Watson, the IBM computer that conquered {\sl Jeopardy!}
last year. But next to Eureqa, Watson is merely a glorified
search engine.
\end{quote}
The program is designed to work with noisy experimental data,
searching for then returning a set of result expressions that
attempt to optimally trade off conciseness with accuracy.
However, if the data is generated from a formula for which there
exists more concise equivalent formulas, sometimes some of the
candidate Eureqa expressions are one or more of those more concise
equivalents expressions. If not, perhaps one or more of the
returned Eureqa expressions might be a sufficiently accurate
approximation that is more concise than the given
formula. Moreover, when there is no known closed form expression,
the data points can be generated by numerical methods, enabling
Eureqa to find expressions that concisely fit those data points
with sufficient accuracy. In contrast to typical regression
software, the user does not have to explicitly or implicitly
provide a specific expression or class of expressions containing
unknown constants for the software to determine.
Is Eureqa useful enough in these regards to provide an additional
tool for experimental mathematics, computer algebra users and
numerical analysts? Yes, if used carefully. Can computer algebra
and numerical methods help Eureqa? Definitely.",
paper = "Stou12a.pdf"
}
\end{chunk}
\index{Barthe, Gilles}
\begin{chunk}{axiom.bib}
@article{Bart85,
author = "Barthe, Gilles",
title = {{Implicit Coercions in Type Systems}},
journal = "LNCS",
volume = "1158",
pages = "1--15",
year = "1996",
abstract =
"We propose a notion of pure type system with implicit
coercions. In our framework, judgements are extended with a
context of coercions $\Delta$ and the application rule is modified
so as to allow coercions to be left implicit. The setting supports
multiple inheritance and can be applied to all type theories with
$\Pi$-types. One originality of our work is to propose a
computational interpretation of implicit coercions. In this paper,
we demonstrate how this interpretation allows a strict control on
the logical properties of pure type systems with implicit coercions.",
paper = "Bart85.pdf",
keywords = "printed"
}
\end{chunk}
\index{Barthe, Gilles}
\index{Ruys, Mark}
\index{Barendregt, Henk}
\begin{chunk}{axiom.bib}
@article{Bart85a,
author = "Barthe, Gilles and Ruys, Mark and Barendregt, Henk",
title = {{A Two-Level Approach Towards Lean Proof-Checking}},
journal = "LNCS",
volume = "1158",
pages = "16--35",
year = "1996",
abstract =
"We present a simple and effective methodology for equational
reasoning in proof checkers. The method is based on a two-level
approach distinguishing between syntax and semantics of
mathematical theories. The method is very general and can be
carried out in any system with inductive and oracle types. The
potential of our two-level approach is illustrated by some
examples developed in Lego.",
paper = "Bart85a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Luo, Zhaohui}
\begin{chunk}{axiom.bib}
@article{Loux08,
author = "Luo, Zhaohui",
title = {{Coercions in a Polymorphic Type System}},
journal = "Math. Struct. in Comp. Science",
volume = "18",
pages = "729--751",
year = "2008",
abstract =
"We incorporate the idea of coercive subtyping, a theory of
abbreviation for dependent type theories, into the polymorphic
type system in functional programming languages. The traditional
type system with let-polymorphism is extended with argument
coercions and function coercions, and a corresponding type
inference algorithm is presented and proved to be sound and complete.",
paper = "Loux08.pdf",
keywords = "printed"
}
\end{chunk}
\index{Berger, U.}
\index{Schwichtenberg, H.}
\begin{chunk}{axiom.bib}
@article{Berg85,
author = "Berger, U. and Schwichtenberg, H.",
title = {{The Greatest Common Divisor: A Case Study for Program
Extraction from Classical Proofs}},
journal = "LNCS",
volume = "1158",
pages = "36--46",
year = "1996",
paper = "Berg85.pdf",
keywords = "printed"
}
\end{chunk}
\index{Luo, Zhaohui}
\begin{chunk}{axiom.bib}
@article{Loux12,
author = "Luo, Zhaohui",
title = {{Formal Semantics in Modern Type Theories with Coercive Subtyping}},
journal = "Linguistics and Philosophy",
volume = "35",
pages = "491--513",
year = "2012",
abstract =
"In the formal semantics based on modern type theories, common
nouns are interpreted as types, rather than as predicates on
entities as in Montague's semantics. This brings about important
advantages in linguistic interpretations but also leads to a
limitation of expressive power because there are fewer operations
on types as compared with those on predicates. The theory of
coercive subtyping adequately extends the modern type theories
and, as shown in this paper, plays a very useful role in making
type theories more expressive for formal semantics. It not only
gives a satisfactory solution to the basic problem of 'multiple
categorisations' caused by interpreting common nouns as types, but
provides a powerful formal framework to model interesting
linguistic phenomena such as copredication, whose formal treatment
has been found difficult in a Montagovian setting. In particular,
we show how to formally introduce dot-types in a type theory with
coercive subtyping and study some type-theoretic constructs that
provide useful representational tools for reference transfers and
multiple word meanings in formal lexical semantics.",
paper = "Loux12.pdf",
keywords = "printed"
}
\end{chunk}
\index{Jedynak, Wojciech}
\index{Biernacka, Malgorzata}
\index{Biernacki, Dariusz}
\begin{chunk}{axiom.bib}
@inproceedings{Jedy13,
author = "Jedynak, Wojciech and Biernacka, Malgorzata and
Biernacki, Dariusz",
title = {{An Operational Foundation for the Tactic Language of Coq}},
booktitle = "Proc. 15th Symp. on Principles and Practices of
Declarative Programming",
publisher = "ACM",
pages = "25--36",
year = "2013",
isbn = "9781450321549",
abstract =
"We introduce a semantic toolbox for Ltac, the tactic language of
the popular Coq proof assistant. We present three formats of
operational semantics, each of which has its use in the practice
of tactic programming: a big-step specification in the form of
natural semantics, a model of implementation in the form of an
abstract machine, and a small-step characterization of computation
in the form of reduction semantics. The three semantics are
provably equivalent and have been obtained via off-the-shelf
derivation techniques of the functional correspondence and the
syntactic correspondence. We also give examples of Ltac programs
and discuss some of the issues that the formal semantics help to
clarify.
With this work we hope to enhance the operational understanding of
Ltac as well as to set up a framework to reason about Coq scripts
and to build tools supporting tactic programming based on rigorous
semantics.",
paper = "Jedy13.pdf"
}
\end{chunk}
\index{Asperti, Andrea}
\index{Ricciotti, Wilmer}
\index{Coen, Claudio Sacerdoti}
\index{Tassi, Enrico}
\begin{chunk}{axiom.bib}
@article{Aspe12b,
author = "Asperti, Andrea and Ricciotti, Wilmer and
Coen, Claudio Sacerdoti and Tassi, Enrico",
title = {{Formal Metatheory of Programming Languages in the Matita
Interactive Theorem Prover}},
journal = "Journal of Automated Reasoning",
volume = "49",
number = "3",
pages = "427--451",
year = "2012",
abstract =
"This paper is a report about the use of Matita, an interactive
theorem prover under development at the University of Bologna, for
the solution of the POPLmark Challenges, part 1a. We provide three
different formalizations, including two direct solutions using
pure de Bruijn and locally nameless encodings of bound variables,
and a formalization using named variables, obtained by means of a
sound translation to the locally nameless encoding. According to
this experience, we also discuss some of the proof principles used
in our solutions, which have led to the development of a
generalized inversion tactic for Matita.",
paper = "Aspe12b.pdf",
keywords = "printed"
}
\end{chunk}
\index{Strecker, Martin}
\begin{chunk}{axiom.bib}
@phdthesis{Stre99,
author = "Strecker, Martin",
title = {{Construction and Deduction in Type Theories}},
school = "Universit{\"a}t Ulm",
year = "1999",
abstract =
"This dissertation is concerned with interactive proof
construction and automated proof search in type theories, in
particular the Calculus of Constructions and its subsystems.
Type theories can be conceived as expressive logics which combine
a functional programming language, strong typing and a higher-order
logic. They are therefore a suitable formalism for specification
and verification systems. However, due to their expressiveness, it
is difficult to provide appropriate deductive support for type
theories. This dissertation first examines general methods for
proof construction in type theories and then explores how these
methods can be refined to yield proof search procedures for
specialized fragments of the language.
Proof development in type theories usually requires the
construction of a term having a given type in a given context. For
the term to be constructed, a {\sl metavariable} is introduced
which is successively instantiated in the course of the proof. A
naive use of metavariables leads to problems, such as
non-commutativity of reduction and instantiation and the
generation of ill-typed terms during reduction. For solving these
problems, a calculus with {\sl explicit substitutions} is
introduced, and it is shown that this calculus preserves
properties such as strong normalisation and decidability of typing.
In order to obtain a calculus appropriate for proof search, the
usual natural deduction presentation of type theories is replaced
by a {\sl sequent style presentation}. It is shown that the
calculus thus obtained is correct with respect to the original
calculus. Completeness (proved with a cut-elimination argument) is
shown for all predicative fragments of the lambda cube.
This dissertation concludes with a discussion of some techniques
that make proof search practically applicable, such as unification
and pruning of the proof search space by exploiting
impermutabilities of the sequent calculus.",
paper = "Stre99.pdf"
}
\end{chunk}
\index{Luo, Zhaohui}
\begin{chunk}{axiom.bib}
@phdthesis{Loux90,
author = "Luo, Zhaohui",
title = {{An Extended Calculus of Constructions}},
school = "University of Edinburgh",
year = "1990",
abstract =
"This thesis presents and studies a unifying theory of dependent
types ECC --- Extended Calculus of Constructions. ECC integrates
Coquand-Huet's (impredicative) calculus of constructions and
Martin-L{\"o}f's (predicative) type theory with universes, and turns
out to be a strong and expressive calculus for formalization of
mathematics, structured proof development and program specification.
The meta theory of ECC is studied and we show that the calculus
has good metatheoretic properties. The main proof theoretic
result is the {\sl strong normalization theorem} which makes
explicit the predicativity of the predicative universes. The
strong normalization result shows the proof theoretic consistency
of the calculus; in particular, it implies the consistency of the
embedded intuitionistic higher-order logic and the decidability of
the theory. The metatheoretic results establish the theoretical
foundations both for pragmatic applications in theorem proving and
program specification and for computer implementations of the
theory. ECC has been implemented in the proof development system
LEGO developed by Pollack.
In ECC, dependent $\Sigma$ types are non-propositional types
residing in the predicative universes and propositions are lifted
as higher-level types as well. This solves the known difficulty
that adding strong $\Sigma$ types to an impredicative system
results in logical paradox and enables $\Sigma$ types to be used
to express the intuitionistic notion of subsets. $\Sigma$ types
together with type universes hence provide useful abstraction and
module mechanisms for abstract description of mathematical
theories and basic mechanisms for program specification and
adequate formalization of abstract mathematics (e.g. abstract
algebras and notions in category theory). A notion of (abstract)
mathematical theory can be described and leads to a promising
approach to {\sl abstract reasoning} and {\sl structured
reasoning}. Program specifications can be expressed by $\Sigma$
types, using propositions in the embedded logic to describe
program properties (for example, by an equality reflection result,
computational equality can be modeled by the propositional
Leibniz's equality definable in the theory). These developments
allow comprehensive structuring of formal or rigorous development
of proofs and programs.
Also discussed is how the calculus can be understood
set-theoretically. We explain an $\omega$-Set (realizability)
model of the theory. In particular, propositions can be
interpreted as partial equivalence relations and the predicative
type universes as corresponding to large set universes.",
paper = "Loux90.pdf"
}
\end{chunk}
\index{von Henke, F.W.}
\index{Luther, M.}
\index{Pfeifer, H.}
\index{Ruess, H.}
\index{Schwier, D.}
\index{Strecker, M.}
\index{Wagner, M.}
\begin{chunk}{axiom.bib}
@article{Henk96,
author = "von Henke, F.W. and Luther, M. and Pfeifer, H. and Ruess, H.
and Schwier, D. and Strecker, M. and Wagner, M.",
title = {{The TYPELAB Specification and Verification Environment}},
journal = "LNCS",
volume = "1101",
pages = "604--607",
year = "1996",
paper = "Henk96.pdf"
}
\end{chunk}
\index{von Henke, F.W.}
\index{Dold, A.}
\index{Ruess, H.}
\index{Schwier, D.}
\index{Strecker, M.}
\begin{chunk}{axiom.bib}
@article{Henk94,
author = "von Henke, F.W. and Dold, A. and Ruess, H. and Schwier, D.
and Strecker, M.",
title = {{Construction and Deduction Methods for the Formal
Development of Software}},
journal = "LNCS",
volume = "1009",
year = "1994",
abstract =
"In this paper we present an approach towards a framework based on
the type theory ECC (Extended Calculus of Constructions) in which
specifications, programs and operators for modular development by
stepwise refinement can be formally described and reasoned
about. We show that generic software development steps can be
expressed as higher-order functions and demonstrate that proofs
about their asserted effects can be carried out in the underlying
logical calculus.
For transformations requiring syntactic manipulations of objects,
a two-level system comprising a Meta- and an Object-level is
provided, and it is shown how transformations can be formalized
that faithfully represent operators on the object level.",
paper = "Henk94.pdf",
keywords = "printed"
}
\end{chunk}
\index{Kamareddine, Fairouz}
\index{Laan, Twan}
\index{Nederpelt, Rob}
\begin{chunk}{axiom.bib}
@book{Kama05,
author = "Kamareddine, Fairouz and Laan, Twan and Nederpelt, Rob",
title = {{A Modern Perspective on Type Theory}},
series = "Applied Logic Series",
volume = "29",
publisher = "Kluwer Academic Publishers",
isbn = "1402023359",
year = "2005",
paper = "Kama05.pdf"
}
\end{chunk}
\index{Andrews, Peter B.}
\begin{chunk}{axiom.bib}
@book{Andr02,
author = "Andrews, Peter B.",
title = {{An Introduction to Mathematical Logic and Type Theory: To
Truth Through Proof}},
series = "Applied Logic Series",
volume = "27",
publisher = "Springer",
year = "2002",
isbn = "9789401599344",
paper = "Andr02.pdf"
}
\end{chunk}
\index{Strecker, M.}
\index{Luther, M.}
\index{von Henke, F.}
\begin{chunk}{axiom.bib}
@inbook{Stre98,
author = "Strecker, M. and Luther, M. and von Henke, F.",
title = {{Interactive and Automated Proof Construction in Type Theory}},
publisher = "Springer",
chapter = "3",
pages = "73--96",
isbn = "9789401704359",
year = "1998",
abstract =
"This chapter gives a survey of TYPELAB, a specification and
verification environment that integrates interactive proof
development and automated proof search. TYPELAB is based on a
constructive type theory, the Calculus of Constructions, which can
be understood as a combination of a typed $\lambda$-calculus and
an expressive higher-order logic. Distinctive features of the type
system are dependent function types ($\Pi$ types) for modeling
polymorphism and dependent record types ($\Sigma$ types) for
encoding specifications and mathematical theories.",
paper = "Stre98.pdf"
}
\end{chunk}
\index{Kreitz, Christoph}
\begin{chunk}{axiom.bib}
@inbook{Krei98,
author = "Kreitz, Christoph",
title = {{Program Synthesis}},
booktitle = "Automated Deduction --- A Basis for Applications (Vol III)",
publisher = "Springer",
year = "1998",
chapter = "5",
pages = "105--134",
isbn = "9789401704373",
comment = "Applied Logic Series, volume 10",
paper = "Krei98.pdf"
}
\end{chunk}
\index{Wadler, Philip}
\begin{chunk}{axiom.bib}
@misc{Wadl00,
  author   = {Wadler, Philip},
  title    = {{Proofs are Programs: 19th Century Logic and 21st Century
               Computing}},
  link     = {\url{https://homepages.inf.ed.ac.uk/wadler/papers/frege/frege.pdf}},
  year     = {2000},
  paper    = {Wadl00.pdf},
  keywords = {printed}
}
\end{chunk}
\index{Wang, Paul S.}
\begin{chunk}{axiom.bib}
@article{Wang80,
author = "Wang, Paul S.",
title = {{The EEZ-GCD Algorithm}},
journal = "SIGSAM Bulletin",
volume = "14",
number = "2",
pages = "50--60",
year = "1980",
abstract =
"An enhanced gcd algorithm based on the EZ-GCD algorithm is
described. Implementational aspects are emphasized. It is
generally faster and is particularly suited for computing gcd of
sparse multivariate polynomials. The EEZ-GCD algorithm is
characterized by the following features:
\begin{enumerate}
\item avoiding unlucky evaluations,
\item predetermining the correct leading coefficient of the
desired gcd,
\item using the sparsity of the given polynomials to determine
terms in the gcd and
\item direct methods for dealing with the ``common divisor problem.''
\end{enumerate}
The common divisor problem occurs when the gcd has a different
common divisor with each of the cofactors. The EZ-GCD algorithm
does a squarefree decomposition in this case. It can be avoided
resulting in increased speed. One method is to use parallel p-adic
construction of more than two factors. Machine examples with
timing data are included.",
paper = "Wang80.pdf",
keywords = "printed"
}
\end{chunk}
\index{Tsuji, Kuniaki}
\begin{chunk}{axiom.bib}
@article{Tsuj09,
author = "Tsuji, Kuniaki",
journal = "Journal of Symbolic Computation",
title = {{An Improved EZ-GCD Algorithm for Multivariate Polynomials}},
volume = "44",
number = "1",
year = "2009",
pages = "99--110",
abstract =
"The EZ-GCD algorithm often has a bad-zero problem, which has a
remarkable influence on polynomials with higher-degree terms. In
this paper, by applying special ideals, the EZ-GCD algorithm for
sparse polynomials is improved. This improved algorithm greatly
reduces computational complexity because of the sparseness of
polynomials. The author expects that the use of these ideals will
be useful as a resolution for obtaining a GCD of sparse
multivariate polynomials with higher-degree terms.",
paper = "Tsuj09.pdf",
keywords = "printed"
}
\end{chunk}
\index{Sanuki, Masaru}
\index{Inaba, Daiju}
\index{Sasaki, Tateaki}
\begin{chunk}{axiom.bib}
@inproceedings{Sanu15,
author = "Sanuki, Masaru and Inaba, Daiju and Sasaki, Tateaki",
title = {{Computation of GCD of Sparse Multivariate Polynomials by
Extended Hensel Construction}},
booktitle = "17th Int. Symp. on Symbolic and Numeric Algorithms for
Scientific Computing",
publisher = "IEEE",
year = "2015",
abstract =
"Let $F(x,u_1,\ldots,u_i)$ be a squarefree multivariate polynomial
in main variable $x$ and subvariables $u_1\ldots u_i$. We say
that the leading coefficient (LC) of $F$ is singular if it
vanishes at the origin of the subvariables. A representative
algorithm for non-sparse multivariate polynomial GCD is the EZ-GCD
algorithm, which is based on the generalized Hensel construction
(GHC). In order to apply the GHC easily, we require 1) the LC of
$F$ is nonsingular, 2) $F(x,0,\ldots,0)$ is squarefree, and 3)
the initial Hensel factor of GCD is ``lucky''. These requirements
are usually satisfied by the ``nonzero substitution'', i.e. to
shift the origin of subvariables. However, the nonzero
substitution may cause a drastic increase of the number of terms
of $F$ if $F$ is sparse. In 1993, Sasaki and Kako proposed the
extended Hensel construction (EHC) which does not perform the
nonzero substitution even if the LC is singular. Using the EHC,
Inaba implemented an algorithm of multivariate polynomial
factorization and verified that it is very useful for sparse
polynomials. In this paper, we apply the EHC for the computation
of GCD of sparse multivariate polynomials. In order to find a
lucky initial factor, we utilize the weighting of subvariables,
etc. Our naive implementation in Maple shows that our algorithm is
comparable in performance to Maple's GCD routine based on the
sparse interpolation.",
paper = "Sanu15.pdf"
}
\end{chunk}
\index{Griesmer, J.H.}
\begin{chunk}{axiom.bib}
@article{Grie76,
author = "Griesmer, James H.",
title = {{Symbolic Mathematical Computation: A Survey}},
journal = "SIGSAM Bulletin",
volume = "10",
number = "2",
pages = "30--32",
year = "1976",
paper = "Grie76.pdf",
keywords = "axiomref"
}
\end{chunk}
\index{Luther, Marko}
\index{Strecker, Martin}
\begin{chunk}{axiom.bib}
@misc{Luth98,
author = "Luther, Marko and Strecker, Martin",
title = {{A Guided Tour through TYPELAB}},
year = "1998",
abstract =
"This report gives a survey of TYPELAB, a specification and
verification environment that integrates interactive proof
development and automated proof search. TYPELAB is based on a
constructive type theory, the Calculus of Constructions, which can
be understood as a combination of a typed $\lambda$-calculus and
an expressive higher-order logic. Distinctive features of the type
system are dependent function types for modeling polymorphism and
dependent record types for encoding specifications and
mathematical theories. After presenting an extended example which
demonstrates how program development by stepwise refinement of
specifications can be carried out, the theory underlying the
prover component of TYPELAB is described in detail. A calculus
with metavariables and explicit substitutions is introduced, and
the metatheoretic properties of this calculus are
analyzed. Furthermore, it is shown that this calculus provides an
adequate foundation for automated proof search in fragments of the
logic.",
paper = "Luth98.pdf",
keywords = "printed"
}
\end{chunk}
\index{Ehrig, H.}
\index{Kreowski, H.J.}
\index{Mahr, B.}
\index{Padawitz, P.}
\begin{chunk}{axiom.bib}
@article{Ehri82,
author = "Ehrig, H. and Kreowski, H.J. and Mahr, B. and Padawitz, P.",
title = {{Algebraic Implementation of Abstract Data Types}},
journal = "Theoretical Computer Science",
volume = "20",
pages = "209--263",
year = "1982",
abstract =
"Starting with a review of the theory of algebraic specifications
in the sense of the ADJ-group a new theory for algebraic
implementation of abstract data types is presented.
While main concepts of this new theory were given already at
several conferences this paper provides the full theory of
algebraic implementations developed in Berlin except of complexity
considerations which are given in a separate paper. This new
concept of algebraic implementations includes implementations for
algorithms in specific programming languages and on the other hand
it meets also the requirements for stepwise refinement of
structured programs and software systems as introduced by Dijkstra
and Wirth. On the syntactical level an algebraic implementation
corresponds to a system of recursive programs while the semantical
level is defined by algebraic constructions, called SYNTHESIS,
RESTRICTION and IDENTIFICATION. Moreover the concept allows
composition of implementations and a rigorous study of
correctness. The main results of the paper are different kinds of
correctness criteria which are applied to a number of illustrating
examples including the implementation of sets by hash-tables.
Algebraic implementations of larger systems like a histogram or a
parts system are given in separate case studies which, however,
are not included in this paper.",
paper = "Ehri82.pdf"
}
\end{chunk}
\index{de Moura, Leonardo}
\index{Avigad, Jeremy}
\index{Kong, Soonho}
\index{Roux, Cody}
\begin{chunk}{axiom.bib}
@misc{Mour15,
author = "de Moura, Leonardo and Avigad, Jeremy and Kong, Soonho
and Roux, Cody",
title = {{Elaboration in Dependent Type Theory}},
link = "\url{https://arxiv.org/pdf/1505.04324.pdf}",
year = "2015",
abstract =
"To be usable in practice, interactive theorem provers need to
provide convenient and efficient means of writing expressions,
definitions, and proofs. This involves inferring information that
is often left implicit in an ordinary mathematical text, and
resolving ambiguities in mathematical expressions. We refer to the
process of passing from a quasi-formal and partially-specified
expression to a completely precise formal one as {\sl
elaboration}. We describe an elaboration algorithm for dependent
type theory that has been implemented in the Lean theorem
prover. Lean's elaborator supports higher-order unification, type
class inference, ad hoc overloading, insertion of coercions, the
use of tactics, and the computational reduction of terms. The
interactions between these components are subtle and complex, and
the elaboration algorithm has been carefully designed to balance
efficiency and usability. We describe the central design goals,
and the means by which they are achieved.",
paper = "Mour15.pdf",
keywords = "printed"
}
\end{chunk}
\index{de Moura, Leonardo}
\index{Kong, Soonho}
\index{Avigad, Jeremy}
\index{van Doorn, Floris}
\index{von Raumer, Jakob}
\begin{chunk}{axiom.bib}
@misc{Mour19,
author = "de Moura, Leonardo and Kong, Soonho and Avigad, Jeremy
and van Doorn, Floris and von Raumer, Jakob",
title = {{The Lean Theorem Prover (system description)}},
link = "\url{http://florisvandoorn.com/papers/lean_description.pdf}",
year = "2019",
abstract =
"Lean is a new open source theorem prover being developed at
Microsoft Research and Carnegie Mellon University, with a small
trusted kernel based on dependent type theory. It aims to bridge the
gap between interactive and automated theorem proving, by situating
automated tools and methods in a framework that supports user
interaction and the construction of fully specified axiomatic
proofs. Lean is an ongoing and long-term effort, but it already
provides many useful components, integrated development
environments, and a rich API which can be used to embed it into
other systems. It is currently being used to formalize category
theory, homotopy type theory, and abstract algebra. We describe the
project goals, system architecture, and main features, and we
discuss applications and continuing work.",
paper = "Mour19.pdf",
keywords = "printed, DONE"
}
\end{chunk}
\index{Dybjer, Peter}
\begin{chunk}{axiom.bib}
@article{Dybj94,
author = "Dybjer, Peter",
title = {{Inductive Families}},
journal = "Formal Aspects of Computing",
volume = "6",
number = "4",
pages = "440--465",
year = "1994",
abstract =
"A general formulation of inductive and recursive definitions in
Martin-L{\"o}f's type theory is presented. It extends Backhouse's
'Do-It-Yourself Type Theory' to include inductive definitions of
families of sets and definitions of functions by recursion on the
way elements of such sets are generated. The formulation is in
natural deduction and is intended to be a natural generalization
to type theory of Martin-L{\"o}f's theory of iterated inductive
definitions of predicate logic.
Formal criteria are given for correct formation and introduction
rules of a new set former capturing definition by strictly
positive, iterated, generalized induction. Moreover, there is an
inversion principle for deriving elimination and equality rules
from the formation and introduction rules. Finally, there is an
alternative schematic presentation of definition by recursion.
The resulting theory is a flexible and powerful language for
programming and constructive mathematics. We hint at the wealth of
possible applications by showing several basic examples: predicate
logic, generalized induction, and a formalization of the untyped
lambda calculus.",
paper = "Dybj94.pdf",
keywords = "printed"
}
\end{chunk}
\index{Selsam, Daniel}
\begin{chunk}{axiom.bib}
@misc{Sels19,
author = "Selsam, Daniel",
title = {{CS240H: A Standalone Proofchecker for the Lean Theorem Prover}},
year = "2019",
link = "\url{http://www.scs.stanford.edu/16wi-cs240h/projects/selsam.pdf}",
paper = "Sels19.pdf",
keywords = "printed"
}
\end{chunk}
\index{Birkhoff, Garrett}
\begin{chunk}{axiom.bib}
@article{Birk35,
author = "Birkhoff, Garrett",
title = {{On the Structure of Abstract Algebras}},
journal = "Proc. of the Cambridge Philosophical Society",
volume = "31",
pages = "433--454",
year = "1935",
paper = "Birk35.pdf"
}
\end{chunk}
\index{de Moura, Leonardo}
\begin{chunk}{axiom.bib}
@misc{Mour16,
author = "de Moura, Leonardo",
title = {{The Lean Theorem Prover}},
link = "\url{https://www.youtube.com/watch?v=69ytTKfSSgc}",
conference = "PLSE '16",
comment = "video",
year = "2016",
keywords = "DONE"
}
\end{chunk}
\index{Awodey, Steve}
\begin{chunk}{axiom.bib}
@misc{Awod12,
author = "Awodey, Steve",
title = {{Category Theory Foundations. Lectures 1--4}},
year = "2012",
comment = "Oregon Programming Language Summer School 2013",
link =
"\url{http://www.youtube.com/watch?v=ZKmodCApZwk&list=PL8Ky8IYL8Oh7awp0sqa82o7Ggt4AGhyf}"
}
\end{chunk}
\index{Weirich, Stephanie}
\index{Choudhury, Pritam}
\index{Voizard, Antoine}
\index{Eisenberg, Richard A.}
\begin{chunk}{axiom.bib}
@misc{Weir19,
author = "Weirich, Stephanie and Choudhury, Pritam and Voizard,
Antoine and Eisenberg, Richard A.",
title = {{A Role for Dependent Types in Haskell (Extended Version)}},
link = "\url{https://arxiv.org/pdf/1905.13706.pdf}",
year = "2019",
abstract =
"Modern Haskell supports zero-cost coercions, a mechanism where
types that share the same runtime representation may be freely
converted between. To make sure such conversions are safe and
desirable, this feature relies on a mechanism of roles to prohibit
invalid coercions. In this work, we show how to integrate roles
with dependent type systems and prove, using the Coq proof
assistant, that the resulting system is sound. We have designed
this work as a foundation for the addition of dependent types to
the Glasgow Haskell Compiler, but we also expect that it will be
of use to designers of other dependently-typed languages who might
want to adopt Haskell's safe coercion feature.",
paper = "Weir19.pdf"
}
\end{chunk}
\index{Tennent, R.D.}
\begin{chunk}{axiom.bib}
@article{Tenn76,
author = "Tennent, R.D.",
title = {{The Denotational Semantics of Programming Languages}},
journal = "Communications of the ACM",
volume = "19",
number = "8",
pages = "437--453",
year = "1976",
abstract =
"This paper is a tutorial introduction to the theory of
programming language semantics developed by D. Scott and
C. Strachey. The application of the theory to formal language
specification is demonstrated and other applications are
surveyed. The first language considered, LOOP, is very elementary
and its definition merely introduces the notion and methodology of
the approach. Then the semantic concepts of environments, stores,
and continuations are introduced to model classes of programming
language features and the underlying mathematical theory of
computation due to Scott is motivated and outlined. Finally, the
paper presents a formal definition of the language GEDANKEN.",
paper = "Tenn76.pdf",
keywords = "printed"
}
\end{chunk}
\index{Elliott, Conal}
\begin{chunk}{axiom.bib}
@inproceedings{Elli17,
author = "Elliott, Conal",
title = {{Compiling to Categories}},
booktitle = "Proc. ACM Program. Lang. Vol 1",
publisher = "ACM",
year = "2017",
link = "\url{http://conal.net/papers/compiling-to-categories/compiling-to-categories.pdf}",
abstract =
"It is well-known that the simply typed lambda calculus is modeled
by any cartesian closed category (CCC). This correspondence
suggests giving typed functional programs a variety of
interpretations, each corresponding to a different category. A
convenient way to realize this idea is as a collection of
meaningpreserving transformations added to an existing compiler,
such as GHC for Haskell. This paper describes automatic
differentiation, incremental computation, and interval
analysis. Each such interpretation is a category easily defined in
Haskell (outside of the compiler). The general technique appears
to provide a compelling alternative to deeply embedded
domainspecific languages.",
paper = "Elli17.pdf",
keywords = "printed"
}
\end{chunk}
\index{Harrison, John}
\begin{chunk}{axiom.bib}
@misc{Harr13,
author = "Harrison, John",
title = {{A Survey of Automated Theorem Proving}},
year = "2013",
link = "\url{https://www.lektorium.tv/lecture/14805}"
}
\end{chunk}
\index{Ganesalingam, M.}
\index{Gowers, W.T.}
\begin{chunk}{axiom.bib}
@article{Gane17,
author = "Ganesalingam, M. and Gowers, W.T.",
title = {{A Fully Automatic Theorem Prover with HumanStyle Output}},
journal = "J. Automated Reasoning",
volume = "58",
pages = "253--291",
year = "2017",
abstract =
"This paper describes a program that solves elementary
mathematical problems, mostly in metric space theory, and presents
solutions that are hard to distinguish from solutions that might
be written by a human mathematician.",
paper = "Gane17.pdf"
}
\end{chunk}
\index{Ganzinger, Harald}
\begin{chunk}{axiom.bib}
@article{Ganz80,
author = "Ganzinger, Harald",
title = {{Transforming Denotational Semantics into Practical
Attribute Grammars}},
journal = "LNCS",
volume = "54",
pages = "1--69",
year = "1980",
paper = "Ganz80.pdf"
}
\end{chunk}
\index{Jones, Neil D.}
\index{Schmidt, David A.}
\begin{chunk}{axiom.bib}
@article{Jone80,
author = "Jones, Neil D. and Schmidt, David A.",
title = {{Compiler Generation from Denotational Semantics}},
journal = "LNCS",
volume = "54",
pages = "70--93",
year = "1980",
abstract =
"A methodology is described for generating provably correct
compilers from denotational definitions of programming
languages. An application is given to produce compilers into STM
code (an STM or state transition machine is a flowchartlike
program, lowlevel enough to be translated into efficient code on
conventional computers). First, a compiler $\phi:LAMC\rightarrow
STM$ from a lambda calculus dialect is defined. Any denotational
definition $\Delta$ of language $L$ defines a map
$\stackrel{\Delta}{\rightarrow}:L\rightarrow LAMC$, so
$\stackrel{\Delta}{\rightarrow}\circ \phi$ compiles $L$ into STM
code. Correctness follows from the correctness of $\phi$.
The algebraic framework of Morris, ADJ, etc. is used. The set of
STMs is given an algebraic structure so any
$\stackrel{\Delta}{\rightarrow} \circ \phi$ may be specified by giving
a derived operator on STM for each syntax rule of $L$.
This approach yields quite redundant object programs, so the paper
ends by describing two flow analytic optimization methods. The
first analyzes an already-produced STM to obtain information about
its runtime behaviour which is used to optimize the STM. The
second analyzes the generated compiling scheme to determine
runtime properties of object programs in general which a compiler
can use to produce less redundant STMs.",
paper = "Jone80.pdf"
}
\end{chunk}
\index{Raskovsky, Martin}
\index{Collier, Phil}
\begin{chunk}{axiom.bib}
@article{Rask80,
author = "Raskovsky, Martin and Collier, Phil",
title = {{From Standard to Implementation Denotational Semantics}},
journal = "LNCS",
volume = "54",
pages = "94--139",
year = "1980",
abstract =
"We are developing a compiler compiler. It takes as input the
formal definition of a programming language in Denotational
Semantics and produces as output a fairly efficient compiler
written in a system programming language which in turn will
produce code for a real machine. This work mainly deals with the
code generation parts.",
paper = "Rask80.pdf"
}
\end{chunk}
\index{Gaudel, M.C.}
\begin{chunk}{axiom.bib}
@article{Gaud80,
author = "Gaudel, M.C.",
title = {{Specification of Compilers as Abstract Data Type
Representations}},
journal = "LNCS",
volume = "54",
pages = "140--164",
year = "1980",
abstract =
"This paper presents a method for specifying and proving
compilers. This method is based on the algebraic data types
ideas. The main points are:
\begin{itemize}
\item to each language is associated an algebraic abstract data type
\item the semantic value of a program is given as a term of this
data type
\item the translation of the semantic values of source programs
into semantic values of target programs is specified and proved as
the representation of an algebraic data type by another one.
\end{itemize}
A compiler generator, PERLUETTE, which accepts such specifications
as input is described. The proof technique is discussed.",
paper = "Gaud80.pdf"
}
\end{chunk}
\index{Thatcher, James W.}
\index{Wagner, Eric G.}
\index{Wright, Jesse B.}
\begin{chunk}{axiom.bib}
@article{That80,
author = "Thatcher, James W. and Wagner, Eric G. and Wright, Jesse B.",
title = {{More on Advice on Structuring Compilers and Proving Them
Correct}},
journal = "LNCS",
volume = "54",
pages = "165--188",
year = "1980",
paper = "That80.pdf"
}
\end{chunk}
\index{Madsen, Ole Lehrmann}
\begin{chunk}{axiom.bib}
@article{Mads80,
author = "Madsen, Ole Lehrmann",
title = {{On Defining Semantics by means of Extended Attribute Grammars}},
journal = "LNCS",
volume = "54",
pages = "259--299",
year = "1980",
paper = "Mads80.pdf"
}
\end{chunk}
\index{Jones, Neil D.}
\index{Madsen, Michael}
\begin{chunk}{axiom.bib}
@article{Jone80a,
author = "Jones, Neil D. and Madsen, Michael",
title = {{AttributeInfluenced LR Parsing}},
journal = "LNCS",
volume = "54",
pages = "393--407",
year = "1980",
abstract =
"Methods are described which make it possible, when given an
arbitrary attribute grammar (or AG),
\begin{enumerate}
\item to analyze the AG to determine which of its attributes may
be computed during LR parsing,
\item to augment the parser with instructions and data structures
to compute many attributes during parsing,
\item to use attribute values to assist the parsing process
(e.g. to use symbol table information to decide whether P(X) is an
array element or a function call).
\end{enumerate}",
paper = "Jone80a.pdf"
}
\end{chunk}
\index{Bernstein, Daniel J.}
\index{Yang, Bo-Yin}
\begin{chunk}{axiom.bib}
@misc{Bern19,
author = "Bernstein, Daniel J. and Yang, Bo-Yin",
title = {{Fast Constant-Time GCD and Modular Inversion}},
year = "2019",
link = "\url{https://gcd.cr.yp.to/safegcd-20190413.pdf}",
abstract =
"This paper introduces streamlined constanttime variants of
Euclid's algorithm, both for polynomial inputs and for integer
inputs. As concrete applications, this paper saves time in (1)
modular inversion for Curve25519, which was previously believed to
be handled much more efficiently by Fermat's method, and (2) key
generation for the ntruhrss701 and sntrup4591761 latticebased
cryptosystems.",
paper = "Bern19.pdf",
keywords = "printed"
}
\end{chunk}
\index{Necula, George Ciprian}
\begin{chunk}{axiom.bib}
@phdthesis{Necu98,
author = "Necula, George Ciprian",
title = {{Compiling with Proofs}},
school = "Carnegie Mellon University",
year = "1998",
link = "\url{https://www.cs.cmu.edu/~rwh/theses/necula.pdf}",
abstract =
"One of the major challenges of building software systems is to
ensure that the various components fit together in a welldefined
manner. This problem is exacerbated by the recent advent of
software components whose origin is unknown or inherently
untrusted, such as mobile code or user extensions for operating
system kernels or database servers. Such extensions are useful for
implementing an efficient interaction model between a client and a
server because several data exchanges between them can be saved at
the cost of a single code exchange.
In this dissertation, I propose to tackle such system integrity
and security problems with techniques from mathematical logic and
programming language semantics. I propose a framework, called
{\sl proofcarrying code}, in which the extension provider sends
along with the extension code a representation of a formal proof
that the code meets certain safety and correctness
requirements. Then, the code receiver can ensure the safety of
executing the extension by validating the attached proof. The
major advantages of proofcarrying code are that it requires a
simple trusted infrastructure and that it does not impose runtime
penalties for the purpose of ensuring safety.
In addition to the concept of proofcarrying code, this
dissertation contributes the idea of certifying compilation. A
{\sl certifying compiler} emits, in addition to optimized target
code, function specifications and loop invariants that enable a
theoremproving agent to prove nontrivial properties of the
target code, such as type safety. Such a certifying compiler,
along with a proofgenerating theorem prover, is not only a
convenient producer of proofcarrying code but also a powerful
softwareengineering tool. The certifier also acts as an effective
referee for the correctness of each compilation, thus simplifying
considerably compiler testing and maintenance.
A complete system for proofcarrying code must also contain a
{\sl proofgenerating theorem prover} for the purpose of producing
the attached proofs of safety. This dissertation shows how
standard decision procedures can be adapted so that they can
produce detailed proofs of the proved predicates and also how
these proofs can be encoded compactly and checked
efficiently. Just like for the certifying compiler, a
proofgenerating theorem prover has significant software
engineering advantages over a traditional prover. In this case, a
simple proof checker can ensure the soundness of each successful
proving task and indirectly assist in testing and maintenance of
the theorem prover.",
paper = "Necu98.pdf"
}
\end{chunk}
\index{Cramer, Marcos}
\index{Koepke, Peter}
\index{Schroder, Bernhard}
\begin{chunk}{axiom.bib}
@article{Cram11,
author = "Cramer, Marcos and Koepke, Peter and Schroder, Bernhard",
title = {{Parsing and Disambiguation of Symbolic Mathematics in the
Naproche System}},
journal = "LNAI",
number = "6824",
pages = "180--195",
year = "2011",
publisher = "Springer",
abstract =
"The Naproche system is a system for linguistically analysing and
proofchecking mathematical texts written in a controlled natural
language. The aim is to have an input language that is as close as
possible to the language that mathematicians actually use when
writing textbooks or papers.
Mathematical texts consist of a combination of natural language
and symbolic mathematics, with symbolic mathematics obeying its
own syntactic rules. We discuss the difficulties that a program
for parsing and disambiguating symbolic mathematics must face and
present how these difficulties have been tackled in the Naproche
system. One of these difficulties is the fact that information
provided in the preceding context --- including information
provided in natural language --- can influence the way a symbolic
expression has to be disambiguated.",
paper = "Cram11.pdf"
}
\end{chunk}
\index{Altenkirch, Thorsten}
\index{McBride, Conor}
\index{Swierstra, Wouter}
\begin{chunk}{axiom.bib}
@inproceedings{Alte07,
author = "Altenkirch, Thorsten and McBride, Conor and Swierstra, Wouter",
title = {{Observational Equality, Now!}},
booktitle = "ACM Workshop Programming Languages meets Program
Verification",
publisher = "ACM",
pages = "57--68",
year = "2007"
}
\end{chunk}
\index{Asperti, Andrea}
\index{Ricciotti, Wilmer}
\index{Coen, Claudio Sacerdoti}
\index{Tassi, Enrico}
\begin{chunk}{axiom.bib}
@article{Aspe09a,
author = "Asperti, Andrea and Ricciotti, Wilmer and Coen, Claudio
Sacerdoti and Tassi, Enrico",
title = {{Hints in Unification}},
journal = "LNCS",
volume = "5674",
pages = "84--98",
year = "2009",
isbn = "9783642033582",
abstract =
"Several mechanisms such as Canonical Structures, Type Classes, or
Pullbacks have been recently introduced with the aim to improve the
power and flexibility of the type inference algorithm for interactive
theorem provers. We claim that all these mechanisms are particular
instances of a simpler and more general technique, just consisting in
providing suitable hints to the unification procedure underlying type
inference. This allows a simple, modular and not intrusive
implementation of all the above mentioned techniques, opening at the
same time innovative and unexpected perspectives on its possible
applications.",
paper = "Aspe09a.pdf"
}
\end{chunk}
\index{Avigad, Jeremy}
\begin{chunk}{axiom.bib}
@article{Avig07,
author = "Avigad, Jeremy",
title = {{A Formally Verified Proof of the Prime Number Theorem}},
journal = "ACM Trans. Comput. Logic",
volume = "9",
number = "1",
pages = "2",
year = "2007"
}
\end{chunk}
\index{Barendregt, Hendrik Pieter}
\begin{chunk}{axiom.bib}
@article{Bare91,
author = "Barendregt, Hendrik Pieter",
title = {{An Introduction to Generalized Type Systems}},
journal = "Journal of Functional Programming",
volume = "1",
number = "2",
year = "1991",
pages = "125--154",
abstract =
"Programming languages often come with type systems. Some of these are
simple, others are sophisticated. As a stylistic representation of
types in programming languages several versions of typed lambda
calculus are studied. During the last 20 years many of these systems
have appeared, so there is some need of classification. Working
towards a taxonomy, Barendregt (1991) gives a fine-structure of the
theory of constructions (Coquand and Huet 1988) in the form of a
canonical cube of eight type systems ordered by inclusion. Berardi
(1988) and Terlouw (1988) have independently generalized the method of
constructing systems in the $\lambda$-cube. Moreover, Berardi (1988, 1990)
showed that the generalized type systems are flexible enough to
describe many logical systems. In that way the wellknown
propositionsastypes interpretation obtains a nice canonical form.",
paper = "Bare91.pdf",
keywords = "printed"
}
\end{chunk}
\index{Bertot, Yves}
\index{Cast\'eran, Pierre}
\begin{chunk}{axiom.bib}
@book{Bert04,
author = "Bertot, Yves and Cast\'eran, Pierre",
title = {{Interactive Theorem Proving and Program Development}},
publisher = "Springer",
year = "2004",
isbn = "3540208542",
abstract = "
Coq is an interactive proof assistant for the development of
mathematical theories and formally certified software. It is based on
a theory called the calculus of inductive constructions, a variant of
type theory.
This book provides a pragmatic introduction to the development of
proofs and certified programs using Coq. With its large collection of
examples and exercies it is an invaluable tool for researchers,
students, and engineers interested in formal methods and the
development of zerofault software."
}
\end{chunk}
\index{Bertot, Yves}
\index{Gonthier, Georges}
\index{Biha, Sidi Ould}
\index{Pasca, Ioana}
\begin{chunk}{axiom.bib}
@inproceedings{Bert08,
author = "Bertot, Yves and Gonthier, Georges and Biha, Sidi Ould and
Pasca, Ioana",
title = {{Canonical Big Operators}},
booktitle = "Theorem Proving in Higher Order Logics",
publisher = "Springer",
pages = "86--101",
year = "2008"
}
\end{chunk}
\index{Blanqui, Frederic}
\index{Jouannaud, Jean-Pierre}
\index{Okada, Mitsuhiro}
\begin{chunk}{axiom.bib}
@inproceedings{Blan99,
author = "Blanqui, Frederic and Jouannaud, Jean-Pierre and Okada, Mitsuhiro",
title = {{The Calculus of Algebraic Constructions}},
booktitle = "Rewriting Techniques and Applications RTA99",
year = "1999",
publisher = "LNCS 1631",
link = "\url{https://hal.inria.fr/inria00105545v1/document}",
abstract =
"This paper is concerned with the foundations of the Calculus of
Algebraic Constructions (CAC), an extension of the Calculus of
Constructions by inductive data types. CAC generalizes inductive
types equipped with higher-order primitive recursion, by providing
definitions of functions by pattern-matching which capture recursor
definitions for arbitrary non-dependent and non-polymorphic inductive
types satisfying a strict positivity condition. CAC also
generalizes the first-order framework of abstract data types by
providing dependent types and higher-order rewrite rules.",
paper = "Blan99.pdf",
keywords = "printed"
}
\end{chunk}
\index{Church, Alonzo}
\begin{chunk}{axiom.bib}
@article{Chur40,
author = "Church, Alonzo",
title = {{A Formulation of the Simple Theory of Types}},
journal = "J. of Symbolic Logic",
volume = "5",
number = "2",
year = "1940",
pages = "56--68",
abstract =
"The purpose of the present paper is to give a formulation of the
simple theory of types which incorporates certain features of the
calculus of $\lambda$conversion. A complete incorporation of the
calculus of $\lambda$conversion into the theory of types is
impossible if we require that $\lambda x$ and juxtaposition shall
retain their respective meanings as an abstraction operator and as
denoting the application of function to argument. But the present
partial incorporation has certain advantages from the point of view of
type theory and is offered as being of interest on this basis
(whatever may be thought of the finally satisfactory character of the
theory of types as a foundation for logic and mathematics).",
paper = "Chur40.pdf",
keywords = "printed"
}
\end{chunk}
\index{Ciolli, Gianni}
\index{Gentili, Graziano}
\index{Maggesi, Marco}
\begin{chunk}{axiom.bib}
@article{Ciol11,
author = "Ciolli, Gianni and Gentili, Graziano and Maggesi, Marco",
title = {{A Certified Proof of the Cartan Fixed Point Theorem}},
journal = "J. Autom. Reasoning",
volume = "47",
number = "3",
pages = "319--336",
year = "2011"
}
\end{chunk}
\index{Constable, R.L.}
\index{Allen, S.F.}
\index{Bromley, H.M.}
\index{Cremer, J.F.}
\index{Harper, R.W.}
\index{Howe, D.J.}
\index{Knoblock, T.B.}
\index{Mendler, N.P.}
\index{Panagaden, P.}
\index{Tsaaki, J.T.}
\index{Smith, S.F.}
\begin{chunk}{axiom.bib}
@book{Cons85,
author = "Constable, R.L. and Allen, S.F. and Bromley, H.M. and Cremer, J.F.
and Harper, R.W. and Howe, D.J. and Knoblock, T.B. and
Mendler, N.P. and Panagaden, P. and Tsaaki, J.T. and Smith, S.F.",
title = {{Implementing Mathematics with The Nuprl Proof Development System}},
publisher = "Prentice-Hall",
year = "1985"
}
\end{chunk}
\index{Coquand, Thierry}
\index{Huet, Gerard}
\begin{chunk}{axiom.bib}
@incollection{Coqu88,
author = "Coquand, Thierry and Huet, Gerard",
title = {{The Calculus of Constructions}},
booktitle = "Information and Computation, Volume 76",
year = "1988",
publisher = "Academic Press",
paper = "Coqu88.pdf",
keywords = "printed"
}
\end{chunk}
\index{Coquand, Thierry}
\index{Paulin, Christine}
\begin{chunk}{axiom.bib}
@inproceedings{Coqu90,
author = "Coquand, Thierry and Paulin, Christine",
title = {{Inductively Defined Types}},
booktitle = "Int. Conf. on Computer Logic",
publisher = "Springer",
pages = "5066",
year = "1990"
}
\end{chunk}
\index{Cramer, Marcos}
\index{Koepke, Peter}
\index{Schroder, Bernhard}
\begin{chunk}{axiom.bib}
@article{Cram11,
author = "Cramer, Marcos and Koepke, Peter and Schroder, Bernhard",
title = {{Parsing and Disambiguation of Symbolic Mathematics in the
Naproche System}},
journal = "LNAI",
number = "6824",
pages = "180--195",
year = "2011",
publisher = "Springer",
abstract =
"The Naproche system is a system for linguistically analysing and
proofchecking mathematical texts written in a controlled natural
language. The aim is to have an input language that is as close as
possible to the language that mathematicians actually use when
writing textbooks or papers.
Mathematical texts consist of a combination of natural language
and symbolic mathematics, with symbolic mathematics obeying its
own syntactic rules. We discuss the difficulties that a program
for parsing and disambiguating symbolic mathematics must face and
present how these difficulties have been tackled in the Naproche
system. One of these difficulties is the fact that information
provided in the preceding context  including information
provided in natural language  can influence the way a symbolic
expression has to be disambiguated.",
paper = "Cram11.pdf"
}
\end{chunk}
\index{Hales, Thomas C.}
\index{Harrison, John}
\index{McLaughlin, Sean}
\index{Nipkow, Tobias}
\index{Obua, Steven}
\index{Zumkeller, Roland}
\begin{chunk}{axiom.bib}
@article{Hale10,
author = "Hales, Thomas C. and Harrison, John and McLaughlin, Sean
and Nipkow, Tobias and Obua, Steven and Zumkeller,
Roland",
title = {{A Revision of the Proof of the Kepler Conjecture}},
journal = "Discrete and Computational Geometry",
volume = "44",
number = "1",
pages = "1--34",
year = "2010"
}
\end{chunk}
\index{Harrison, John}
\begin{chunk}{axiom.bib}
@inproceedings{Harr96a,
author = "Harrison, John",
title = {{HOL Light: A Tutorial Introduction}},
booktitle = "First Int. Conf. on Formal Methods in Computer-Aided Design",
publisher = "Springer",
pages = "265--269",
year = "1996"
}
\end{chunk}
\index{Harrison, John}
\begin{chunk}{axiom.bib}
@article{Harr09a,
author = "Harrison, John",
title = {{A Formalized Proof of Dirichlet's Theorem on Primes in
Arithmetic Progression}},
journal = "J. Formaliz. Reason.",
volume = "2",
number = "1",
pages = "63--83",
year = "2009"
}
\end{chunk}
\index{Harrison, John}
\begin{chunk}{axiom.bib}
@article{Harr09b,
author = "Harrison, John",
title = {{Formalizing an Analytic Proof of the Prime Number Theorem}},
journal = "J. Automated Reasoning",
volume = "43",
pages = "243--261",
year = "2009"
}
\end{chunk}
\index{Holzl, Johannes}
\index{Heller, Armin}
\begin{chunk}{axiom.bib}
@inproceedings{Holz11,
author = "Holzl, Johannes and Heller, Armin",
title = {{Three Chapters of Measure Theory in Isabelle / HOL}},
booktitle = "Interactive Theorem Proving",
publisher = "Springer",
pages = "135--151",
year = "2011"
}
\end{chunk}
\index{Huet, Gerard}
\index{Saibi, Amokrane}
\begin{chunk}{axiom.bib}
@inproceedings{Huet00,
author = "Huet, Gerard and Saibi, Amokrane",
title = {{Constructive Category Theory}},
booktitle = "Proof, Language, and Interaction: Essays in Honour of
Robin Milner",
publisher = "MIT Press",
pages = "235--275",
year = "2000"
}
\end{chunk}
\index{Kornilowicz, Artur}
\begin{chunk}{axiom.bib}
@article{Korn07,
author = "Kornilowicz, Artur",
title = {{A Proof of the Jordan Curve Theorem via the Brouwer Fixed
Point Theorem}},
journal = "Mechanized Mathematics and Its Applications",
volume = "6",
number = "1",
pages = "33--40",
year = "2007"
}
\end{chunk}
\index{Martin-L\"of, P.}
\begin{chunk}{axiom.bib}
@inproceedings{Mart73,
author = "Martin-L\"of, P.",
title = {{An Intuitionistic Theory of Types: Predicative Part}},
booktitle = "Logic Colloquium '73",
publisher = "North-Holland",
year = "1973"
}
\end{chunk}
\index{Mhamdi, Tarek}
\index{Hasan, Osman}
\index{Tahar, Sofiene}
\begin{chunk}{axiom.bib}
@inproceedings{Mham11,
author = "Mhamdi, Tarek and Hasan, Osman and Tahar, Sofiene",
title = {{Formalization of Entropy Measure in HOL}},
booktitle = "Interactive Theorem Proving",
publisher = "Springer",
pages = "233--248",
year = "2011"
}
\end{chunk}
\index{Nathanson, Melvyn B.}
\begin{chunk}{axiom.bib}
@article{Nath08,
author = "Nathanson, Melvyn B.",
title = {{Desperately Seeking Mathematical Truth}},
journal = "Notices of the American Math. Society",
volume = "55",
number = "7",
pages = "773",
year = "2008"
}
\end{chunk}
\index{Nipkow, Tobias}
\index{Paulson, Lawrence C.}
\index{Wenzel, Markus}
\begin{chunk}{axiom.bib}
@book{Nipk02a,
author = "Nipkow, Tobias and Paulson, Lawrence C. and Wenzel, Markus",
title = {{Isabelle / HOL. A Proof Assistant for HigherOrder Logic}},
publisher = "Springer",
year = "2002"
}
\end{chunk}
\index{O'Connor, Russell}
\begin{chunk}{axiom.bib}
@inproceedings{Ocon05,
author = "O'Connor, Russell",
title = {{Essential Incompleteness of Arithmetic Verified by Coq}},
booktitle = "Theorem Proving in Higher Order Logics",
publisher = "Springer",
pages = "245--260",
year = "2005"
}
\end{chunk}
\index{Dowek, Gilles}
\begin{chunk}{axiom.bib}
@inbook{Dowe01,
author = "Dowek, Gilles",
title = {{Handbook of Automated Reasoning, Vol II}},
publisher = "Elsevier Science",
year = "2001",
chapter = "16",
pages = "1009--1062"
}
\end{chunk}
\index{Feit, Walter}
\index{Thompson, John G.}
\begin{chunk}{axiom.bib}
@article{Feit63,
author = "Feit, Walter and Thompson, John G.",
title = {{Solvability of Groups of Odd Order}},
journal = "Pacific Journal of Mathematics",
volume = "13",
pages = "775--1029",
year = "1963"
}
\end{chunk}
\index{Ganesalingam, Mohan}
\begin{chunk}{axiom.bib}
@phdthesis{Gane09,
author = "Ganesalingam, Mohan",
title = {{The Language of Mathematics}},
school = "University of Cambridge",
year = "2009"
}
\end{chunk}
\index{Gonthier, Georges}
\begin{chunk}{axiom.bib}
@article{Gont08,
author = "Gonthier, Georges",
title = {{Formal Proof -- The Four Color Theorem}},
journal = "Notices Amer. Math. Soc.",
volume = "55",
number = "11",
pages = "1382--1393",
year = "2008"
}
\end{chunk}
\index{Gonthier, Georges}
\begin{chunk}{axiom.bib}
@inproceedings{Gont11,
author = "Gonthier, Georges",
title = {{Advances in the Formalization of the Odd Order Theorem}},
booktitle = "Interactive Theorem Proving",
publisher = "Springer",
pages = "2",
year = "2011"
}
\end{chunk}
\index{Gonthier, Georges}
\begin{chunk}{axiom.bib}
@inproceedings{Gont11a,
author = "Gonthier, Georges",
title = {{Point-Free, Set-Free Concrete Linear Algebra}},
booktitle = "Interactive Theorem Proving",
publisher = "Springer",
pages = "103--118",
year = "2011"
}
\end{chunk}
\index{Gonthier, Georges}
\index{Mahboubi, Assia}
\begin{chunk}{axiom.bib}
@article{Gont10,
author = "Gonthier, Georges and Mahboubi, Assia",
title = {{An Introduction to Small Scale Reflection in Coq}},
journal = "J. Formaliz. Reason.",
volume = "3",
number = "2",
pages = "95--152",
year = "2010"
}
\end{chunk}
\index{Gonthier, Georges}
\index{Mahboubi, Assia}
\index{Rideau, Laurence}
\index{Tassi, Enrico}
\index{Thery, Laurent}
\begin{chunk}{axiom.bib}
@inproceedings{Gont07,
author = "Gonthier, Georges and Mahboubi, Assia and Rideau, Laurence
and Tassi, Enrico and Thery, Laurent",
title = {{A Modular Formalisation of Finite Group Theory}},
booktitle = "Theorem Proving in Higher Order Logics",
publisher = "Springer",
pages = "86--101",
year = "2007"
}
\end{chunk}
\index{Gonthier, Georges}
\index{Ziliani, Beta}
\index{Nanevski, Aleksandar}
\index{Dreyer, Derek}
\begin{chunk}{axiom.bib}
@inproceedings{Gont11b,
author = "Gonthier, Georges and Ziliani, Beta and Nanevski, Aleksandar
and Dreyer, Derek",
title = {{How to make Ad Hoc Proof Automation less Ad Hoc}},
booktitle = "Int. Conf. on Functional Programming",
publisher = "ACM",
pages = "163--175",
year = "2011"
}
\end{chunk}
\index{Grabowski, Adam}
\index{Kornilowicz, Artur}
\index{Naumowicz, Adam}
\begin{chunk}{axiom.bib}
@article{Grab10,
author = "Grabowski, Adam and Kornilowicz, Artur and Naumowicz, Adam",
title = {{Mizar in a Nutshell}},
journal = "J. Formaliz. Reason.",
volume = "3",
number = "2",
pages = "153--245",
year = "2010"
}
\end{chunk}
\index{Hales, Thomas C.}
\begin{chunk}{axiom.bib}
@article{Hale07,
author = "Hales, Thomas C.",
title = {{The Jordan Curve Theorem, Formally and Informally}},
journal = "Amer. Math. Monthly",
volume = "114",
number = "10",
pages = "882--894",
year = "2007"
}
\end{chunk}
\index{Saibi, Amokrane}
\begin{chunk}{axiom.bib}
@inproceedings{Saib97,
author = "Saibi, Amokrane",
title = {{Typing Algorithm in Type Theory with Inheritance}},
booktitle = "Symp. on Principles of Programming Languages",
publisher = "ACM",
pages = "292--301",
year = "1997"
}
\end{chunk}
\index{Shankar, Natarajan}
\index{Owre, Sam}
\begin{chunk}{axiom.bib}
@inproceedings{Shan00,
author = "Shankar, Natarajan and Owre, Sam",
title = {{Principles and Pragmatics of Subtyping in PVS}},
booktitle = "Recent Trends in Algebraic Development Techniques",
publisher = "Springer",
pages = "37--52",
year = "2000"
}
\end{chunk}
\index{Strub, Pierre-Yves}
\begin{chunk}{axiom.bib}
@inproceedings{Stru10,
author = "Strub, Pierre-Yves",
title = {{Coq Modulo Theory}},
booktitle = "19th Annual Conf. on Computer Science Logic",
publisher = "Springer",
pages = "549--643",
year = "2010"
}
\end{chunk}
\index{Troelstra, A.S.}
\index{van Dalen, Dirk}
\begin{chunk}{axiom.bib}
@book{Troe88,
author = "Troelstra, A.S. and van Dalen, Dirk",
title = {{Constructivism in Mathematics, Vol 2}},
publisher = "North-Holland",
year = "1988"
}
\end{chunk}
\index{Tait, William W.}
\begin{chunk}{axiom.bib}
@article{Tait86,
author = "Tait, William W.",
title = {{Truth and Proof: The Platonism of Mathematics}},
link = "\url{logic.harvard.edu/EFI_Tait_PlatonisminMathematics.pdf}",
journal = "Synthese",
volume = "69",
pages = "341--370",
year = "1986",
paper = "Tait86.pdf",
keywords = "printed"
}
\end{chunk}
\index{Werner, Benjamin}
\begin{chunk}{axiom.bib}
@inbook{Wern97,
author = "Werner, Benjamin",
title = {{Sets in Types, Types in Sets}},
booktitle = "Theoretical Aspects of Computer Software",
publisher = "Springer",
chapter = "unknown",
pages = "530546",
year = "1997"
}
\end{chunk}
\index{Parigot, Michel}
\begin{chunk}{axiom.bib}
@article{Pari92,
author = "Parigot, Michel",
title = {{$\lambda\mu$Calculus: An Algorithmic Interpretation of
Classical Natural Deduction}},
journal = "LNCS",
volume = "624",
pages = "190--201",
year = "1992",
paper = "Pari92.pdf"
}
\end{chunk}
\index{Parigot, Michel}
\begin{chunk}{axiom.bib}
@article{Pari92a,
author = "Parigot, Michel",
title = {{Recursive Programming with Proofs}},
journal = "Theoretical Computer Science",
volume = "94",
pages = "335--356",
year = "1992",
paper = "Pari92a.pdf"
}
\end{chunk}
\index{Murthy, Chetan R.}
\begin{chunk}{axiom.bib}
@techreport{Murt91,
author = "Murthy, Chetan R.",
title = {{Classical Proofs as Programs: How, What and Why}},
type = "technical report",
institution = "Cornell University",
number = "TR911215",
year = "1991",
abstract =
"We recapitulate Friedman's conservative extension result of
(suitable) classical over constructive systems for $\prod_2^0$
sentences, viewing it in two lights: as a translation of programs
from an almost-functional language (with $C$) back to its
functional core, and as a translation of a constructive logic for
a functional language to a classical logic for an
almost-functional language. We investigate the computational
properties of the translation and of classical proofs and
characterize the classical proofs which give constructions in
concrete, computational terms, rather than logical terms. We
characterize different versions of Friedman's translation as
translating slightly different almost-functional languages to a
functional language, thus giving a general method for arriving at
a sound reduction semantics for an almost-functional language with
a mixture of eager and lazy constructors and destructors, as well
as integers, pairs, unions, etc. Finally, we describe how to use
classical reasoning in a disciplined manner in giving classical
(yet constructivizable) proofs of sentences of greater complexity
than $\prod_2^0$. This direction offers the possibility of
applying classical reasoning to more general programming problems.",
paper = "Murt91.pdf"
}
\end{chunk}
\index{Turing, A. M.}
\begin{chunk}{axiom.bib}
@misc{Turi48,
author = "Turing, A. M.",
title = {{Intelligent Machinery}},
year = "1948",
link = "\url{https://weightagnostic.github.io/papers/turning1948.pdf}",
abstract =
"The possible ways in which machinery might be made to show
intelligent behaviour are discussed. The analogy with the human
brain is used as a guiding principle. It is pointed out that the
potentialities of the human intelligence can only be realized if
suitable education is provided. The investigation mainly centres
round an analogous teaching process applied to machines. The idea
of an unorganized machine is defined, and it is suggested that the
infant human cortex is of this nature. Simple examples of such
machines are given, and their education by means of rewards and
punishments is discussed. In one case the education process is
carried through until the organization is similar to that of an
ACE.",
paper = "Turi48.pdf",
keywords = "printed"
}
\end{chunk}
\index{Bailleux, Olivier}
\begin{chunk}{axiom.bib}
@misc{Bail19,
author = "Bailleux, Olivier",
title = {{Subsumptiondriven Clause Learning with DPLL+Restarts}},
year = "2019",
link = "\url{https://arxiv.org/pdf/1906.07508.pdf}",
abstract =
"We propose to use a DPLL+restart to solve SAT instances by
successive simplifications based on the production of clauses that
subsume the initial clauses. We show that this approach allows the
refutation of pebbling formulae in polynomial time and linear
space, as effectively as with a CDCL solver.",
paper = "Bail19.pdf"
}
\end{chunk}
\index{Clark, Kevin}
\begin{chunk}{axiom.bib}
@misc{Clar11,
author = "Clark, Kevin",
title = {{An Algorithm that Decides PRIMES in Polynomial Time}},
year = "2011",
link =
"\url{https://sites.math.washington.edu/~morrow/336_11/papers/kevin.pdf}",
paper = "Clar11.pdf",
keywords = "printed"
}
\end{chunk}
\index{van Tonder, Rijnard}
\index{Le Goues, Claire}
\begin{chunk}{axiom.bib}
@inproceedings{Tond19,
author = "van Tonder, Rijnard and Le Goues, Claire",
title = {{Lightweight Multi-Language Syntax Transformation with
Parser Parser Combinators}},
booktitle = "PLDI",
publisher = "ACM",
isbn = "9781450367127",
year = "2019",
paper = "Tond19.pdf"
}
\end{chunk}
\index{Qiu, Xiaokang}
\index{Garg, Pranav}
\index{Stefanescu, Andrei}
\index{Madhusudan, P.}
\begin{chunk}{axiom.bib}
@misc{Qiux13,
author = "Qiu, Xiaokang and Garg, Pranav and Stefanescu, Andrei and
Madhusudan, P.",
title = {{Natural Proofs for Structure, Data, and Separation}},
year = "2013",
link = "\url{http://madhu.cs.illinois.edu/dryad_full_version.pdf}",
abstract =
"We propose {\sl natural proofs} for reasoning with programs that
manipulate data-structures against complex specifications ---
specifications that describe the structure of the heap, the data
stored within it, and separation and framing of
substructures. Natural proofs are a subclass of proofs that are
amenable to completely automated reasoning, that provide sound but
incomplete procedures, and that capture common reasoning tactics
in program verification. We develop a dialect of separation logic
over heaps, called DRYAD, with recursive definitions that avoids
explicit quantification. We develop ways to reason with heaplets
using classical logic over the theory of sets, and develop natural
proofs for reasoning using proof tactics involving disciplined
unfoldings and formula abstractions. Natural proofs are encoded
into decidable theories of first-order logic so as to be
discharged using SMT solvers.
We also implement the technique and show that a large class of
more than 100 correct programs that manipulate data-structures are
amenable to full functional correctness using the proposed natural
proof method. These programs are drawn from a variety of sources
including standard data-structures, the Schorr-Waite algorithm for
garbage collection, a large number of low-level C routines from
the Glib library, the OpenBSD library and the Linux kernel, and
routines from a secure verified OS-browser project. Our work is
the first that we know of that can handle such a wide range of
full functional verification properties of heaps automatically,
given pre/post and loop invariant annotations. We believe that
this work paves the way for the deductive verification technology
to be used by programmers who do not (and need not) understand the
internals of the underlying logic solvers, significantly
increasing their applicability in building reliable systems.",
paper = "Qiux13.pdf",
keywords = "printed"
}
\end{chunk}
\index{Propp, James}
\begin{chunk}{axiom.bib}
@misc{Prop13,
author = "Propp, James",
title = {{Real Analysis in Reverse}},
year = "2013",
link = "\url{https://arxiv.org/pdf/1204.4483.pdf}",
abstract =
"Many of the theorems of real analysis, against the background of
the ordered field axioms, are equivalent to Dedekind completeness,
and hence can serve as completeness axioms for the reals. In the
course of demonstrating this, the article offers a tour of some
less-familiar ordered fields, provides some of the relevant
history, and considers pedagogical implications.",
paper = "Prop13.pdf"
}
\end{chunk}
\index{Coquand, Thierry}
\begin{chunk}{axiom.bib}
@techreport{Coqu86,
author = "Coquand, Thierry",
title = {{An Analysis of Girard's Paradox}},
year = "1986",
institution = "INRIA Centre de Rocquencourt",
number = "531",
abstract =
"We study the consistency of a few formal systems specially some
extensions of Church's calculus and the construction system. We
show that Church's calculus is not compatible with the notion of
secondorder type. We apply this result for showing that the
calculus of construction with four levels is inconsistent. We
suggest finally some consistent extensions of these two calculi.",
paper = "Coqu86.pdf"
}
\end{chunk}
\index{Grossman, Dan}
\begin{chunk}{axiom.bib}
@inproceedings{Gros02,
author = "Grossman, Dan",
title = {{Existential Types for Imperative Languages}},
booktitle = "Euro. Symp. on Prog. Langs. and Systems",
publisher = "Springer-Verlag",
pages = "21--35",
year = "2002",
isbn = "3-540-43363-5",
abstract =
"We integrate existential types into a strongly typed C-like
language. In particular, we show how a bad combination of
existential types, mutation, and aliasing can cause a subtle
violation of type safety. We explore two independent ways to
strengthen the type system to restore safety. One restricts the
mutation of existential packages. The other restricts the types of
aliases of existential packages. We use our framework to explain why
other languages with existential types are safe.",
paper = "Gros02.pdf"
}
\end{chunk}
\index{Jung, Ralf}
\index{Jourdan, JacquesHenri}
\index{Krebbers, Robbert}
\index{Dreyer, Derek}
\begin{chunk}{axiom.bib}
@inproceedings{Jung18,
author = "Jung, Ralf and Jourdan, JacquesHenri and
Krebbers, Robbert and Dreyer, Derek",
title = {{RustBelt: Securing the Foundations of the Rust Programming
Language}},
booktitle = "POPL '18",
publisher = "ACM",
year = "2018",
abstract =
"Rust is a new systems programming language that promises to
overcome the seemingly fundamental tradeoff between high-level
safety guarantees and low-level control over resource
management. Unfortunately, none of Rust's safety claims have been
formally proven, and there is good reason to question whether they
actually hold. Specifically, Rust employs a strong,
ownership-based type system, but then extends the expressive
power of this core type system through libraries that internally
use unsafe features. In this paper, we give the first formal (and
machine-checked) safety proof for a language representing a
realistic subset of Rust. Our proof is extensible in the sense
that, for each new Rust library that uses unsafe features, we can
say what verification condition it must satisfy in order for it to
be deemed a safe extension to the language. We have carried out
this verification for some of the most important libraries that
are used throughout the Rust ecosystem.",
paper = "Jung18.pdf"
}
\end{chunk}
\index{Grossman, Dan}
\begin{chunk}{axiom.bib}
@article{Gros06,
author = "Grossman, Dan",
title = {{Quantified Types for Imperative Languages}},
journal = "Trans. on Prog. Lang. and Systems",
volume = "28",
number = "3",
year = "2006",
pages = "429--475",
abstract =
"We describe universal types, existential types, and type
constructors in Cyclone, a strongly-typed C-like language. We show
how the language naturally supports first-class polymorphism and
polymorphic recursion while requiring an acceptable amount of
explicit type information. More importantly, we consider the
soundness of type variables in the presence of C-style mutation
and the address-of operator. For polymorphic references, we
describe a solution more natural for the C level than the ML-style
``value restriction''. For existential types, we discover and
subsequently avoid a subtle unsoundness issue resulting from the
address-of operator. We develop a formal abstract machine and
type-safety proof that captures the essence of type variables at
the C level.",
paper = "Gros06.pdf"
}
\end{chunk}
\index{Sarkar, Dipanwita}
\index{Waddell, Oscar}
\index{Dybvig, R. Kent}
\begin{chunk}{axiom.bib}
@inproceedings{Sark04,
author = "Sarkar, Dipanwita and Waddell, Oscar and Dybvig, R. Kent",
title = {{A Nanopass Infrastructure for Compiler Education}},
booktitle = "9th ACM SIGPLAN",
publisher = "ACM",
pages = "201--212",
year = "2004",
isbn = "1581139055",
abstract =
"A compiler structured as a small number of monolithic passes is
difficult to understand and difficult to maintain. The steep
learning curve is daunting, and even experienced developers find
that modifying existing passes is difficult and often introduces
subtle and tenacious bugs. These problems are especially
frustrating when the developer is a student in a compiler
class. An attractive alternative is to structure a compiler as a
collection of many fine-grained passes, each of which performs a
single task. This structure aligns the implementation of a
compiler with its logical organization, simplifying development,
testing, and debugging. This paper describes the methodology and
tools comprising a framework for constructing such compilers.",
paper = "Sark04.pdf",
keywords = "printed"
}
\end{chunk}
\index{SanchezStern, Alex}
\index{Alhessi, Yousef}
\index{Saul, Lawrence}
\index{Lerner, Sorin}
\begin{chunk}{axiom.bib}
@misc{Sanc19,
author = "SanchezStern, Alex and Alhessi, Yousef and Saul, Lawrence
and Lerner, Sorin",
title = {{Generating Correctness Proofs with Neural Networks}},
year = "2019",
link = "\url{https://arxiv.org/pdf/1907.07794.pdf}",
abstract =
"Foundational verification allows programmers to build software
which has been empirically shown to have high levels of assurance
in a variety of important domains. However, the cost of producing
foundationally verified software remains prohibitively high for
most projects, as it requires significant manual effort by highly
trained experts. In this paper we present Proverbot9001, a proof
search system using machine learning techniques to produce proofs
of software correctness in interactive theorem provers. We
demonstrate Proverbot9001 on the proof obligations from a large
practical proof project, the CompCert verified C compiler, and
show that it can effectively automate what were previously manual
proofs, automatically solving 15.77\% of proofs in our test
dataset. This corresponds to an over 3X improvement over the prior
state of the art machine learning technique for generating proofs
in Coq.",
paper = "Sanc19.pdf"
}
\end{chunk}
\index{Christiansen, David Thrane}
\begin{chunk}{axiom.bib}
@misc{Chri12,
author = "Christiansen, David Thrane",
title = {{Converting Regular Expressions to Discrete Finite Automata}},
year = "2012",
link = "\url{http://davidchristiansen.dk/tutorials/regextonfa.pdf}",
paper = "Chri12.pdf"
}
\end{chunk}
\index{Christiansen, David Thrane}
\begin{chunk}{axiom.bib}
@misc{Chri13,
author = "Christiansen, David Thrane",
title = {{Bidirectional Typing Rules: A Tutorial}},
year = "2013",
link = "\url{http://davidchristiansen.dk/tutorials/bidirectional.pdf}",
paper = "Chri13.pdf"
}
\end{chunk}
\index{Christiansen, David Thrane}
\begin{chunk}{axiom.bib}
@misc{Chri14,
author = "Christiansen, David Thrane",
title = {{A Tutorial on Polymorphic Type Derivations}},
year = "2014",
link = "\url{http://davidchristiansen.dk/tutorials/typeruletutorial.pdf}",
paper = "Chri14.pdf"
}
\end{chunk}
\index{Christiansen, David Thrane}
\begin{chunk}{axiom.bib}
@misc{Chri18,
author = "Christiansen, David Thrane",
title = {{A Little Taste of Dependent Types}},
year = "2018",
link = "\url{https://www.youtube.com/watch?v=VxINoKFmS4}",
abstract =
"Dependent types let us use the same programming language for
compile-time and runtime code, and are inching their way towards the
mainstream from research languages like Coq, Agda and Idris. Dependent
types are useful for programming, but they also unite programming and
mathematical proofs, allowing us to use the tools and techniques we
know from programming to do math.
The essential beauty of dependent types can sometimes be hard to find
under layers of powerful automatic tools. The Little Typer is an
upcoming book on dependent types in the tradition of the The Little
Schemer that features a tiny dependently typed language called Pie. We
will demonstrate a proof in Pie that is also a program."
}
\end{chunk}
\index{Christiansen, David Thrane}
\begin{chunk}{axiom.bib}
@misc{Chri18a,
author = "Christiansen, David Thrane",
title = {{Coding for Types: The Universe Pattern in Idris}},
year = "2018",
link = "\url{https://www.youtube.com/watch?v=AWeT_G04a0A}"
}
\end{chunk}
\index{Christiansen, David Thrane}
\begin{chunk}{axiom.bib}
@misc{Chri19,
author = "Christiansen, David Thrane",
title = {{Bidirectional Type Checking}},
year = "2019",
link = "\url{http://www.youtube.com/watch?v=utyBNDj7s2w}"
}
\end{chunk}
\index{Coquand, Thierry}
\begin{chunk}{axiom.bib}
@misc{Coqu96a,
author = "Coquand, Thierry",
title = {{An Algorithm for TypeChecking Dependent Types}},
year = "1996",
abstract =
"We present a simple typechecker for a language with dependent
types and let expressions, with a simple proof of correctness.",
paper = "Coqu96a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Pierce, Benjamin C.}
\index{Turner, David N.}
\begin{chunk}{axiom.bib}
@misc{Pier98,
author = "Pierce, Benjamin C. and Turner, David N.",
title = {{Local Type Inference}},
year = "1998",
link =
"\url{http://www.cis.upenn.edu/~bcpierce/papers/ltitoplas.pdf}",
abstract =
"We study two partial type inference methods for a language
combining subtyping and impredicative polymorphism. Both methods
are local in the sense that missing annotations are recovered
using only information from adjacent nodes in the syntax tree,
without long-distance constraints such as unification
variables. One method infers type arguments in polymorphic
applications using a local constraint solver. The other infers
annotations on bound variables in function abstractions by
propagating type constraints downward from enclosing application
nodes. We motivate our design choices by a statistical analysis of
the uses of type inference in a sizable body of existing ML code.",
paper = "Pier98.pdf",
keywords = "printed"
}
\end{chunk}
\index{Davies, Rowan}
\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@misc{Davi00,
author = "Davies, Rowan and Pfenning, Frank",
title = {{Intersection Types and Computational Effects}},
year = "2000",
link = "\url{http://www.cs.cmu.edu/~fp/papers/icpf00.pdf}",
abstract =
"We show that standard formulations of intersection type systems
are unsound in the presence of computational effects, and propose
a solution similar to the value restriction for polymorphism
adopted in the revised definition of Standard ML. It differs in
that it is not tied to let-expressions and requires an additional
weakening of the usual subtyping rules. We also present a
bidirectional typechecking algorithm for the resulting language
that does not require an excessive amount of type annotations and
illustrate it through some examples. We further show that the type
assignment system can be extended to incorporate parametric
polymorphism. Taken together, we see our system and associated
typechecking algorithm as a significant step towards the
introduction of intersection types into realistic programming
languages. The added expressive power would allow many more
properties of programs to be stated by the programmer and
statically verified by the compiler.",
paper = "Davi00.pdf",
keywords = "printed"
}
\end{chunk}
\index{Odersky, Martin}
\index{Zenger, Christoph}
\index{Zenger, Matthias}
\begin{chunk}{axiom.bib}
@misc{Oder01,
author = "Odersky, Martin and Zenger, Christoph and
Zenger, Matthias",
title = {{Colored Local Type Inference}},
year = "2001",
abstract =
"We present a type system for a language based on $F_{\le}$, which
allows certain type annotations to be elided in actual
programs. Local type inference determines types by a combination
of type propagation and local constraint solving, rather than by
global constraint solving. We refine the previously existing local
type inference system of Pierce and Turner by allowing partial
type information to be propagated. This is expressed by coloring
types to indicate propagation directions. Propagating partial type
information allows us to omit type annotations for the visitor
pattern, the analogue of pattern matching in languages without sum
types.",
paper = "Oder01.pdf",
keywords = "printed"
}
\end{chunk}
\index{Norell, Ulf}
\begin{chunk}{axiom.bib}
@phdthesis{Nore07,
author = "Norell, Ulf",
title = {{Towards a Practical Programming Language Based on
Dependent Type Theory}},
school = "Chalmers University",
year = "2007",
link = "\url{http://www.cse.chalmers.se/~ulfn/papers/thesis.pdf}",
abstract =
"Dependent type theories have a long history of being used for
theorem proving. One aspect of type theory which makes it very
powerful as a proof language is that it mixes deduction with
computation. This also makes type theory a good candidate for
programming --- the strength of the type system allows properties
of programs to be stated and established, and the computational
properties provide semantics for the programs.
This thesis is concerned with bridging the gap between the
theoretical presentations of type theory and the requirements of
practical programming languages. Although there are many
challenging research problems left to solve before we have an
industrial scale programming language based on type theory, this
thesis takes us a good step along the way.
In functional programming languages pattern matching provides a
concise notation for defining functions. In dependent type theory,
pattern matching becomes even more powerful, in that inspecting
the value of a particular term can reveal information about the
types and values of other terms. In this thesis we give a type
checking algorithm for definitions by pattern matching in type
theory, supporting overlapping patterns, and pattern matching on
intermediate results using the {\sl with} rule.
Traditional presentations of type theory suffer from rather
verbose notation, cluttering programs and proofs with, for
instance, explicit type information. One solution to this problem
is to allow terms that can be inferred automatically to be
omitted. This is usually implemented by inserting metavariables in
place of the omitted terms and using unification to solve these
metavariables during type checking. We present a type checking
algorithm for a theory with metavariables and prove its soundness
independent of whether the metavariables are solved or not.
In any programming language it is important to be able to
structure large programs into separate units or modules and limit
the interaction between these modules. In this thesis we present a
simple, but powerful module system for a dependently typed
language. The main focus of the module system is to manage the
name space of a program, and an important characteristic is a
clear separation between the module system and the type checker,
making it largely independent of the underlying language.
As a side track, not directly related to the use of type theory
for programming, we present a connection between type theory and
a first-order logic theorem prover. This connection saves the user
the burden of proving simple, but tedious first-order theorems by
leaving them for the prover. We use a transparent translation to
first-order logic which makes the proofs constructed by the
theorem prover human readable. The soundness of the connection is
established by a general metatheorem.
Finally we put our work into practice in the implementation of a
programming language, Agda, based on type theory. As an
illustrating example, we show how to program a simple certified
prover for equations in a commutative monoid, which can be used
internally in Agda. Much more impressive examples have been done
by others, showing that the ideas developed in this thesis are
viable in practice.",
paper = "Nore07.pdf"
}
\end{chunk}
\index{Dunfield, Joshua}
\index{Krishnaswami, Neelakantan R.}
\begin{chunk}{axiom.bib}
@misc{Dunf18,
author = "Dunfield, Joshua and Krishnaswami, Neelakantan R.",
title = {{Sound and Complete Bidirectional Typechecking for
Higher-Rank Polymorphism with Existentials and
Indexed Types}},
year = "2018",
link = "\url{https://arxiv.org/pdf/1601.05106.pdf}",
abstract =
"Bidirectional typechecking, in which terms either synthesize a
type or are checked against a known type, has become popular for
its applicability to a variety of type systems, its error
reporting, and its ease of implementation. Following principles
from proof theory, bidirectional typing can be applied to many
type constructs. The principles underlying a bidirectional
approach to indexed types ({\sl generalized algebraic datatypes})
are less clear. Building on prooftheoretic treatments of
equality, we give a declarative specification of typing based on
{\sl focalization}. This approach permits declarative rules for
coverage of pattern matching, as well as support for firstclass
existential types using a focalized subtyping judgment. We use
refinement types to avoid explicitly passing equality proofs in
our term syntax, making our calculus similar to languages such as
Haskell and OCaml. We also extend the declarative specification
with explicit rules for deducing when a type is principal,
permitting us to give a complete declarative specification for a
rich type system with significant type inference. We also give a
set of algorithmic typing rules, and prove that it is sound and
complete with respect to the declarative system. The proof
requires a number of technical innovations, including proving
soundness and completeness in a mutually recursive fashion.",
paper = "Dunf18.pdf"
}
\end{chunk}
\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@misc{Pfen04a,
author = "Pfenning, Frank",
title = {{Lecture Notes on Bidirectional Type Checking}},
year = "2004",
paper = "Pfen04a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Fieker, Claus}
\index{Hart, William}
\index{Hofmann, Tommy}
\index{Johansson, Fredrik}
\begin{chunk}{axiom.bib}
@inproceedings{Fiek17,
author = "Fieker, Claus and Hart, William and Hofmann, Tommy and
Johansson, Fredrik",
title = {{Nemo/Hecke: Computer Algebra and Number Theory Package
for the Julia Programming Language}},
booktitle = "ISSAC'17",
publisher = "ACM",
year = "2017",
pages = "157164",
abstract =
"We introduce two new packages, Nemo and Hecke, written in the
Julia programming language for computer algebra and number
theory. We demonstrate that high performance generic algorithms
can be implemented in Julia, without the need to resort to a
low-level C implementation. For specialised algorithms, we use
Julia's efficient native C interface to wrap existing C/C++
libraries such as Flint, Arb, Antic and Singular. We give examples
of how to use Hecke and Nemo and discuss some algorithms that we
have implemented to provide high performance basic arithmetic.",
paper = "Fiek17.pdf",
keywords = "printed"
}
\end{chunk}
\index{Yallop, Jeremy}
\index{White, Leo}
\begin{chunk}{axiom.bib}
@inproceedings{Yall19,
author = "Yallop, Jeremy and White, Leo",
title = {{Lambda: The Ultimate Sublanguage (Experience Report)}},
booktitle = "Inter. Conf. on Functional Programming",
publisher = "ACM",
year = "2019",
abstract =
"We describe our experience teaching an advanced typed functional
programming course based around the use of System $F_\omega$ as a
programming language.",
paper = "Yall19.pdf"
}
\end{chunk}
\index{Zhao, Jinxu}
\index{Oliveira, Bruno C.D.S}
\index{Schrijvers, Tom}
\begin{chunk}{axiom.bib}
@inproceedings{Zhao19,
author = "Zhao, Jinxu and Oliveira, Bruno C.D.S and
Schrijvers, Tom",
title = {{A Mechanical Formalization of Higher-Ranked Polymorphic
Type Inference}},
booktitle = "Inter. Conf. on Functional Programming",
publisher = "ACM",
year = "2019",
abstract =
"Modern functional programming languages, such as Haskell or
OCaml, use sophisticated forms of type inference. While an
important topic in the Programming Languages research, there is
little work on the mechanization of the metatheory of type
inference in theorem provers. In particular we are unaware of any
complete formalization of the type inference algorithms that are
the backbone of modern functional languages.
This paper presents the first full mechanical formalization of the
metatheory for higher-ranked polymorphic type inference. The
system that we formalize is the bidirectional type system by
Dunfield and Krishnaswami (DK). The DK type system has two
variants (a declarative and an algorithmic one) that have been
manually proven sound, complete and decidable. We present a
mechanical formalization in the Abella theorem provers of DK's
declarative type system with a novel algorithmic system. We have a
few reasons to use a new algorithm. Firstly, our new algorithm
employs worklist judgments, which precisely capture the scope of
variables and simplify the formalization of scoping in a theorem
prover. Secondly, while DK's original formalization comes with
very well-written manual proofs, there are several details missing
and some incorrect proofs, which complicate the task of writing a
mechanized proof. Despite the use of a different algorithm we
prove the same results as DK, although with significantly
different proofs and proof techniques. Since such type inference
algorithms are quite subtle and have a complex metatheory,
mechanical formalizations are an important advance in type
inference research.",
paper = "Zhao19.pdf",
keywords = "printed"
}
\end{chunk}
\index{Swierstra, Wouter}
\index{Baanen, Tim}
\begin{chunk}{axiom.bib}
@inproceedings{Swie19,
author = "Swierstra, Wouter and Baanen, Tim",
title = {{A Predicate Transformer Semantics for Effects}},
booktitle = "Inter. Conf. on Functional Programming",
publisher = "ACM",
year = "2019",
abstract =
"Reasoning about programs that use effects can be much harder than
reasoning about their pure counterparts. This paper presents a
predicate transformer semantics for a variety of effects,
including exceptions, state, nondeterminism, and general
recursion. The predicate transformer semantics gives rise to a
refinement relation that can be used to relate a program to its
specification, or even calculate effectful programs that are
correct by construction.",
paper = "Swie19.pdf"
}
\end{chunk}
\index{Eremondi, Joseph}
\index{Tanter, Eric}
\index{Garcia, Ronald}
\begin{chunk}{axiom.bib}
@inproceedings{Erem19,
author = "Eremondi, Joseph and Tanter, Eric and Garcia, Ronald",
title = {{Approximate Normalization for Gradual Dependent Types}},
booktitle = "Inter. Conf. on Functional Programming",
publisher = "ACM",
year = "2019",
abstract =
"Dependent types help programmers write highly reliable
code. However, this reliability comes at a cost: it can be
challenging to write new prototypes in (or migrate old code to)
dependently-typed programming languages. Gradual typing makes
static type disciplines more flexible, so an appropriate notion of
gradual dependent types could fruitfully lower this cost. However,
dependent types raise unique challenges for gradual
typing. Dependent typechecking involves the execution of program
code, but gradually-typed code can signal runtime type errors or
diverge. These runtime errors threaten the soundness guarantees
that make dependent types so attractive, while divergence spoils
the typedriven programming experience.
This paper presents GDTL, a gradual dependently-typed language
that emphasizes pragmatic dependently-typed programming. GDTL
fully embeds both an untyped and dependently-typed language, and
allows for smooth transitions between the two. In addition to
gradual types we introduce gradual terms, which allow the user to
be imprecise in type indices and to omit proof terms; runtime
checks ensure type safety. To account for nontermination and
failure, we distinguish between compile-time normalization and
runtime execution: compile-time normalization is approximate but
total, while runtime execution is exact, but may fail or
diverge. We prove that GDTL has decidable typechecking and
satisfies all the expected properties of gradual languages. In
particular, GDTL satisfies the static and dynamic gradual
guarantees: reducing type precision preserves typedness, and
altering type precision does not change program behavior outside
of dynamic type failures. To prove these properties, we were led
to establish a novel normalization gradual guarantee that
captures the monotonicity of approximate normalization with
respect to imprecision.",
paper = "Erem19.pdf"
}
\end{chunk}
\index{Patterson, Daniel}
\index{Ahmed, Amal}
\begin{chunk}{axiom.bib}
@inproceedings{Patt19,
author = "Patterson, Daniel and Ahmed, Amal",
title = {{The Next 700 Compiler Correctness Theorems}},
booktitle = "Inter. Conf. on Functional Programming",
publisher = "ACM",
year = "2019",
abstract =
"Compiler correctness is an old problem, with results stretching
back beyond the last half-century. Founding the field, John
McCarthy and James Painter set out to build a 'completely
trustworthy compiler'. And yet, until quite recently, even despite
truly impressive verification efforts, the theorems being proved
were only about the compilation of whole programs, a theoretically
quite appealing but practically unrealistic simplification. For a
compiler correctness theorem to assure complete trust, the theorem
must reflect the reality of how the compiler will be used.
There has been much recent work on more realistic 'compositional'
compiler correctness aimed at proving correct compilation of
components while supporting linking with components compiled from
different languages using different compilers. However, the
variety of theorems, stated in remarkably different ways, raises
questions about what researchers even mean by a 'compiler is
correct'. In this pearl, we develop a new framework with which to
understand compiler correctness theorems in the presence of
linking, and apply it to understanding and comparing this
diversity of results. In doing so, not only are we better able to
assess their relative strengths and weaknesses, but gain insight
into what we as a community should expect from compiler
correctness theorems of the future.",
paper = "Patt19.pdf"
}
\end{chunk}
\index{Paraskevopoulou, Zoe}
\index{Appel, Andrew W.}
\begin{chunk}{axiom.bib}
@inproceedings{Para19,
author = "Paraskevopoulou, Zoe and Appel, Andrew W.",
title = {{Closure Conversion is Safe for Space}},
booktitle = "Inter. Conf. on Functional Programming",
publisher = "ACM",
year = "2019",
abstract =
"We formally prove that closure conversion with flat environments
for CPS lambda calculus is correct (preserves semantics) and safe
for time and space, meaning that produced code preserves the time
and space required for execution of the source program.
We give a cost model to pre- and post-closure-conversion code by
formalizing profiling semantics that keep track of the time and
space resources needed for the execution of a program, taking
garbage collection into account. To show preservation of time and
space we set up a general 'garbage-collection compatible' binary
logical relation that establishes invariants on resource
consumption of the related programs, along with functional
correctness. Using this framework, we show semantics preservation
and space and time safety for terminating source programs, and
divergence preservation and space safety for diverging source
programs.
This is the first formal proof of space-safety of a
closure-conversion transformation. The transformation and the
proof are parts of the CertiCoq compiler pipeline from Coq
(Gallina) through CompCert Clight to assembly language. Our
results are mechanized in the Coq proof assistant.",
paper = "Para19.pdf",
keywords = "printed"
}
\end{chunk}
\index{Cong, Youyou}
\index{Osvald, Leo}
\index{Essertel, Gregory M.}
\index{Rompf, Tiark}
\begin{chunk}{axiom.bib}
@inproceedings{Cong19,
author = "Cong, Youyou and Osvald, Leo and Essertel, Gregory M.
and Rompf, Tiark",
title = {{Compiling with Continuations, or without? Whatever}},
booktitle = "Inter. Conf. on Functional Programming",
publisher = "ACM",
year = "2019",
abstract =
"What makes a good compiler IR? In the context of functional
languages, there has been an extensive debate on the advantages
and disadvantages of continuation-passing style (CPS). The
consensus seems to be that some form of explicit continuations is
necessary to model jumps in a functional style, but that they
should have a 2nd-class status, separate from regular functions,
to ensure efficient code generation. Building on this observation,
a recent study from PLDI 2017 proposed a direct-style IR with
explicit join points, which essentially represent local
continuations, i.e. functions that do not return or escape. While
this IR can work well in practice, as evidenced by the
implementation of join points in the Glasgow Haskell Compiler
(GHC), there still seems to be room for improvement, especially
with regard to the way continuations are handled in the course of
optimization.
In this paper, we contribute to the CPS debate by developing a
novel IR with the following features. First, we integrate a
control operator that resembles Felleisen's C, eliminating certain
redundant rewrites observed in the previous study. Second, we
treat the non-returning and non-escaping aspects of continuations
separately, allowing efficient compilation of well-behaved
functions defined by the user. Third, we define a selective CPS
translation of our IR, which erases control operators while
preserving the meaning and typing of programs. These features
enable optimizations in both direct style and full CPS, as well as
in any intermediate style with selectively exposed continuations.
Thus, we change the spectrum of available options from 'CPS yes or
no' to 'as much or as little CPS as you want, when you want it'.",
paper = "Cong19.pdf",
keywords = "printed"
}
\end{chunk}
\index{Zavialov, Vladislav}
\begin{chunk}{axiom.bib}
@misc{Zavi18,
author = "Zavialov, Vladislav",
title = {{Why Dependent Haskell is the Future of Software Development}},
link = "\url{https://serokell.io/blog/why-dependent-haskell}",
year = "2018"
}
\end{chunk}
\index{Eisenberg, Richard A.}
\begin{chunk}{axiom.bib}
@phdthesis{Eise16,
author = "Eisenberg, Richard A.",
title = {{Dependent Types in Haskell: Theory and Practice}},
school = "University of Pennsylvania",
year = "2016",
abstract =
"Haskell, as implemented by the Glasgow Haskell Compiler (GHC),
has been adding new typelevel programming features for some
time. Many of these features -- generalized algebraic datatypes
(GADTs), type families, kind polymorphism, and promoted datatypes
-- have brought Haskell to the doorstep of dependent types. Many
dependently typed programs can even currently be encoded, but
often the constructions are painful.
In this dissertation, I describe Dependent Haskell, which supports
full dependent types via a backward-compatible extension to
today's Haskell. An important contribution to this work is an
implementation, in GHC, of a portion of Dependent Haskell, with
the rest to follow. The features I have implemented are already
released, in GHC 8.0. This dissertation contains several practical
examples of Dependent Haskell code, a full description of the
differences between Dependent Haskell and today's Haskell, a novel
dependently typed lambda-calculus (called PICO) suitable for use
as an intermediate language for compiling Dependent Haskell, and a
type inference and elaboration algorithm, BAKE, that translates
Dependent Haskell to type-correct PICO. Full proofs of type safety
of PICO and the soundness of BAKE are included in the appendix.",
paper = "Eise16.pdf"
}
\end{chunk}
\index{Pressler, Ron}
\begin{chunk}{axiom.bib}
@misc{Pres19,
author = "Pressler, Ron",
title = {{Correctness and Complexity}},
year = "2019",
link = "\url{https://pron.github.io/posts/correctness-and-complexity}"
}
\end{chunk}
\index{Rado, Tibor}
\begin{chunk}{axiom.bib}
@article{Rado61,
author = "Rado, Tibor",
title = {{On Non-Computable Functions}},
journal = "Bell System Technical Journal",
volume = "41",
number = "3",
pages = "877--884",
year = "1962",
abstract =
"The construction of noncomputable functions used in this paper
is based on the principle that a finite, nonempty set of
nonnegative integers has a largest element. Also, this principle
is used only for sets which are exceptionally well-defined by
current standards. No enumeration of computable functions is used,
and in this sense the diagonal process is not employed. Thus, it
appears that an apparently self-evident principle, of constant use
in every area of mathematics, yields nonconstructive entities.",
paper = "Rado61.pdf",
keywords = "printed"
}
\end{chunk}
\index{Yedidia, Adam}
\index{Aaronson, Scott}
\begin{chunk}{axiom.bib}
@article{Yedi16,
author = "Yedidia, Adam and Aaronson, Scott",
title = {{A Relatively Small Turing Machine Whose Behavior Is
Independent of Set Theory}},
journal = "Complex Systems",
volume = "25",
number = "4",
pages = "297--327",
link = "\url{http://www.complex-systems.com/pdf/25-5-5.pdf}",
year = "2016",
abstract =
"Since the definition of the Busy Beaver function in Rado in 1962,
an interesting open question has been what the smallest value of
$n$ for which $BB(n)$ is independent of ZFC set theory. Is this
$n$ approximately 10, or closer to 1,000,000, or is it even
larger? In this paper, we show that it is at most 7,918 by
presenting an explicit description of a 7,918-state Turing machine
$Z$ with 1 tape and a 2symbol alphabet that cannot be proved to
run forever in ZFC (even though it presumably does), assuming ZFC
is consistent. The machine is based on work of Harvey Friedman on
independent statements involving orderinvariant graphs. In doing
so, we give the first known upper bound on the highest provable
Busy Beaver number in ZFC. We also present a 4,888-state Turing
machine $G$ that halts if and only if there is a counterexample of
Goldbach's conjecture, and a 5,372-state Turing machine $R$ that
halts if and only if the Riemann hypothesis is false. To create
$G$, $R$, and $Z$, we develop and use a higherlevel language,
Laconic, which is much more convenient than direct state
manipulation.",
paper = "Yedi16.pdf"
}
\end{chunk}
\index{Hartmanis, J.}
\index{Stearns, R.E.}
\begin{chunk}{axiom.bib}
@article{Hart63,
author = "Hartmanis, J. and Stearns, R.E.",
title = {{On the Computational Complexity of Algorithms}},
journal = "Trans. American Mathematical Society",
volume = "117",
pages = "285--306",
year = "1963",
paper = "Hart63.pdf"
}
\end{chunk}
\index{Brooks, Frederick P.}
\begin{chunk}{axiom.bib}
@incollection{Broo86a,
author = "Brooks, Frederick P.",
title = {{No Silver Bullet -- Essence and Accident in Software
Engineering}},
booktitle = "The Mythical Man-Month, Anniversary Edition",
publisher = "Elsevier Science",
pages = "1069--1076",
year = "1986",
comment = "chapter",
abstract =
"There is no single development, in either technology or
management technique, which by itself promises even one
order-of-magnitude improvement within a decade in productivity, in
reliability, in simplicity",
paper = "Broo86a.pdf",
keywords = "printed, DONE"
}
\end{chunk}
\index{Turing, A. M.}
\begin{chunk}{axiom.bib}
@misc{Turi36,
author = "Turing, A. M.",
title = {{On Computable Numbers, with an Application to the
Entscheidungsproblem}},
year = "1936",
link =
"\url{https://www.cs.virginia.edu/~robins/Turing_Paper_1936.pdf}",
abstract =
"The ``computable'' numbers may be described briefly as the real
numbers whose expressions as a decimal are calculable by finite
means. Although the subject of this paper is ostensibly the
computable {\sl numbers}, it is almost equally easy to define and
investigate computable functions of an integral variable or a real
or computable variable, computable predicates, and so forth. The
fundamental problems involved are, however, the same in each case,
and I have chosen the computable numbers for explicit treatment as
involving the least cumbrous technique. I hope shortly to give an
account of the relations of the computable numbers, functions, and
so forth to one another. This will include a development of the
theory of functions of a real variable expressed in terms of
computable numbers. According to my definition, a number is
computable if its decimal can be written down by a machine.",
paper = "Turi36.pdf",
keywords = "printed"
}
\end{chunk}
\index{Turing, A. M.}
\begin{chunk}{axiom.bib}
@misc{Turi47,
author = "Turing, A. M.",
title = {{Lecture to the London Mathematical Society on 20 February
1947}},
year = "1947",
link = "\url{http://www.vordenker.de/downloads/turing-vorlesung.pdf}",
paper = "Turi47.pdf",
keywords = "printed"
}
\end{chunk}
\index{Demri, S.}
\index{Laroussinie, F.}
\index{Schnoebelen, Ph.}
\begin{chunk}{axiom.bib}
@article{Demr06,
author = "Demri, S. and Laroussinie, F. and Schnoebelen, Ph.",
title = {{A Parametric Analysis of the State-Explosion Problem in
Model Checking}},
journal = "Computer and System Sciences",
volume = "72",
pages = "547--575",
year = "2006",
abstract =
"In model checking, the stateexplosion problem occurs when one
checks a {\sl non-flat system}, i.e., a system implicitly described
as a synchronized product of elementary subsystems. In this paper,
we investigate the complexity of a wide variety of model-checking
problems for non-flat systems under the light of
{\sl parameterized complexity}, taking the number of synchronized
components as a parameter. We provide precise complexity measures
(in the parameterized sense) for most of the problems we
investigate, and evidence that the results are robust.",
paper = "Demr06.pdf"
}
\end{chunk}
\index{Schnoebelen, Ph.}
\begin{chunk}{axiom.bib}
@article{Schn02,
author = "Schnoebelen, Ph.",
title = {{The Complexity of Temporal Logic Model Checking}},
journal = "Advances in Modal Logic",
volume = "4",
pages = "1--44",
year = "2002",
paper = "Schn02.pdf"
}
\end{chunk}
\index{Noonan, Matt}
\begin{chunk}{axiom.bib}
@inproceedings{Noon18,
author = "Noonan, Matt",
title = {{Ghosts of Departed Proofs (Functional Pearl)}},
booktitle = "Haskell '18",
publisher = "ACM",
isbn = "978-1-4503-5835-4",
year = "2018",
abstract =
"Library authors often are faced with a design choice: should a
function with preconditions be implemented as a partial function,
or by returning a failure condition on incorrect use? Neither
option is ideal. Partial functions lead to frustrating runtime
errors. Failure conditions must be checked at the use-site,
placing an unfair tax on the users who have ensured that the
function's preconditions were correctly met.
In this paper, we introduce an API design concept called `ghosts
of departed proofs' based on the following observation:
sophisticated preconditions can be encoded in Haskell's type
system with no runtime overhead, by using proofs that inhabit
phantom type parameters attached to new type wrappers. The user
expresses correctness arguments by constructing proofs to inhabit
these phantom types. Critically, this technique allows the library
{\sl user} to decide when and how to validate that the API's
preconditions are met.
The 'ghost of departed proofs' approach to API design can achieve
many of the benefits of dependent types and refinement types, yet
only requires some minor and wellunderstood extensions to Haskell
2010. We demonstrate the utility of this approach through a series
of case studies, showing how to enforce novel invariants for
lists, maps, graphs, shared memory regions, and more.",
paper = "Noon18.pdf",
keywords = "printed"
}
\end{chunk}
\index{Innes, Sean}
\index{Wu, Nicolas}
\begin{chunk}{axiom.bib}
@inproceedings{Inne19,
author = "Innes, Sean and Wu, Nicolas",
title = {{Tic Tak Types}},
booktitle = "Int. Workshop on Type Driven Development",
publisher = "ACM",
year = "2019",
abstract =
"Tic-Tac-Toe is a simple, familiar, classic game enjoyed by
many. This pearl is designed to give a flavour of the world of
dependent types to the uninitiated functional programmer. We cover
a journey from TicTakTerrible implementations in the harsh world
of virtually untyped {\sl Strings}, through the safe haven of
vectors that know their own length, and into a TicTacTitanium
version that is too strongly typed for its own good. Along the way
we discover something we knew all along: types are great, but in
moderation. This lesson is quickly put to use in a more complex
recursive version.",
paper = "Inne19.pdf",
keywords = "printed"
}
\end{chunk}
\index{Naur, Peter}
\begin{chunk}{axiom.bib}
@misc{Naur85,
author = "Naur, Peter",
title = {{Programming as Theory Building}},
link = "\url{http://pages.cs.wisc.edu/~remzi/Naur.pdf}",
year = "1985",
paper = "Naur85.pdf",
keywords = "DONE"
}
\end{chunk}
\index{Elliott, Conal}
\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@inproceedings{Elli90,
author = "Elliott, Conal and Pfenning, Frank",
title = {{A Semi-Functional Implementation of a Higher-Order Logic
Programming Language}},
year = "1990",
link = "\url{http://www.cs.cmu.edu/~fp/papers/elpsml90.pdf}",
comment = "\url{http://www.cs.cmu.edu/~fp/papers/elpsmlpaper.tar.gz}",
paper = "Elli90.pdf",
keywords = "printed"
}
\end{chunk}
\index{Carlsson, Mats}
\begin{chunk}{axiom.bib}
@article{Carl84,
author = "Carlsson, Mats",
title = {{On Implementing Prolog in Functional Programming}},
journal = "New Generation Computing",
volume = "2",
pages = "347--359",
year = "1984",
abstract =
"This report surveys techniques for implementing the programming
language Prolog. It focuses on explaining the procedural semantics
of the language in terms of functional programming constructs. The
techniques {\sl success continuations} and {\sl proof streams} are
introduced, and it is shown how Horn clause interpreters can be
built upon them. Continuations are well known from denotational
semantics theory, in this paper it is shown that they are viable
constructs in actual programs.",
paper = "Carl84.pdf",
keywords = "printed"
}
\end{chunk}
\index{Baker, Henry}
\begin{chunk}{axiom.bib}
@misc{Bake84,
author = "Baker, Henry",
title = {{The Nimble Type Inferencer for Common Lisp-84}},
year = "1984",
link = "\url{http://home.pipeline.com/~hbaker1/TInference.html}",
abstract =
"We describe a framework and an algorithm for doing type inference
analysis on programs written in full Common Lisp-84 (Common Lisp
without the CLOS object-oriented extensions). The objective of
type inference is to determine tight lattice upper bounds on the
range of runtime data types for Common Lisp program variables and
temporaries. Depending upon the lattice used, type inference can
also provide range analysis information for numeric
variables. This lattice upper bound information can be used by an
optimizing compiler to choose more restrictive, and hence more
efficient, representations for these program variables. Our
analysis also produces tighter control flow information, which can
be used to eliminate redundant tests which result in dead
code. The overall goal of type inference is to mechanically
extract from Common Lisp programs the same degree of
representation information that is usually provided by the
programmer in traditional strongly-typed languages. In this way,
we can provide some classes of Common Lisp programs execution time
efficiency expected only for more strongly-typed compiled languages.",
paper = "Bake84.pdf",
keywords = "printed, DONE"
}
\end{chunk}
\index{Dunfield, Joshua}
\index{Krishnaswami, Neel}
\begin{chunk}{axiom.bib}
@misc{Dunf19,
author = "Dunfield, Joshua and Krishnaswami, Neel",
title = {{Bidirectional Typing}},
year = "2019",
link = "\url{https://www.cl.cam.ac.uk/~nk480/bidir-survey.pdf}",
abstract =
"Bidirectional typing combines two modes of typing: type checking,
which checks that a program satisfies a known type, and type
synthesis, which determines a type from the program. Using
checking enables bidirectional typing to break the decidability
barrier of Damas-Milner approaches; using synthesis enables
bidirectional typing to avoid the large annotation burden of
explicitly typed languages. In addition, bidirectional typing
improves error locality. We highlight the design principles that
underlie bidirectional type systems, survey the development of
bidirectional typing from the prehistoric period before Pierce and
Turner's local type inference to the present day, and provide
guidance for future investigations.",
paper = "Dunf19.pdf",
keywords = "printed"
}
\end{chunk}
\index{Sannella, Donald}
\index{Tarlecki, Andrzej}
\begin{chunk}{axiom.bib}
@book{Sann12,
author = "Sannella, Donald and Tarlecki, Andrzej",
title = {{Foundations of Algebraic Specification and Formal Software
Development}},
publisher = "Springer",
year = "2012",
isbn = "978-3-642-17336-3",
paper = "Sann12.pdf"
}
\end{chunk}
\index{Sannella, D.}
\index{Tarlecki, A.}
\begin{chunk}{axiom.bib}
@inproceedings{Sann91,
author = "Sannella, D. and Tarlecki, A.",
title = {{Formal Program Development in Extended ML for the Working
Programmer}},
booktitle = "3rd BCS/FACS Workshop on Refinement",
publisher = "Springer",
pages = "99--130",
year = "1991",
abstract =
"Extended ML is a framework for the formal development of programs
in the Standard ML programming language from highlevel
specifications of their required input/output behavior. It
strongly supports the development of modular programs consisting
of an interconnected collection of generic and reusable units. The
Extended ML framework includes a methodology for formal program
development which establishes a number of ways of proceeding from
a given specification of a programming task towards a
program. Each such step gives rise to one or more proof
obligations which must be proved in order to establish the
correctness of that step. This paper is intended as a user-oriented
summary of the Extended ML language and methodology. Theoretical
technicalities are avoided whenever possible, with emphasis placed
on the practical aspects of formal program development. An
extended example of a complete program development in Extended ML
is included.",
paper = "Sann91.pdf",
keywords = "printed"
}
\end{chunk}
\index{Sannella, D.}
\index{Tarlecki, A.}
\begin{chunk}{axiom.bib}
@article{Sann99,
author = "Sannella, Donald and Tarlecki, Andrzej",
title = {{Algebraic Methods for Specification and Formal Development
of Programs}},
journal = "ACM Computing Surveys",
volume = "31",
year = "1999",
paper = "Sann99.pdf",
keywords = "printed"
}
\end{chunk}
\index{Kahrs, Stefan}
\begin{chunk}{axiom.bib}
@techreport{Kahr95,
author = "Kahrs, Stefan",
title = {{On the Static Analysis of Extended ML}},
type = "technical report",
institution = "Lab for Foundations of Comp Sci. Univ. Edinburgh",
number = "Research Note",
year = "1995",
abstract =
"This is a short note describing differences in static analysis of
EML, as defined in [KST94] and SML, as defined in [MTH90] and
[MT91]. It is intended for use by people who are building an EML
parser/typechecker by modifying an existing SML compiler.",
paper = "Kahr95.pdf",
keywords = "printed"
}
\end{chunk}
\index{Kahrs, S.}
\index{Sannella, D.}
\index{Tarlecki, A.}
\begin{chunk}{axiom.bib}
@article{Kahr94,
author = "Kahrs, S. and Sannella, D. and Tarlecki, A.",
title = {{Interfaces and Extended ML}},
journal = "SIGPLAN Notices",
volume = "29",
number = "8",
pages = "111--118",
year = "1994",
abstract =
"This is a position paper giving our views on the uses and makeup
of module interfaces. The position espoused is inspired by our
work on the Extended ML (EML) formal software development
framework and by ideas in the algebraic foundations of
specification and formal development. The present state of
interfaces in EML is outlined and set in the context of plans for
a more general EMLlike framework with axioms in interfaces taken
from an arbitrary logical system formulated as an
{\sl institution}. Some more speculative plans are sketched
concerning the simultaneous use of multiple institutions in
specification and development.",
paper = "Kahr94.pdf",
keywords = "printed"
}
\end{chunk}
\index{Sannella, D.}
\index{Tarlecki, A.}
\begin{chunk}{axiom.bib}
@article{Sann91a,
author = "Sannella, Donald and Tarlecki, Andrzej",
title = {{Extended ML: Past, Present and Future}},
journal = "LNCS",
volume = "534",
pages = "297--322",
year = "1991",
abstract =
"An overview of past, present and future work on the Extended ML
formal program development framework is given, with emphasis on
two topics of current active research: the semantics of the
Extended ML specification language, and tools to support formal
program development.",
paper = "Sann91a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Sannella, Donald}
\begin{chunk}{axiom.bib}
@misc{Sann86a,
author = "Sannella, Donald",
title = {{Formal Specification of ML Programs}},
link =
"\url{http://www.lfcs.inf.ed.ac.uk/reports/86/ECS-LFCS-86-15/ECS-LFCS-86-15.ps}",
year = "1986",
abstract =
"These notes were written to accompany lectures on program
specification which formed part of a course on functional
programming in ML. Functions can be specified using a
specification language obtained by extending ML with
(nonexecutable) firstorder axioms. Simple inductive proofs
suffice to show that an ML function satisfies such a
specification. This approach can also be used to specify and
verify larger programs built from smaller pieces using ML's
modularisation facilities. Examples are used to illustrate the
methods discussed.",
paper = "Sann86a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Sannella, Donald}
\index{Tarlecki, Andrzej}
\begin{chunk}{axiom.bib}
@article{Sann97,
author = "Sannella, Donald and Tarlecki, Andrzej",
title = {{Essential Concepts of Algebraic Specification and Program
Development}},
journal = "Formal Aspects of Computing",
volume = "9",
pages = "229--269",
year = "1997",
abstract =
"The main ideas underlying work on the modeltheoretic foundations
of algebraic specification and formal program development are
presented in an informal way. An attempt is made to offer an
overall view, rather than new results, and to focus on the basic
motivation behind the technicalities presented elsewhere.",
paper = "Sann97.pdf",
keywords = "printed"
}
\end{chunk}
\index{Wright, Andrew K.}
\begin{chunk}{axiom.bib}
@inproceedings{Wrig95,
author = "Wright, Andrew K.",
title = {{Simple Imperative Polymorphism}},
booktitle = "LISP and Symbolic Computation",
publisher = "Kluwer Academic",
pages = "242--256",
year = "1995",
abstract =
"This paper describes a simple extension of the HindleyMilner
polymorphic type discipline to callbyvalue languages that
incorporate imperative features like references, exceptions, and
continuations. This extension sacrifices the ability to type every
purely functional expression that is typable in the HindleyMilner
system. In return, it assigns the same type to functional and
imperative implementations of the same abstraction. Hence with a
module system that separates specifications from implementations,
imperative features can be freely used to implement polymorphic
specifications. A study of a number of ML programs shows that the
inability to type all HindleyMilner typable expressions seldom
impacts realistic programs. Furthermore, most programs that are
rendered untypable by the new system can be easily repaired.",
paper = "Wrig95.pdf",
keywords = "printed"
}
\end{chunk}
\index{Tofte, Mads}
\begin{chunk}{axiom.bib}
@phdthesis{Toft88,
author = "Tofte, Mads",
title = {{Operational Semantics and Polymorphic Type Inference}},
school = "Univ. of Edinburgh",
year = "1988",
abstract =
"Three languages with polymorphic type disciplines are discussed,
namely the $\lambda$calculus with Milner's polymorphic type
discipline; a language with imperative features (polymorphic
references); and a skeletal module language with structures,
signatures and functors. In each of the two first cases we show
that the type inference system is consistent with an operational
dynamic semantics.
On the module level, polymorphic types correspond to
signatures. There is a notion of principal signatures. So-called
signature checking is the module level equivalent of type
checking. In particular, there exists an algorithm which either
fails or produces a principal signature.",
paper = "Toft88.pdf",
keywords = "printed"
}
\end{chunk}
\index{Reynolds, John C.}
\begin{chunk}{axiom.bib}
@inproceedings{Reyn83,
author = "Reynolds, John C.",
title = {{Types, Abstraction and Parametric Polymorphism}},
booktitle = "Information Processing 83",
publisher = "Elsevier Science Publishers",
year = "1983",
abstract =
"We explore the thesis that type structure is a syntactic
discipline for maintaining levels of abstraction. Traditionally,
this view has been formalized algebraically, but the algebraic
approach fails to encompass higherorder functions. For this
purpose, it is necessary to generalize homomorphic functions to
relations; the result is an ``abstraction'' theorem that is
applicable to the typed lambda calculus and various extensions,
including userdefined types.
Finally, we consider polymorphic functions, and show that the
abstraction theorem captures Strachey's concept of parametric, as
opposed to ad hoc, polymorphism.",
paper = "Reyn83.pdf",
keywords = "printed"
}
\end{chunk}
\index{Leroy, Xavier}
\begin{chunk}{axiom.bib}
@techreport{Lero92,
author = "Leroy, Xavier",
title = {{Polymorphic Typing of an Algorithmic Language}},
type = "research report",
institution = "INRIA",
number = "N1778",
year = "1992",
abstract =
"The polymorphic type discipline, as in the ML language, fits well
within purely applicative languages, but does not extend naturally
to the main feature of algorithmic languages: inplace update of
data structures. Similar typing difficulties arise with other
extensions of applicative languages: logical variables,
communication channels, continuation handling. This work studies
(in the setting of relational semantics) two new approaches to the
polymorphic typing of these nonapplicative features. The first
one relies on a restriction of generalization over types (the
notion of dangerous variables), and on a refined typing of
functional values (closure typing). The resulting type system is
compatible with the ML core language, and is the most expressive
type systems for ML with imperative features so far. The second
approach relies on switching to ``by-name'' semantics for the
constructs of polymorphism, instead of the usual ``by-value''
semantics. The resulting language differs from ML, but lends
itself easily to polymorphic typing. Both approaches smoothly
integrate nonapplicative features and polymorphic typing.",
paper = "Lero92.pdf"
}
\end{chunk}
\index{Hutton, Graham}
\begin{chunk}{axiom.bib}
@article{Hutt99,
author = "Hutton, Graham",
title = {{A Tutorial on the Universality and Expressiveness of
Fold}},
journal = "J. Functional Programming",
volume = "9",
number = "4",
pages = "355--372",
year = "1999",
abstract =
"In functional programming, {\sl fold} is a standard operator that
encapsulates a simple pattern of recursion for processing
lists. This article is a tutorial on two key aspects of the fold
operator for lists. First of all, we emphasize the use of the
universal property of fold both as a proof principle that avoids
the need for inductive proofs, and as a definition principle that
guides the transformation of recursive functions into definitions
using fold. Secondly, we show that even though the pattern of
recursion encapsulated by fold is simple, in a language with
tuples and functions as firstclass values the fold operator has
greater expressive power than might first be expected.",
paper = "Hutt99.pdf",
keywords = "printed"
}
\end{chunk}
\index{Cardelli, Luca}
\begin{chunk}{axiom.bib}
@article{Card88b,
author = "Cardelli, Luca",
title = {{Basic Polymorphic Typechecking}},
journal = "Science of Computer Programming",
volume = "8",
number = "2",
year = "1988",
paper = "Card88b.pdf",
keywords = "printed"
}
\end{chunk}
\index{Hughes, John}
\begin{chunk}{axiom.bib}
@misc{Hugh19,
author = "Hughes, John",
title = {{How to Specify it!}},
year = "2019",
link = "\url{https://www.dropbox.com/s/tx2b84kae4bw1p4/paper.pdf}",
abstract =
"Property-based testing tools test software against a
specification, rather than a set of examples. This tutorial paper
presents five generic approaches to writing such specifications
(for purely functional code). We discuss costs, benefits, and
bugfinding power of each approach, with reference to a simple
example with eight buggy variants. The lessons learned should help
the reader to develop effective property-based tests in the future.",
paper = "Hugh19.pdf",
keywords = "printed, DONE"
}
\end{chunk}
\index{Goto, Kazushige}
\index{van de Geijn, Robert A.}
\begin{chunk}{axiom.bib}
@article{Goto19,
author = "Goto, Kazushige and van de Geijn, Robert A.",
title = {{Anatomy of HighPerformance Matrix Multiplication}},
journal = "Transactions on Mathematical Software",
volume = "V",
number = "N",
year = "2019",
abstract =
"We present the basic principles which underlie the high
performance implementation of the matrix multiplication that is
part of the widely used GotoBLAS library. Design decisions are
justified by successively refining a model of architectures with
multilevel memories. A simple but effective algorithm for
executing this operation results. Implementations on a broad
selection of architectures are shown to achieve nearpeak
performance.",
paper = "Goto19.pdf"
}
\end{chunk}
\index{Wernhard, Christoph}
\begin{chunk}{axiom.bib}
@misc{Wern19,
author = "Wernhard, Christoph",
title = {{PIE -- Proving, Interpolating and Eliminating on the Basis
of First-Order Logic}},
year = "2019",
link = "\url{https://arxiv.org/pdf/1908.11137.pdf}",
abstract =
"PIE is a Prolog-embedded environment for automated reasoning on
the basis of first-order logic. It includes a versatile formula
macro system and supports the creation of documents that
intersperse macro definitions, reasoner invocations and LaTeX
formatted natural language text. Invocation of various reasoners
is supported. External provers as well as subsystems of PIE,
which include preprocessors, a Prolog-based first-order prover,
methods for Craig interpolation and methods for second-order
quantifier elimination.",
paper = "Wern19.pdf",
keywords = "printed"
}
\end{chunk}
\index{Aldrich, Jonathan}
\begin{chunk}{axiom.bib}
@inproceedings{Aldr13,
author = "Aldrich, Jonathan",
title = {{The Power of Interoperability: Why Objects are Inevitable}},
booktitle = "Onward!",
publisher = "ACM",
year = "2013",
link = "\url{https://www.cs.cmu.edu/~aldrich/papers/objectsessay.pdf}",
abstract =
"Three years ago, in this venue, Cook argued that in their
essence, objects are what Reynolds called {\sl procedural data
structures}. His observation raises a natural question: if
procedural data structures are the essence of objects, has this
contributed to the empirical success of objects, and if so, how?
This essay attempts to answer that question. After reviewing
Cook's definition, I propose the term {\sl service abstractions}
to capture the essential nature of objects. This terminology
emphasizes, following Kay, that objects are not primarily about
representing and manipulating data, but are more about providing
services in support of higherlevel goals. Using examples taken
from object oriented frameworks, I illustrate the unique design
leverage that service abstractions provide: the ability to define
abstractions that can be extended, and whose extensions are
interoperable in a firstclass way. The essay argues that the form
of interoperable extension supported by service abstractions is
essential to modern software: many modern frameworks and
ecosystems could not have been built without service
abstractions. In this sense, the success of objects was not a
coincidence: it was an inevitable consequence of their service
abstraction nature.",
paper = "Aldr13.pdf",
keywords = "printed, DONE"
}
\end{chunk}
\index{Chang, Stephen}
\index{Knauth, Alex}
\index{Greenman, Ben}
\begin{chunk}{axiom.bib}
@inproceedings{Chan17,
author = "Chang, Stephen and Knauth, Alex and Greenman, Ben",
title = {{Type Systems as Macros}},
booktitle = "Principles of Programming Languages",
publisher = "ACM",
year = "2017",
abstract =
"We present TURNSTILE, a metalanguage for creating typed embedded
languages. To implement the type system, programmers write type
checking rules resembling traditional judgment syntax. To
implement the semantics, they incorporate elaborations into these
rules. TURNSTILE critically depends on the idea of linguistic
reuse. It exploits a macro system in a novel way to simultaneously
type check and rewrite a surface program into a target
language. Reusing a macro system also yields modular
implementations whose rules may be mixed and matched to create
other languages. Combined with typical compiler and runtime reuse,
TURNSTILE produces performant typed embedded languages with little
effort.",
paper = "Chan17.pdf",
keywords = "printed"
}
\end{chunk}
\index{Xi, Hongwei}
\begin{chunk}{axiom.bib}
@book{Xixx19,
author = "Xi, Hongwei",
title = {{Introduction to Programming in ATS}},
publisher = "ATS Trustful Software, Inc",
year = "2019",
abstract =
"As a programming language, ATS is both syntaxrich and
featurerich. This book introduces the reader to some core
features of ATS, including basic functional programming, simple
types, (recursively defined) datatypes, polymorphic types,
dependent types, linear types, theorem proving, programming with
theorem proving (PwTP), and templatebased programming. Although
the reader is not assumed to be familiar with programming in
general, the book is likely to be rather dense for someone without
considerable programming experience",
paper = "Xixx19.pdf"
}
\end{chunk}
\index{Loh, Andres}
\index{McBride, Conor}
\index{Swierstra, Wouter}
\begin{chunk}{axiom.bib}
@article{Lohx01,
author = "Loh, Andres and McBride, Conor and Swierstra, Wouter",
title = {{A Tutorial Implementation of a Dependently Typed Lambda
Calculus}},
journal = "Fundamenta Informaticae",
volume = "XXI",
pages = "10011031",
year = "2001",
abstract =
"We present the type rules for a dependently typed core calculus
together with a straightforward implementation in Haskell. We
explicitly highlight the changes necessary to shift from a
simplytyped lambda calculus to a dependently typed lambda
calculus. We also describe how to extend our core language with
data types and write several small example programs. The article
is accompanied by an executable interpreter and example code that
allows immediate experimentation with the system we describe.",
paper = "Lohx01.pdf",
keywords = "printed"
}
\end{chunk}
\index{Bahr, Patrick}
\index{Hutton, Graham}
\begin{chunk}{axiom.bib}
@article{Bahr15,
author = "Bahr, Patrick and Hutton, Graham",
title = {{Calculating Correct Compilers}},
journal = "Functional Programming",
year = "2015",
link = "\url{www.cs.nott.ac.uk/~pszgmh/ccc.pdf}",
abstract =
"In this article we present a new approach to the problem of
calculating compilers. In particular, we develop a simple but
general technique that allows us to derive correct compilers from
highlevel semantics by systematic calculation, with all details
of the implementation of the compilers falling naturally out of
the calculation process. Our approach is based upon the use of
standard equational reasoning techniques, and has been applied to
calculate compilers for a wide range of language features and
their combination, including arithmetic expressions, exceptions,
state, various forms of lambda calculi, bounded and unbounded
loops, nondeterminism, and interrupts. All the calculations in
the article have been formalised using the Coq proof assistant,
which serves as a convenient interactive tool for developing and
verifying the calculations.",
paper = "Bahr15.pdf",
keywords = "printed"
}
\end{chunk}
\index{Bahr, Patrick}
\index{Hutton, Graham}
\begin{chunk}{axiom.bib}
@article{Bahr19,
author = "Bahr, Patrick and Hutton, Graham",
title = {{Calculating Correct Compilers II}},
journal = "Functional Programming",
year = "2019",
link = "\url{www.cs.nott.ac.uk/~pszgmh/ccc2.pdf}",
abstract =
"In 'Calculating Correct Compilers' (Bahr and Hutton, 2015) we
developed a new approach to calculating compilers directly from
specifications of their correctness. Our approach only required
elementary reasoning techniques, and has been used to calculate
compilers for a wide range of language features and their
combination. However, the methodology was focused on stackbased
target machines, whereas real compilers often target
registerbased machines. In this article, we show how our approach
can naturally be adapted to calculate compilers for register
machines.",
paper = "Bahr19.pdf",
keywords = "printed"
}
\end{chunk}
\index{Swords, Sol}
\index{Davis, Jared}
\begin{chunk}{axiom.bib}
@article{Swor11,
author = "Swords, Sol and Davis, Jared",
title = {{Bit-Blasting ACL2 Theorems}},
journal = "EPTCS",
volume = "70",
pages = "84102",
year = "2011",
abstract =
"Interactive theorem proving requires a lot of human
guidance. Proving a property involves (1) figuring out why it
holds, then (2) coaxing the theorem prover into believing it. Both
steps can take a long time. We explain how to use GL, a framework
for proving finite ACL2 theorems with BDD and SATbased
reasoning. This approach makes it unnecessary to deeply understand
why a property is true, and automates the process of admitting it
as a theorem. We use GL at Centaur Technology to verify execution
units for x86 Integer, MMX, SSE, and floatingpoint arithmetic.",
paper = "Swor11.pdf"
}
\end{chunk}

books/bookvolbib.pamphlet  4775 +++++++++++++++++++++++++
changelog  10 +
patch  6672 +++++++++++++++++++
src/axiomwebsite/patches.html  4 +
4 files changed, 7590 insertions(+), 3871 deletions()
diff git a/books/bookvolbib.pamphlet b/books/bookvolbib.pamphlet
index 1b0730d..747462f 100644
 a/books/bookvolbib.pamphlet
+++ b/books/bookvolbib.pamphlet
@@ 588,6 +588,118 @@ paragraph for those unfamiliar with the terms.
\subsection{B} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Baker, Henry}
+\begin{chunk}{axiom.bib}
+@misc{Bake84,
+ author = "Baker, Henry",
+ title = {{The Nimble Type Inferencer for Common Lisp84}},
+ year = "1984",
+ link = "\url{http://home.pipeline.com/~hbaker1/TInference.html}",
+ abstract =
+ "We describe a framework and an algorithm for doing type inference
+ analysis on programs written in full Common Lisp84 (Common Lisp
+ without the CLOS objectoriented extensions). The objective of
+ type inference is to determine tight lattice upper bounds on the
+ range of runtime data types for Common Lisp program variables and
+ temporaries. Depending upon the lattice used, type inference can
+ also provide range analysis information for numeric
+ variables. This lattice upper bound information can be used by an
+ optimizing compiler to choose more restrictive, and hence more
+ efficient, representations for these program variables. Our
+ analysis also produces tighter control flow information, which can
+ be used to eliminate redundant tests which result in dead
+ code. The overall goal of type inference is to mechanically
+ extract from Common Lisp programs the same degree of
+ representation information that is usually provided by the
+ programmer in traditional strongly-typed languages. In this way,
+ we can provide some classes of Common Lisp programs execution time
+ efficiency expected only for more stronglytyped compiled languages.
+
+ The Nimble type inference system follows the traditional
+ lattice/algebraic data flow techniques [Kaplan80], rather than the
+ logical/theoremproving unification techniques of ML [Milner78]. It
+ can handle polymorphic variables and functions in a natural way, and
+ provides for ``casebased'' analysis that is quite similar to that used
+ intuitively by programmers. Additionally, this inference system can
+ deduce the termination of some simple loops, thus providing
+ surprisingly tight upper lattice bounds for many loop variables.
+
+ By using a higher resolution lattice, more precise typing of primitive
+ functions, polymorphic types and case analysis, the Nimble type
+ inference algorithm can often produce sharper bounds than
+ unificationbased type inference techniques. At the present time,
+ however, our treatment of higherorder data structures and functions
+ is not as elegant as that of the unification techniques.",
+ paper = "Bake84.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
+\index{Baker, Henry G.}
+\begin{chunk}{axiom.bib}
+@article{Bake91,
+ author = "Baker, Henry G.",
+ title = {{Pragmatic Parsing in Common Lisp}},
+ journal = "ACM Lisp Pointers",
+ volume = "IV",
+ number = "2",
+ pages = "315",
+ year = "1991",
+ abstract =
+ "We review META, a classic technique for building recursive descent
+ parsers, that is both simple and efficient. While META does not
+ handle all possible regular or contextfree grammars, it handles a
+ surprisingly large fraction of the grammars encountered by Lisp
+ programmers. We show how META can be used to parse streams, strings
+ and lists—including Common Lisp's hairy lambda expression parameter
+ lists. Finally, we compare the execution time of this parsing method
+ to the builtin methods of Common Lisp.",
+ paper = "Bake91.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
+\index{Baker, Henry}
+\begin{chunk}{axiom.bib}
+@article{Bake91a,
+ author = "Baker, Henry",
+ title = {{CLOStrophobia: Its Etiology and Treatment}},
+ journal = "OOPS Messenger",
+ volume = "2",
+ number = "4",
+ publisher = "ACM",
+ year = "1991",
+ pages = "415",
+ abstract =
+ "The Common Lisp Object System (CLOS) has received some praise and
+ some criticism, both deserved. One of the most controversial
+ features of standard CLOS is its linearlyordered 'class
+ precedence list', which is used to linearly order the execution of
+ its 'combination' methods. In addition to the problems already
+ known regarding the linear ordering of superclasses, we show that
+ the standard CLOS class precedence ordering produces gratuitously
+ complex and nonintuitive behavior. We then show that a slight
+ modification of the standard ordering rules produces a linear
+ ordering which can achieve most of the goals of CLOS more
+ efficiently, and without impacting most programs. We describe a
+ subset of CLOS called {\sl Static} CLOS which preserves much of
+ the praise due CLOS, while eliminating some of the
+ criticism. Static CLOS is tuned for {\sl delivery} of debugged
+ code, rather than for prototype development. This 'delivery' CLOS
+ determines as many methods as possible at compiletime using 'type
+ inference' techniques. While these techniques generally result in
+ fasterexecuting code, the space requirements can grow quite
+ large. We argue that this space explosion can be partially
+ ameliorated through the use of our modified class precedence
+ ordering.",
+ paper = "Bake91a.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
\index{Baker, Martin}
\begin{chunk}{axiom.bib}
@misc{Bake16b,
@@ 1337,6 +1449,38 @@ paragraph for those unfamiliar with the terms.
\end{chunk}
+\index{Dunfield, Joshua}
+\index{Krishnaswami, Neelakantan R.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Dunf13,
+ author = "Dunfield, Joshua and Krishnaswami, Neelakantan R.",
+ title = {{Complete and Easy Bidirectional Typechecking for
+ HigherRank Polymorphism}},
+ booktitle = "ICFP'13",
+ publisher = "ACM",
+ year = "2013",
+ link = "\url{https://arxiv.org/pdf/1306.6032.pdf}",
+ abstract =
+ "Bidirectional typechecking, in which terms either synthesize a
+ type or are checked against a known type, has become popular for
+ its scalability (unlike DamasMilner type inference, bidirectional
+ typing remains decidable even for very expressive type systems),
+ its error reporting, and its relative ease of
+ implementation. Following design principles from proof theory,
+ bidirectional typing can be applied to many type constructs. The
+ principles underlying a bidirectional approach to polymorphism,
+ however, are less obvious. We give a declarative, bidirectional
+ account of higherrank polymorphism, grounded in proof theory;
+ this calculus enjoys many properties such as $\eta$reduction and
+ predictability of annotations. We give an algorithm for
+ implementing the declarative system; our algorithm is remarkably
+ simple and wellbehaved, despite being both sound and complete.",
+ paper = "Dunf13.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{E} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Ehrig, H.}
@@ 3276,7 +3420,8 @@ paragraph for those unfamiliar with the terms.
avoiding anomalies in the interaction of implicit conversions and
generic operators. The definition of a simple imperative language
(without any binding mechanisms) is used as an example.",
 paper = "Reyn80.pdf, printed"
+ paper = "Reyn80.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 7903,7 +8048,7 @@ when shown in factored form.
number = "PAM554",
link = "\url{http://www.dtic.mil/dtic/tr/fulltext/u2/a256582.pdf}",
abstract =
 "We have discovered a new implementaiton of the qd algorithm that has
+ "We have discovered a new implementation of the qd algorithm that has
a far wider domain of stability than Rutishauser's version. Our
algorithm was developed from an examination of the LRCholesky
transformation and can be adapted to parallel computation in stark
@@ 9004,13 +9149,14 @@ when shown in factored form.
\index{Turing, A. M.}
\begin{chunk}{axiom.bib}
@article{Turi48,
+@article{Turi48a,
author = "Turing, A. M.",
title = {{Roundingoff errors in matrix processes}},
journal = "Q. J. Mech. Appl. Math.",
volume = "1",
pages = "287308",
 year = "1948"
+ year = "1948",
+ paper = "Turi48a.pdf"
}
\end{chunk}
@@ 9904,7 +10050,7 @@ when shown in factored form.
reusability but also to encourage a more systematic comparison of
different softwares and architectural solutions.",
paper = "Aspe06a.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 10098,7 +10244,7 @@ when shown in factored form.
level. In this paper, we give an account of the whole system, its
peculiarities and its main applications.",
paper = "Aspe11.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 10816,7 +10962,7 @@ when shown in factored form.
benefits similar to those offered by the SPARK language subset and
associated tools can be brought to a C++ development environment.",
paper = "Croc14.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 11494,7 +11640,7 @@ when shown in factored form.
levels of interpretation of types in a novel way.
We finally demonstrate the expressive power of our system using
 our prototype implementaiton, by proving properties of standard
+ our prototype implementation, by proving properties of standard
programs like the map function on lists or the insertion sort.",
paper = "Lepi16.pdf"
}
@@ 12151,7 +12297,7 @@ when shown in factored form.
metalanguage. We discuss details of the logic implemented, kernel
design, and novel proof state and tactic representations.",
paper = "Mull13.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 12686,6 +12832,30 @@ when shown in factored form.
\end{chunk}
+\index{Abrahams, Paul W.}
+\index{Barnett, Jeffrey A.}
+\index{Book, Erwin}
+\index{Firth, Donna}
+\index{Kameny, Stanley L.}
+\index{Weissman, Clark}
+\index{Hawkinson, Lowell}
+\index{Levin, Michael I.}
+\index{Saunders, Robert A.}
+\begin{chunk}{axiom.bib}
+@misc{Abra66,
+ author = "Abrahams, Paul W. and Barnett, Jeffrey A. and Book, Erwin
+ and Firth, Donna and Kameny, Stanley L. and Weissman, Clark
+ and Hawkinson, Lowell and Levin, Michael I. and
+ Saunders, Robert A.",
+ title = {{The Lisp 2 Programming Language and System}},
+ link =
+ "\url{http://www.softwarepreservation.org/projects/LISP/lisp2/AbrahamsEtAlLISP2.pdf}",
+ year = "1966",
+ paper = "Abra66.pdf"
+}
+
+\end{chunk}
+
\index{Adams, Andrew A.}
\index{Davenport, James H.}
\begin{chunk}{axiom.bib}
@@ 12745,6 +12915,60 @@ when shown in factored form.
\end{chunk}
+\index{Aldrich, Jonathan}
+\begin{chunk}{axiom.bib}
+@inproceedings{Aldr13,
+ author = "Aldrich, Jonathan",
+ title = {{The Power of Interoperability: Why Objects are Inevitable}},
+ booktitle = "Onward!",
+ publisher = "ACM",
+ year = "2013",
+ link = "\url{https://www.cs.cmu.edu/~aldrich/papers/objectsessay.pdf}",
+ abstract =
+ "Three years ago, in this venue, Cook argued that in their
+ essence, objects are what Reynolds called {\sl procedural data
+ structures}. His observations raises a natural question: if
+ procedural data structures are the essence of objects, has this
+ contributed to the empirical success of objects, and if so, how?
+
+ This essay attempts to answer that question. After reviewing
+ Cook's definition, I propose the term {\sl service abstractions}
+ to capture the essential nature of objects. This terminology
+ emphasizes, following Kay, that objects are not primarily about
+ representing and manipulating data, but are more about providing
+ services in support of higherlevel goals. Using examples taken
+ from object oriented frameworks, I illustrate the unique design
+ leverage that service abstractions provide: the ability to define
+ abstractions that can be extended, and whose extensions are
+ interoperable in a firstclass way. The essay argues that the form
+ of interoperable extension supported by service abstractions is
+ essential to modern software: many modern frameworks and
+ ecosystems could not have been built without service
+ abstractions. In this sense, the success of objects was not a
+ coincidence: it was an inevitable consequence of their service
+ abstraction nature.",
+ paper = "Aldr13.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
+\index{Altenkirch, Thorsten}
+\index{McBride, Conor}
+\index{Swierstra, Wouter}
+\begin{chunk}{axiom.bib}
+@inproceedings{Alte07,
+ author = "Altenkirch, Thorsten and McBride, Conor and Swierstra, Wouter",
+ title = {{Observational Equality, Now!}},
+ booktitle = "ACM Workshop Programming Languages meets Program
+ Verification",
+ publisher = "ACM",
+ pages = "5768",
+ year = "2007"
+}
+
+\end{chunk}
+
\index{Altenkirch, Thorsten}
\begin{chunk}{axiom.bib}
@misc{Alte18,
@@ 12826,6 +13050,21 @@ when shown in factored form.
\end{chunk}
+\index{Andrews, Peter B.}
+\begin{chunk}{axiom.bib}
+@book{Andr02,
+ author = "Andrews, Peter B.",
+ title = {{An Introduction to Mathematical Logic and Type Theory: To
+ Truth Through Proof}},
+ comment = "Applied Logic Series 27",
+ publisher = "Springer",
+ year = "2002",
+ isbn = "9789401599344",
+ paper = "Andr02.pdf"
+}
+
+\end{chunk}
+
\index{Antoy, Sergio}
\index{Peters, Arthur}
\begin{chunk}{axiom.bib}
@@ 13063,6 +13302,38 @@ when shown in factored form.
\end{chunk}
+\index{Asperti, Andrea}
+\index{Ricciotti, Wilmer}
+\index{Coen, Claudio Sacerdoti}
+\index{Tassi, Enrico}
+\begin{chunk}{axiom.bib}
+@article{Aspe12b,
+ author = "Asperti, Andrea and Ricciotti, Wilmer and
+ Coen, Claudio Sacerdoti and Tassi, Enrico",
+ title = {{Formal Metatheory of Programming Languages in the Matita
+ Interactive Theorem Prover}},
+ journal = "Journal of Automated Reasoning",
+ volume = "49",
+ number = "3",
+ pages = "427451",
+ year = "2012",
+ abstract =
+ "This paper is a report about the use of Matita, an interactive
+ theorem prover under development at the University of Bologna, for
+ the solution of the POPLmark Challenges, part 1a. We provide three
+ different formalizations, including two direct solutions using
+ pure de Bruijn and locally nameless encodings of bound variables,
+ and a formalization using named variables, obtained by means of a
+ sound translation to the locally nameless encoding. According to
+ this experience, we also discuss some of the proof principles used
+ in our solutions, which have led to the development of a
+ generalized inversion tactic for Matita.",
+ paper = "Aspe12b.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Aspinall, David}
\index{Compagnoni, Adriana}
\begin{chunk}{axiom.bib}
@@ 13149,6 +13420,20 @@ when shown in factored form.
\index{Avigad, Jeremy}
\begin{chunk}{axiom.bib}
+@article{Avig07,
+ author = "Avigad, Jeremy",
+ title = {{A Formally Verified Proof of the Prime Number Theorem}},
+ journal = "ACM Trans. Comput. Logic",
+ volume = "9",
+ number = "1",
+ pages = "2",
+ year = "2007"
+}
+
+\end{chunk}
+
+\index{Avigad, Jeremy}
+\begin{chunk}{axiom.bib}
@misc{Avig19,
author = "Avigad, Jeremy",
title = {{The Mechanization of Mathematics}},
@@ 13160,6 +13445,19 @@ when shown in factored form.
\end{chunk}
+\index{Awodey, Steve}
+\begin{chunk}{axiom.bib}
+@misc{Awod12,
+ author = "Awodey, Steve",
+ title = {{Category Theory Foundations. Lectures 14}},
+ year = "2012",
+ comment = "Oregon Programming Language Summer School 2013",
+ link =
+ "\url{http://www.youtube.com/watch?v=ZKmodCApZwk&list=PL8Ky8IYL8Oh7awp0sqa82o7Ggt4AGhyf}"
+}
+
+\end{chunk}
+
\subsection{B} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Backeljauw, Franky}
@@ 13232,6 +13530,80 @@ when shown in factored form.
\end{chunk}
+\index{Bahr, Patrick}
+\index{Hutton, Graham}
+\begin{chunk}{axiom.bib}
+@article{Bahr15,
+ author = "Bahr, Patrick and Hutton, Graham",
+ title = {{Calculating Correct Compilers}},
+ journal = "Functional Programming",
+ year = "2015",
+ link = "\url{www.cs.nott.ac.uk/~pszgmh/ccc.pdf}",
+ abstract =
+ "In this article we present a new approach to the problem of
+ calculating compilers. In particular, we develop a simple but
+ general technique that allows us to derive correct compilers from
+ highlevel semantics by systematic calculation, with all details
+ of the implementation of the compilers falling naturally out of
+ the calculation process. Our approach is based upon the use of
+ standard equational reasoning techniques, and has been applied to
+ calculate compilers for a wide range of language features and
+ their combination, including arithmetic expressions, exceptions,
+ state, various forms of lambda calculi, bounded and unbounded
+ loops, nondeterminism, and interrupts. All the calculations in
+ the article have been formalised using the Coq proof assistant,
+ which serves as a convenient interactive tool for developing and
+ verifying the calculations.",
+ paper = "Bahr15.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Bahr, Patrick}
+\index{Hutton, Graham}
+\begin{chunk}{axiom.bib}
+@article{Bahr19,
+ author = "Bahr, Patrick and Hutton, Graham",
+ title = {{Calculating Correct Compilers II}},
+ journal = "Functional Programming",
+ year = "2019",
+ link = "\url{www.cs.nott.ac.uk/~pszgmh/ccc2.pdf}",
+ abstract =
+ "In 'Calculating Correct Compilers' (Bahr and Hutton, 2015) we
+ developed a new approach to calculating compilers directly from
+ specifications of their correctness. Our approach only required
+ elementary reasoning techniques, and has been used to calculate
+ compilers for a wide range of language features and their
+ combination. However, the methodology was focused on stackbased
+ target machines, whereas real compilers often target
+ registerbased machines. In this article, we show how our approach
+ can naturally be adapted to calculate compilers for register
+ machines.",
+ paper = "Bahr19.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Bailleux, Olivier}
+\begin{chunk}{axiom.bib}
+@misc{Bail19,
+ author = "Bailleux, Olivier",
+ title = {{Subsumptiondriven Clause Learning with DPLL+Restarts}},
+ year = "2019",
+ link = "\url{https://arxiv.org/pdf/1906.07508.pdf}",
+ abstract =
+ "We propose to use a DPLL+restart to solve SAT instances by
+ successive simplifications based on the production of clauses that
+ subsume the initial clauses. We show that this approach allows the
+ refutation of pebbling formulae in polynomial time and linear
+ space, as effectively as with a CDCL solver.",
+ paper = "Bail19.pdf"
+}
+
+\end{chunk}
+
\index{Baker, Josef B.}
\index{Sexton, Alan P.}
\index{Sorge, Volker}
@@ 13470,6 +13842,56 @@ when shown in factored form.
\end{chunk}
+\index{Barthe, Gilles}
+\begin{chunk}{axiom.bib}
+@article{Bart85,
+ author = "Barthe, Gilles",
+ title = {{Implicit Coercions in Type Systems}},
+ journal = "LNCS",
+ volume = "1158",
+ pages = "115",
+ year = "1985",
+ abstract =
+ "We propose a notion of pure type system with implicit
+ coercions. In our framework, judgements are extended with a
+ context of coercions $\Delta$ and the application rule is modified
+ so as to allow coercions to be left implicit. The setting supports
+ multiple inheritance and can be applied to all type theories with
+ $\Pi$types. One originality of our work is to propose a
+ computational interpretation of implicit coercions. In this paper,
+ we demonstrate how this interpretation allows a strict control on
+ the logical properties of pure type systems with implicit coercions.",
+ paper = "Bart85.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Barthe, Gilles}
+\index{Ruys, Mark}
+\index{Barendregt, Henk}
+\begin{chunk}{axiom.bib}
+@article{Bart85a,
+ author = "Barthe, Gilles and Ruys, Mark and Barendregt, Henk",
+ title = {{A TwoLevel Approach Towards Lean ProofChecking}},
+ journal = "LNCS",
+ volume = "1158",
+ pages = "1635",
+ year = "1985",
+ abstract =
+ "We present a simple and effective methodology for equational
+ reasoning in proof checkers. The method is based on a twolevel
+ approach distinguishing between syntax and semantics of
+ mathematical theories. The method is very general and can be
+ carried out in any system with inductive and oracle types. The
+ potential of our twolevel approach is illustrated by some
+ examples developed in Lego.",
+ paper = "Bart85a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Barthe, G.}
\index{Elbers, H.}
\begin{chunk}{axiom.bib}
@@ 13544,6 +13966,62 @@ when shown in factored form.
\end{chunk}
+\index{Berger, U.}
+\index{Schwichtenberg, H.}
+\begin{chunk}{axiom.bib}
+@article{Berg85,
+ author = "Berger, U. and Schwichtenberg, H.",
+ title = {{The Greatest Common Divisor: A Case Study for Program
+ Extraction from Classical Proofs}},
+ journal = "LNCS",
+ volume = "1158",
+ pages = "3646",
+ year = "1985",
+ paper = "Berg85.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Bernstein, Daniel J.}
+\index{Yang, BoYin}
+\begin{chunk}{axiom.bib}
+@misc{Bern19,
+ author = "Bernstein, Daniel J. and Yang, BoYin",
+ title = {{Fast ConstantTime GCD and Modular Inversion}},
+ year = "2019",
+ link = "\url{https://gcd.crypto.to/safegcd20190413.pdf}",
+ abstract =
+ "This paper introduces streamlined constanttime variants of
+ Euclid's algorithm, both for polynomial inputs and for integer
+ inputs. As concrete applications, this paper saves time in (1)
+ modular inversion for Curve25519, which was previously believed to
+ be handled much more efficiently by Fermat's method, and (2) key
+ generation for the ntruhrss701 and sntrup4591761 latticebased
+ cryptosystems.",
+ paper = "Bern19.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Bertot, Yves}
+\index{Gonthier, Georges}
+\index{Biha, Sidi Ould}
+\index{Pasca, Ioana}
+\begin{chunk}{axiom.bib}
+@inproceedings{Bert08,
+ author = "Bertot, Yves and Gonthier, Georges and Biha, Sidi Ould and
+ Pasca, Ioana",
+ title = {{Canonical Big Operators}},
+ booktitle = "Theorem Proving in Higher Order Logics",
+ publisher = "Springer",
+ pages = "86101",
+ year = "2008"
+}
+
+\end{chunk}
+
\index{Biha, Sidi Ould}
\begin{chunk}{axiom.bib}
@article{Biha09,
@@ 13565,6 +14043,19 @@ when shown in factored form.
\end{chunk}
+\index{Birkhoff, Garrett}
+\begin{chunk}{axiom.bib}
+@article{Birk35,
+ author = "Birkhoff, Garrett",
+ title = {{On the Structure of Abstract Algebra}},
+ journal = "Proc. of the Cambridge Philosophical Society",
+ volume = "31",
+ year = "1935",
+ paper = "Birk35.pdf"
+}
+
+\end{chunk}
+
\index{Black, A.P.}
\begin{chunk}{axiom.bib}
@techreport{Blac80,
@@ 13966,6 +14457,28 @@ when shown in factored form.
\end{chunk}
+\index{Brooks, Frederick P.}
+\begin{chunk}{axiom.bib}
+@incollection{Broo86a,
+ author = "Brooks, Frederick P.",
+ title = {{No Silver Bullet  Essence and Accident in Software
+ Engineering}},
+ booktitle = "The Mythical ManMonth, Anniversary Edition",
+ publisher = "Elsevier Science",
+ pages = "10691076",
+ year = "1986",
+ comment = "chapter 16",
+ abstract =
+ "There is no single development, in either technology or
+ management technique, which by itself promises even one
+ orderofmagnitude improvement within a decade in productivity, in
+ reliability, in simplicity",
+ paper = "Broo86a.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
\index{Brooks, Rodney A.}
\index{Gabriel, Richard P.}
\index{Steele, Guy L.}
@@ 14000,7 +14513,7 @@ when shown in factored form.
volume = "17",
number = "6",
year = "1982",
 pages = "261275',
+ pages = "261275",
abstract =
"We are developing an optimizing compiler for a dialect of the
LISP language. The current target architecture is the S1, a
@@ 14316,6 +14829,21 @@ when shown in factored form.
\index{Cardelli, Luca}
\begin{chunk}{axiom.bib}
+@article{Card88b,
+ author = "Cardelli, Luca",
+ title = {{Basic Polymorphic Typechecking}},
+ journal = "Science of Computer Programming",
+ volume = "8",
+ number = "2",
+ year = "1988",
+ paper = "Card88b.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Cardelli, Luca}
+\begin{chunk}{axiom.bib}
@techreport{Card93,
author = "Cardelli, Luca",
title = {{Typeful Programming}},
@@ 14474,6 +15002,30 @@ when shown in factored form.
\end{chunk}
+\index{Carlsson, Mats}
+\begin{chunk}{axiom.bib}
+@article{Carl84,
+ author = "Carlsson, Mats",
+ title = {{On Implementing Prolog in Functional Programming}},
+ journal = "New Generation Computing",
+ volume = "2",
+ pages = "347359",
+ year = "1984",
+ abstract =
+ "This report surveys techniques for implementing the programming
+ language Prolog. It focuses on explaining the procedural semantics
+ of the language in terms of functional programming constructs. The
+ techniques {\sl success continuations} and {\sl proof streams} are
+ introduced, and it is shown how Horn clause interpreters can be
+ built upon them. Continuations are well known from denotational
+ semantics theory, in this paper it is shown that they are viable
+ constructs in actual programs.",
+ paper = "Carl84.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
\index{Castagna, Giuseppe}
\index{Lanvin, Victor}
\index{Petrucciani, Tommaso}
@@ 14562,6 +15114,35 @@ when shown in factored form.
\end{chunk}
+\index{Chang, Stephen}
+\index{Knauth, Alex}
+\index{Greenman, Ben}
+\begin{chunk}{axiom.bib}
+@inproceedings{Chan17,
+ author = "Chang, Stephen and Knauth, Alex and Greenman, Ben",
+ title = {{Type Systems as Macros}},
+ booktitle = "Principles of Programming Languages",
+ publisher = "ACM",
+ year = "2017",
+ abstract =
+ "We present TURNSTILE, a metalanguage for creating typed embedded
+ languages. To implement the type system, programmers write type
+ checking rules resembling traditional judgment syntax. To
+ implement the semantics, they incorporate elaborations into these
+ rules. TURNSTILE critically depends on the idea of linguistic
+ reuse. It exploits a macro system in a novel way to simultaneously
+ type check and rewrite a surface program into a target
+ language. Reusing a macro system also yields modular
+ implementations whose rules may be mixed and matched to create
+ other languages. Combined with typical compiler and runtime reuse,
+ TURNSTILE produces performant typed embedded languages with little
+ effort.",
+ paper = "Chan17.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Char, Bruce W.}
\index{Geddes, Keith O.}
\index{Gonnet, Gaston H.}
@@ 14760,6 +15341,20 @@ when shown in factored form.
\end{chunk}
+\index{Chiswell, Ian}
+\index{Hodges, Wilfrid}
+\begin{chunk}{axiom.bib}
+@book{Chis07,
+ author = "Chiswell, Ian and Hodges, Wilfrid",
+ title = {{Mathematical Logic}},
+ publisher = "Oxford University Press",
+ year = "2007",
+ isbn = "9780198571001",
+ paper = "Chis07.pdf"
+}
+
+\end{chunk}
+
\index{Chojecki, Przemyslaw}
\begin{chunk}{axiom.bib}
@article{Choj17,
@@ 14781,45 +15376,51 @@ when shown in factored form.
\end{chunk}
\index{Chiswell, Ian}
\index{Hodges, Wilfrid}
+\index{Chow, Timothy Y.}
\begin{chunk}{axiom.bib}
@book{Chis07,
 author = "Chiswell, Ian and Hodges, Wilfrid",
 title = {{Mathematical Logic}},
 publisher = "Oxford University Press",
 year = "2007",
 isbn = "9780198571001",
 paper = "Chis07.pdf"
+@misc{Chow18,
+ author = "Chow, Timothy Y.",
+ title = {{The Consistency of Arithmetic}},
+ year = "2018",
+ link = "\url{http://timothychow.net/consistent.pdf}",
+ paper = "Chow18.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Conrad, Brian}
+\index{Christiansen, David Thrane}
\begin{chunk}{axiom.bib}
@misc{Conr05,
 author = "Conrad, Brian",
 title = {{Impossibility Theorems for Elementary Integration}},
 year = "2005",
 link =
 "\url{http://www2.maths.ox.ac.uk/cmi/library/academy/LectureNotes05/Conrad.pdf}",
 abstract =
 "Liouville proved that certain integrals, most famously
 $\int{e^{x^2}}~dx$, cannot be expressed in elementary terms. We
 explain how to give precise meaning to the notion of integration
 ``in elementary terms'', and we formulate Liouville's theorem
 that characterizes the possible form of elementary
 antiderivatives. Using this theorem, we deduce a practical
 criterion for proving such impossibility results in special cases.
+@misc{Chri12,
+ author = "Christiansen, David Thrane",
+ title = {{Converting Regular Expressions to Discrete Finite Automata}},
+ year = "2012",
+ link = "\url{http://davidchristiansen.dk/tutorials/regextonfa.pdf}",
+ paper = "Chri12.pdf"
+}
 This criterion is illustrated for the Gaussian integral
 $\int{e^{x^2}}~dx$ from probability theory, the logarithmic
 integral $\int{}~dt/log(t)$ from the study of primes, and
 elliptic integrals. Our exposition is aimed at students who are
 familiar with calculus and elementary abstract algebra (at the
 level of polynomial rings $F(t)$ over a field $F$).",
 paper = "Conr05.pdf",
 keywords = "printed"
+\end{chunk}
+
+\index{Christiansen, David Thrane}
+\begin{chunk}{axiom.bib}
+@misc{Chri13,
+ author = "Christiansen, David Thrane",
+ title = {{Bidirectional Typing Rules: A Tutorial}},
+ year = "2013",
+ link = "\url{http://davidchristiansen.dk/tutorials/bidirectional.pdf}",
+ paper = "Chri13.pdf"
+}
+
+\end{chunk}
+
+\index{Christiansen, David Thrane}
+\begin{chunk}{axiom.bib}
+@misc{Chri14,
+ author = "Christiansen, David Thrane",
+ title = {{A Tutorial on Polymorphic Type Derivations}},
+ year = "2014",
+ link = "\url{http://davidchristiansen.dk/tutorials/typeruletutorial.pdf}",
+ paper = "Chri14.pdf"
}
\end{chunk}
@@ 14848,26 +15449,24 @@ when shown in factored form.
\end{chunk}
\index{Chow, Timothy Y.}
+\index{Christiansen, David Thrane}
\begin{chunk}{axiom.bib}
@misc{Chow18,
 author = "Chow, Timothy Y.",
 title = {{The Consistency of Arithmetic}},
+@misc{Chri18a,
+ author = "Christiansen, David Thrane",
+ title = {{Coding for Types: The Universe Pattern in Idris}},
year = "2018",
 link = "\url{http://timothychow.net/consistent.pdf}",
 paper = "Chow18.pdf",
 keywords = "printed"
+ link = "\url{https://www.youtube.com/watch?v=AWeT_G04a0A}"
}
\end{chunk}
\index{Christiansen, David Thrane}
\begin{chunk}{axiom.bib}
@misc{Chri18a,
+@misc{Chri19,
author = "Christiansen, David Thrane",
 title = {{Coding for Types: The Universe Pattern in Idris}},
 year = "2018",
 link = "\url{https://www.youtube.com/watch?v=AWeT_G04a0A}"
+ title = {{Bidirectional Type Checking}},
+ year = "2019",
+ link = "\url{http://www.youtube.com/watch?v=utyBNDj7s2w}"
}
\end{chunk}
@@ 14884,6 +15483,36 @@ when shown in factored form.
\end{chunk}
+\index{Ciolli, Gianni}
+\index{Gentili, Graziano}
+\index{Maggesi, Marco}
+\begin{chunk}{axiom.bib}
+@article{Ciol11,
+ author = "Ciolli, Gianni and Gentili, Graziano and Maggesi, Marco",
+ title = {{A Certified Proof of the Cartan Fixed Point Theorem}},
+ journal = "J. Autom. Reasoning",
+ volume = "47",
+ number = "3",
+ pages = "319--336",
+ year = "2011"
+}
+
+\end{chunk}
+
+\index{Clark, Kevin}
+\begin{chunk}{axiom.bib}
+@misc{Clar11,
+ author = "Clark, Kevin",
+ title = {{An Algorithm that Decides PRIMES in Polynomial Time}},
+ year = "2011",
+ link =
+ "\url{https://sites.math.washington.edu/~morrow/336_11/papers/kevin.pdf}",
+ paper = "Clar11.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Clark, K.L.}
\index{Tarnlund, S.A.}
\begin{chunk}{axiom.bib}
@@ 15072,31 +15701,94 @@ when shown in factored form.
\end{chunk}
\index{Corless, Robert}
\index{Postma, Erik}
\index{Stoutemyer, David}
+\index{Cong, Youyou}
+\index{Osvald, Leo}
+\index{Essertel, Gregory M.}
+\index{Rompf, Tiark}
\begin{chunk}{axiom.bib}
@inproceedings{Corl11,
 author = "Corless, Robert and Postma, Erik and Stoutemyer, David",
 title = {{GCD of Multivariate Approximate Polynomials using
 Beautification with the Subtractive Algorithm}},
 booktitle = "Int. Workshop on SymbolicNumeric Computation",
+@inproceedings{Cong19,
+ author = "Cong, Youyou and Osvald, Leo and Essertel, Gregory M.
+ and Rompf, Tiark",
+ title = {{Compiling with Continuations, or without? Whatever}},
+ booktitle = "Inter. Conf. on Functional Programming",
publisher = "ACM",
 year = "2011",
 paper = "Corl11.pdf",
+ year = "2019",
+ abstract =
+ "What makes a good compiler IR? In the context of functional
+ languages, there has been an extensive debate on the advantages
+ and disadvantages of continuationpassing style (CPS). The
+ consensus seems to be that some form of explicit continuations is
+ necessary to model jumps in a functional style, but that they
+ should have a 2ndclass status, separate from regular functions,
+ to ensure efficient code generation. Building on this observation,
+ a recent study from PLDI 2017 proposed a directstyle IR with
+ explicit join points, which essentially represent local
+ continuations, i.e. functions that do not return or escape. While
+ this IR can work well in practice, as evidenced by the
+ implementation of join points in the Glasgow Haskell Compiler
+ (GHC), there still seems to be room for improvement, especially
+ with regard to the way continuations are handled in the course of
+ optimization.
+
+ In this paper, we contribute to the CPS debate by developing a
+ novel IR with the following features. First, we integrate a
+ control operator that resembles Felleisen's C, eliminating certain
+ redundant rewrites observed in the previous study. Second, we
+ treat the nonreturning and nonescaping aspects of continuations
+ separately, allowing efficient compilation of wellbehaved
+ functions defined by the user. Third, we define a selective CPS
+ translation of our IR, which erases control operators while
+ preserving the meaning and typing of programs. These features
+ enable optimizations in both direct style and full CPS, as well as
+ in any intermediate style with selectively exposed continuations.
+ Thus, we change the spectrum of available options from 'CPS yes or
+ no' to 'as much or as little CPS as you want, when you want it'.",
+ paper = "Cong19.pdf",
keywords = "printed"
}
\end{chunk}
+\end{chunk}
\index{Copeland, B. Jack}
+\index{Conrad, Brian}
\begin{chunk}{axiom.bib}
@book{Cope04,
 author = "Copeland, B. Jack",
 title = {{The Essential Turing}},
 publisher = "Oxford University Press",
 year = "2004",
 isbn = "9780198250807"
+@misc{Conr05,
+ author = "Conrad, Brian",
+ title = {{Impossibility Theorems for Elementary Integration}},
+ year = "2005",
+ link =
+ "\url{http://www2.maths.ox.ac.uk/cmi/library/academy/LectureNotes05/Conrad.pdf}",
+ abstract =
+ "Liouville proved that certain integrals, most famously
+ $\int{e^{x^2}}~dx$, cannot be expressed in elementary terms. We
+ explain how to give precise meaning to the notion of integration
+ ``in elementary terms'', and we formulate Liouville's theorem
+ that characterizes the possible form of elementary
+ antiderivatives. Using this theorem, we deduce a practical
+ criterion for proving such impossibility results in special cases.
+
+ This criterion is illustrated for the Gaussian integral
+ $\int{e^{x^2}}~dx$ from probability theory, the logarithmic
+ integral $\int{}~dt/log(t)$ from the study of primes, and
+ elliptic integrals. Our exposition is aimed at students who are
+ familiar with calculus and elementary abstract algebra (at the
+ level of polynomial rings $F(t)$ over a field $F$).",
+ paper = "Conr05.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Coquand, Thierry}
+\index{Paulin, Christine}
+\begin{chunk}{axiom.bib}
+@inproceedings{Coqu90,
+ author = "Coquand, Thierry and Paulin, Christine",
+ title = {{Inductively Defined Types}},
+ booktitle = "Int. Conf. on Computer Logic",
+ publisher = "Springer",
+ pages = "50--66",
+ year = "1990",
+ paper = "Coqu90.pdf"
}
\end{chunk}
@@ 15128,6 +15820,35 @@ when shown in factored form.
\end{chunk}
+\index{Copeland, B. Jack}
+\begin{chunk}{axiom.bib}
+@book{Cope04,
+ author = "Copeland, B. Jack",
+ title = {{The Essential Turing}},
+ publisher = "Oxford University Press",
+ year = "2004",
+ isbn = "9780198250807"
+}
+
+\end{chunk}
+
+\index{Corless, Robert}
+\index{Postma, Erik}
+\index{Stoutemyer, David}
+\begin{chunk}{axiom.bib}
+@inproceedings{Corl11,
+ author = "Corless, Robert and Postma, Erik and Stoutemyer, David",
+ title = {{GCD of Multivariate Approximate Polynomials using
+ Beautification with the Subtractive Algorithm}},
+ booktitle = "Int. Workshop on SymbolicNumeric Computation",
+ publisher = "ACM",
+ year = "2011",
+ paper = "Corl11.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Cosmo, Roberto Di}
\begin{chunk}{axiom.bib}
@book{Cosm95,
@@ 15157,6 +15878,40 @@ when shown in factored form.
\end{chunk}
+\index{Cramer, Marcos}
+\index{Koepke, Peter}
+\index{Schroder, Bernhard}
+\begin{chunk}{axiom.bib}
+@article{Cram11,
+ author = "Cramer, Marcos and Koepke, Peter and Schroder, Bernhard",
+ title = {{Parsing and Disambiguation of Symbolic Mathematics in the
+ Naproche System}},
+ journal = "LNAI",
+ volume = "6824",
+ pages = "180--195",
+ year = "2011",
+ publisher = "Springer",
+ abstract =
+ "The Naproche system is a system for linguistically analysing and
+ proofchecking mathematical texts written in a controlled natural
+ language. The aim is to have an input language that is as close as
+ possible to the language that mathematicians actually use when
+ writing textbooks or papers.
+
+ Mathematical texts consist of a combination of natural language
+ and symbolic mathematics, with symbolic mathematics obeying its
+ own syntactic rules. We discuss the difficulties that a program
+ for parsing and disambiguating symbolic mathematics must face and
+ present how these difficulties have been tackled in the Naproche
+ system. One of these difficulties is the fact that information
+ provided in the preceding context  including information
+ provided in natural language  can influence the way a symbolic
+ expression has to be disambiguated.",
+ paper = "Cram11.pdf"
+}
+
+\end{chunk}
+
\index{Cutland, Nigel}
\begin{chunk}{axiom.bib}
@book{Cutl80,
@@ 15381,6 +16136,37 @@ when shown in factored form.
\end{chunk}
+\index{Davies, Rowan}
+\index{Pfenning, Frank}
+\begin{chunk}{axiom.bib}
+@misc{Davi00,
+ author = "Davies, Rowan and Pfenning, Frank",
+ title = {{Intersection Types and Computational Effects}},
+ year = "2000",
+ link = "\url{http://www.cs.cmu.edu/~fp/papers/icfp00.pdf}",
+ abstract =
+ "We show that standard formulations of intersection type systems
+ are unsound in the presence of computational effects, and propose
+ a solution similar to the value restriction for polymorphism
+ adopted in the revised definition of Standard ML. It differs in
+ that it is not tied to letexpressions and requires an additional
+ weakening of the usual subtyping rules. We also present a
+ bidirectional typechecking algorithm for the resulting language
+ that does not require an excessive amount of type annotations and
+ illustrate it through some examples. We further show that the type
+ assignment system can be extended to incorporate parametric
+ polymorphism. Taken together, we see our system and associated
+ typechecking algorithm as a significant step towards the
+ introduction of intersection types into realistic programming
+ languages. The added expressive power would allow many more
+ properties of programs to be stated by the programmer and
+ statically verified by the compiler.",
+ paper = "Davi00.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Day, Martin V.}
\begin{chunk}{axiom.bib}
@book{Dayx16,
@@ 15394,6 +16180,61 @@ when shown in factored form.
\end{chunk}
+\index{Benoit, Alexandre}
+\index{Chyzak, Frederic}
+\index{Darrasse, Alexis}
+\index{Gregoire, Thomas}
+\index{Koutschan, Christoph}
+\index{Mezzarobba, Marc}
+\index{Salvy, Bruno}
+\begin{chunk}{axiom.bib}
+@misc{DDMF19,
+ author = "Benoit, Alexandre and Chyzak, Frederic and Darrasse, Alexis
+ and Gregoire, Thomas and Koutschan, Christoph and
+ Mezzarobba, Marc and Salvy, Bruno",
+ title = {{Digital Dictionary of Mathematical Functions}},
+ year = "2019",
+ link = "\url{http://ddmf.msr-inria.inria.fr/1.9.1/ddmf}",
+ abstract =
+ "Interactive site on Mathematical Functions with properties,
+ truncated expansions, numerical evaluations, plots, and more. The
+ functions currently presented are elementary functions with
+ special functions of a single variable. More functions -- special
+ functions with parameters, orthogonal polynomials, sequences --
+ will be added with the project advances.",
+ paper = "DDMF19.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Demri, S.}
+\index{Laroussinie, F.}
+\index{Schnoebelen, Ph.}
+\begin{chunk}{axiom.bib}
+@article{Demr06,
+ author = "Demri, S. and Laroussinie, F. and Schnoebelen, Ph.",
+ title = {{A Parametric Analysis of the StateExplosion Problem in
+ Model Checking}},
+ journal = "Computer and System Sciences",
+ volume = "72",
+ pages = "547--575",
+ year = "2006",
+ abstract =
+ "In model checking, the stateexplosion problem occurs when one
+ checks a {\sl nonflat system}, i.e., a system implicitly described
+ as a synchronized product of elementary subsystems. In this paper,
+ we investigate the complexity of a wide variety of model-checking
+ problems for nonflat systems under the light of
+ {\sl parameterized complexity}, taking the number of synchronized
+ components as a parameter. We provide precise complexity measures
+ (in the parameterized sense) for most of the problems we
+ investigate, and evidence that the results are robust.",
+ paper = "Demr06.pdf"
+}
+
+\end{chunk}
+
\index{Denes, Maxime}
\index{Mortberg, Anders}
\index{Siles, Vincent}
@@ 15487,6 +16328,48 @@ when shown in factored form.
\end{chunk}
+\index{Deutsch, David}
+\begin{chunk}{axiom.bib}
+@article{Deut85,
+ author = "Deutsch, David",
+ title = {{Quantum Theory, the ChurchTuring Principle and the
+ Universal Quantum Computer}},
+ journal = "Proc. Royal Society of London",
+ volume = "400",
+ pages = "97--117",
+ year = "1985",
+ abstract =
+ "It is argued that underlying the ChurchTuring hypothesis there
+ is an implicit physical assertion. Here, this assertion is
+ presented explicitly as a physical principle: 'every finitely
+ realizable physical system can be perfectly simulated by a
+ universal model computing machine operating by finite
+ means'. Classical physics and the universal Turing machine,
+ because the former is continuous and the latter discrete, do not
+ obey the principle, at least in the strong form above. A class of
+ model computing machines that is the quantum generalization of the
+ class of Turing machines is described, and it is shown that
+ quantum theory and the 'universal quantum computer' are compatible
+ with the principle. Computing machines resembling the universal
+ quantum computer could, in principle, be built and would have many
+ remarkable properties not reproducible by any Turing
+ machine. These do not include the computation of nonrecursive
+ functions, but they do include 'quantum parallelism', a method by
+ which certain probabilistic tasks can be performed faster by a
+ universal quantum computer than by any classical restriction of
+ it. The intuitive explanation of these properties places an
+ intolerable strain on all interpretations of quantum theory other
+ than Everett's. Some of the numerous connections between the
+ quantum theory of computation and the rest of physics are
+ explored. Quantum complexity theory allows a physically more
+ reasonable definition of the 'complexity' or 'knowledge' in a
+ physical system than does classical complexity theory.",
+ paper = "Deut85.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Dewar, Mike}
\index{Carlisle, David}
\begin{chunk}{axiom.bib}
@@ 15616,33 +16499,81 @@ when shown in factored form.
\end{chunk}
+\index{Dowek, Gilles}
+\begin{chunk}{axiom.bib}
+@inbook{Dowe01,
+ author = "Dowek, Gilles",
+ title = {{Handbook of Automated Reasoning, Vol II}},
+ publisher = "Elsevier Science",
+ year = "2001",
+ chapter = "16",
+ pages = "1009--1062"
+}
+
+\end{chunk}
+
\index{Dunfield, Joshua}
\index{Krishnaswami, Neelakantan R.}
\begin{chunk}{axiom.bib}
@misc{Dunf13,
+@misc{Dunf18,
author = "Dunfield, Joshua and Krishnaswami, Neelakantan R.",
 title = {{Complete and Easy Bidirectional Typechecking for HigherRank
 Polymorphism}},
 link = "\url{https://arxiv.org/pdf/1306.6032.pdf}",
 year = "2013",
 abstract =
+ title = {{Sound and Complete Bidirectional Typechecking for
+ HigherRank Polymorphism with Existentials and
+ Indexed Types}},
+ year = "2018",
+ link = "\url{https://arxiv.org/pdf/1601.05106.pdf}",
+ abstract =
"Bidirectional typechecking, in which terms either synthesize a
type or are checked against a known type, has become popular for
 its scalability (unlike DamasMilner type inference, bidirectional
 typing remains decidable even for very expressive type systems),
 its error reporting, and its relative ease of
 implementation. Following design principles from proof theory,
 bidirectional typing can be applied to many type constructs. The
 principles underlying a bidirectional approach to polymorphism,
 however, are less obvious. We give a declarative, bidirectional
 account of higherrank polymorphism, grounded in proof theory;
 this calculus enjoys many properties such as $\eta$reduction and
 predictability of annotations. We give an algorithm for
 implementing the declarative system; our algorithm is remarkably
 simple and wellbehaved, despite being both sound and complete.",
 paper = "Dunf13.pdf",
+ its applicability to a variety of type systems, its error
+ reporting, and its ease of implementation. Following principles
+ from proof theory, bidirectional typing can be applied to many
+ type constructs. The principles underlying a bidirectional
+ approach to indexed types {\sl generalized algebraic datatypes}
+ are less clear. Building on prooftheoretic treatments of
+ equality, we give a declarative specification of typing based on
+ {\sl focalization}. This approach permits declarative rules for
+ coverage of pattern matching, as well as support for firstclass
+ existential types using a focalized subtyping judgment. We use
+ refinement types to avoid explicitly passing equality proofs in
+ our term syntax, making our calculus similar to languages such as
+ Haskell and OCaml. We also extend the declarative specification
+ with explicit rules for deducing when a type is principal,
+ permitting us to give a complete declarative specification for a
+ rich type system with significant type inference. We also give a
+ set of algorithmic typing rules, and prove that it is sound and
+ complete with respect to the declarative system. The proof
+ requires a number of technical innovations, including proving
+ soundness and completeness in a mutually recursive fashion.",
+ paper = "Dunf18.pdf"
+}
+
+\end{chunk}
+
+\index{Dunfield, Joshua}
+\index{Krishnaswami, Neel}
+\begin{chunk}{axiom.bib}
+@misc{Dunf19,
+ author = "Dunfield, Joshua and Krishnaswami, Neel",
+ title = {{Bidirectional Typing}},
+ year = "2019",
+ link = "\url{https://www.cl.cam.ac.uk/~nk480/bidirsurvey.pdf}",
+ abstract =
+ "Bidirectional typing combines two modes of typing: type checking,
+ which checks that a program satisfies a known type, and type
+ synthesis, which determines a type from the program. Using
+ checking enables bidirectional typing to break the decidability
+ barrier of DamasMilner approaches; using synthesis enables
+ bidirectional typing to avoid the large annotation burden of
+ explicitly typed languages. In addition, bidirectional typing
+ improves error locality. We highlight the design principles that
+ underlie bidirectional type systems, survey the development of
+ bidirectional typing from the prehistoric period before Pierce and
+ Turner's local type inference to the present day, and provide
+ guidance for future investigations.",
+ paper = "Dunf19.pdf",
keywords = "printed"
}
\end{chunk}
@@ 15683,6 +16614,44 @@ when shown in factored form.
\end{chunk}
+\index{Dybjer, Peter}
+\begin{chunk}{axiom.bib}
+@article{Dybj94,
+ author = "Dybjer, Peter",
+ title = {{Inductive Families}},
+ journal = "Formal Aspects of Computing",
+ volume = "6",
+ number = "4",
+ pages = "440--465",
+ year = "1994",
+ abstract =
+ "A general formulation of inductive and recursive definitions in
+ MartinLof's type theory is presented. It extends Backhouse's
+ 'DoItYourself Type Theory' to include inductive definitions of
+ families of sets and definitions of functions by recursion on the
+ way elements of such sets are generated. The formulation is in
+ natural deduction and is intended to be a natural generalization
+ to type theory of MartinLof's theory of iterated inductive
+ definitions of predicate logic.
+
+ Formal criteria are given for correct formation and introduction
+ rules of a new set former capturing definition by strictly
+ positive, iterated, generalized induction. Moreover, there is an
+ inversion principle for deriving elimination and equality rules
+ from the formation and introduction rules. Finally, there is an
+ alternative schematic presentation of definition by recursion.
+
+ The resulting theory is a flexible and powerful language for
+ programming and constructive mathematics. We hint at the wealth of
+ possible applications by showing several basic examples: predicate
+ logic, generalized induction, and a formalization of the untyped
+ lambda calculus.",
+ paper = "Dybj94.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Dzamonja, Mirna}
\begin{chunk}{axiom.bib}
@misc{Dzam18,
@@ 15712,6 +16681,147 @@ when shown in factored form.
\subsection{E} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Ehrig, Hartmut}
+\index{Kreowski, HansJorg}
+\index{Thatcher, James}
+\index{Wagner, Eric}
+\index{Wright, Jesse}
+\begin{chunk}{axiom.bib}
+@article{Ehri80a,
+ author = "Ehrig, Hartmut and Kreowski, HansJorg and Thatcher, James
+ and Wagner, Eric and Wright, Jesse",
+ title = {{Parameterized Data Types in Algebraic Specification Languages}},
+ journal = "LNCS",
+ volume = "85",
+ year = "1980",
+ booktitle = "Automata, Languages and Programming",
+ publisher = "Springer",
+ paper = "Ehri80a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Ehrig, H.}
+\index{Kreowski, H.J.}
+\index{Mahr, B.}
+\index{Padawitz, P.}
+\begin{chunk}{axiom.bib}
+@article{Ehri82,
+ author = "Ehrig, H. and Kreowski, H.J. and Mahr, B. and Padawitz, P.",
+ title = {{Algebraic Implementation of Abstract Data Types}},
+ journal = "Theoretical Computer Science",
+ volume = "20",
+ pages = "209--263",
+ year = "1982",
+ abstract =
+ "Starting with a review of the theory of algebraic specifications
+ in the sense of the ADJgroup a new theory for algebraic
+ implementation of abstract data types is presented.
+
+ While main concepts of this new theory were given already at
+ several conferences this paper provides the full theory of
+ algebraic implementations developed in Berlin except of complexity
+ considerations which are given in a separate paper. This new
+ concept of algebraic implementations includes implementations for
+ algorithms in specific programming languages and on the other hand
+ it meets also the requirements for stepwise refinement of
+ structured programs and software systems as introduced by Dijkstra
+ and Wirth. On the syntactical level an algebraic implementation
+ corresponds to a system of recursive programs while the semantical
+ level is defined by algebraic constructions, called SYNTHESIS,
+ RESTRICTION and IDENTIFICATION. Moreover the concept allows
+ composition of implementations and a rigorous study of
+ correctness. The main results of the paper are different kinds of
+ correctness criteria which are applied to a number of illustrating
+ examples including the implementation of sets by hashtables.
+ Algebraic implementations of larger systems like a histogram or a
+ parts system are given in separate case studies which, however,
+ are not included in this paper.",
+ paper = "Ehri82.pdf"
+}
+
+\end{chunk}
+
+\index{Eisenberg, Richard A.}
+\begin{chunk}{axiom.bib}
+@phdthesis{Eise16,
+ author = "Eisenberg, Richard A.",
+ title = {{Dependent Types in Haskell: Theory and Practice}},
+ school = "University of Pennsylvania",
+ year = "2016",
+ abstract =
+ "Haskell, as implemented by the Glasgow Haskell Compiler (GHC),
+ has been adding new typelevel programming features for some
+ time. Many of these features -- generalized algebraic datatypes
+ (GADTs), type families, kind polymorphism, and promoted datatypes
+ -- have brought Haskell to the doorstep of dependent types. Many
+ dependently typed programs can even currently be encoded, but
+ often the constructions are painful.
+
+ In this dissertation, I describe Dependent Haskell, which supports
+ full dependent types via a backwardcompatible extension to
+ today's Haskell. An important contribution to this work is an
+ implementation, in GHC, of a portion of Dependent Haskell, with
+ the rest to follow. The features I have implemented are already
+ released, in GHC 8.0. This dissertation contains several practical
+ examples of Dependent Haskell code, a full description of the
+ differences between Dependent Haskell and today's Haskell, a novel
+ dependently typed lambdacalculus (called PICO) suitable for use
+ as an intermediate language for compiling Dependent Haskell, and a
+ type inference and elaboration algorithm, BAKE, that translates
+ Dependent Haskell to typecorrect PICO. Full proofs of type safety
+ of PICO and the soundness of BAKE are included in the appendix.",
+ paper = "Eise16.pdf"
+}
+
+\end{chunk}
+
+\index{Elliott, Conal}
+\index{Pfenning, Frank}
+\begin{chunk}{axiom.bib}
+@misc{Elli90,
+ author = "Elliott, Conal and Pfenning, Frank",
+ title = {{A SemiFunctional Implementation of a HigherOrder Logic
+ Programming Language}},
+ year = "1990",
+ link = "\url{http://www.cs.cmu.edu/~fp/papers/elpsml90.pdf}",
+ comment = "\url{http://www.cs.cmu.edu/~fp/papers/elpsmlpaper.tar.gz}",
+ paper = "Elli90.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+
+\index{Elliott, Conal}
+\begin{chunk}{axiom.bib}
+@inproceedings{Elli17,
+ author = "Elliott, Conal",
+ title = {{Compiling to Categories}},
+ booktitle = "Proc. ACM Program. Lang. Vol 1",
+ publisher = "ACM",
+ year = "2017",
+ link = "\url{http://conal.net/papers/compiling-to-categories/compiling-to-categories.pdf}",
+ abstract =
+ "It is well-known that the simply typed lambda-calculus is modeled
+ by any cartesian closed category (CCC). This correspondence
+ suggests giving typed functional programs a variety of
+ interpretations, each corresponding to a different category. A
+ convenient way to realize this idea is as a collection of
+ meaningpreserving transformations added to an existing compiler,
+ such as GHC for Haskell. This paper describes automatic
+ differentiation, incremental computation, and interval
+ analysis. Each such interpretation is a category easily defined in
+ Haskell (outside of the compiler). The general technique appears
+ to provide a compelling alternative to deeply embedded
+ domainspecific languages.",
+ paper = "Elli17.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Ellis, Ferris}
\begin{chunk}{axiom.bib}
@misc{Elli18,
@@ 15738,27 +16848,6 @@ when shown in factored form.
\end{chunk}
\index{Ehrig, Hartmut}
\index{Kreowski, HansJorg}
\index{Thatcher, James}
\index{Wagner, Eric}
\index{Wright, Jesse}
\begin{chunk}{axiom.bib}
@article{Ehri80a,
 author = "Ehrig, Hartmut and Kreowski, HansJorg and Thatcher, James
 and Wagner, Eric and Wright, Jesse",
 title = {{Parameterized Data Types in Algebraic Specification Languages}},
 journal = "LNCS",
 volume = "85",
 year = "1980",
 booktitle = "Automata, Languages and Programming",
 publisher = "Springer",
 paper = "Ehri80a.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Enderton, Herbert B.}
\begin{chunk}{axiom.bib}
@book{Ende01,
@@ 15772,6 +16861,54 @@ when shown in factored form.
\end{chunk}
+\index{Eremondi, Joseph}
+\index{Tanter, Eric}
+\index{Garcia, Ronald}
+\begin{chunk}{axiom.bib}
+@inproceedings{Erem19,
+ author = "Eremondi, Joseph and Tanter, Eric and Garcia, Ronald",
+ title = {{Approximate Normalization for Gradual Dependent Types}},
+ booktitle = "Inter. Conf. on Functional Programming",
+ publisher = "ACM",
+ year = "2019",
+ abstract =
+ "Dependent types help programmers write highly reliable
+ code. However, this reliability comes at a cost: it can be
+ challenging to write new prototypes in (or migrate old code to)
+ dependentlytyped programming languages. Gradual typing makes
+ static type disciplines more flexible, so an appropriate notion of
+ gradual dependent types could fruitfully lower this cost. However,
+ dependent types raise unique challenges for gradual
+ typing. Dependent typechecking involves the execution of program
+ code, but graduallytyped code can signal runtime type errors or
+ diverge. These runtime errors threaten the soundness guarantees
+ that make dependent types so attractive, while divergence spoils
+ the typedriven programming experience.
+
+ This paper presents GDTL, a gradual dependently-typed language
+ that emphasizes pragmatic dependentlytyped programming. GDTL
+ fully embeds both an untyped and dependentlytyped language, and
+ allows for smooth transitions between the two. In addition to
+ gradual types we introduce gradual terms, which allow the user to
+ be imprecise in type indices and to omit proof terms; runtime
+ checks ensure type safety. To account for nontermination and
+ failure, we distinguish between compile-time normalization and
+ run-time execution: compile-time normalization is approximate but
+ total, while run-time execution is exact, but may fail or
+ diverge. We prove that GDTL has decidable typechecking and
+ satisfies all the expected properties of gradual languages. In
+ particular, GDTL satisfies the static and dynamic gradual
+ guarantees: reducing type precision preserves typedness, and
+ altering type precision does not change program behavior outside
+ of dynamic type failures. To prove these properties, we were led
+ to establish a novel normalization gradual guarantee that
+ captures the monotonicity of approximate normalization with
+ respect to imprecision.",
+ paper = "Erem19.pdf"
+}
+
+\end{chunk}
+
\index{Ershov, A.P.}
\begin{chunk}{axiom.bib}
@misc{Ersh77,
@@ 15907,6 +17044,20 @@ when shown in factored form.
\end{chunk}
+\index{Feit, Walter}
+\index{Thompson, John G.}
+\begin{chunk}{axiom.bib}
+@article{Feit63,
+ author = "Feit, Walter and Thompson, John G.",
+ title = {{Solvability of Groups of Odd Order}},
+ journal = "Pacific Journal of Mathematics",
+ volume = "13",
+ pages = "775--1029",
+ year = "1963"
+}
+
+\end{chunk}
+
\index{Felleisen, Matthias}
\begin{chunk}{axiom.bib}
@techreport{Fell85,
@@ 15922,6 +17073,36 @@ when shown in factored form.
\end{chunk}
+\index{Fieker, Claus}
+\index{Hart, William}
+\index{Hofmann, Tommy}
+\index{Johansson, Fredrik}
+\begin{chunk}{axiom.bib}
+@inproceedings{Fiek17,
+ author = "Fieker, Claus and Hart, William and Hofmann, Tommy and
+ Johansson, Fredrik",
+ title = {{Nemo/Hecke: Computer Algebra and Number Theory Package
+ for the Julia Programming Language}},
+ booktitle = "ISSAC'17",
+ publisher = "ACM",
+ year = "2017",
+ pages = "157--164",
+ abstract =
+ "We introduce two new packages, Nemo and Hecke, written in the
+ Julia programming language for computer algebra and number
+ theory. We demonstrate that high performance generic algorithms
+ can be implemented in Julia, without the need to resort to a
+ lowlevel C implementation. For specialised algorithms, we use
+ Julia's efficient native C interface to wrap existing C/C++
+ libraries such as Flint, Arb, Antic and Singular. We give examples
+ of how to use Hecke and Nemo and discuss some algorithms that we
+ have implemented to provide high performance basic arithmetic.",
+ paper = "Fiek17.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
\index{Filliatre, JeanChristophe}
\begin{chunk}{axiom.bib}
@misc{Fill13a,
@@ 16001,6 +17182,52 @@ when shown in factored form.
\end{chunk}
+\index{Ganesalingam, Mohan}
+\begin{chunk}{axiom.bib}
+@phdthesis{Gane09,
+ author = "Ganesalingam, Mohan",
+ title = {{The Language of Mathematics}},
+ school = "University of Cambridge",
+ year = "2009"
+}
+
+\end{chunk}
+
+\index{Ganesalingam, M.}
+\index{Gowers, W.T.}
+\begin{chunk}{axiom.bib}
+@article{Gane17,
+ author = "Ganesalingam, M. and Gowers, W.T.",
+ title = {{A Fully Automatic Theorem Prover with HumanStyle Output}},
+ journal = "J. Automated Reasoning",
+ volume = "58",
+ pages = "253291",
+ year = "2017",
+ abstract =
+ "This paper describes a program that solves elementary
+ mathematical problems, mostly in metric space theory, and presents
+ solutions that are hard to distinguish from solutions that might
+ be written by a human mathematician.",
+ paper = "Gane17.pdf"
+}
+
+\end{chunk}
+
+\index{Ganzinger, Harald}
+\begin{chunk}{axiom.bib}
+@article{Ganz80,
+ author = "Ganzinger, Harald",
+ title = {{Transforming Denotational Semantics into Practical
+ Attribute Grammars}},
+ journal = "LNCS",
+ volume = "54",
+ pages = "169",
+ year = "1980",
+ paper = "Ganz80.pdf"
+}
+
+\end{chunk}
+
\begin{chunk}{axiom.bib}
@misc{Gapt19,
author = "Unknown",
@@ 16012,6 +17239,35 @@ when shown in factored form.
\end{chunk}
+\index{Gaudel, M.C.}
+\begin{chunk}{axiom.bib}
+@article{Gaud80,
+ author = "Gaudel, M.C.",
+ title = {{Specification of Compilers as Abstract Data Type
+ Representations}},
+ journal = "LNCS",
+ volume = "54",
+ pages = "140164",
+ year = "1980",
+ abstract =
+ "This paper presents a method for specifying and proving
+ compilers. This method is based on the algebraic data types
+ ideas. The main points are:
+ \begin{itemize}
+ \item to each language is associated an algebraic abstract data type
+ \item the semantic value of a program is given as a term of this
+ data type
+ \item the translation of the semantic values of source programs
+ into semantic values of target programs is specified and proved as
+ the representation of an algebraic data type by another one.
+ \end{itemize}
+ A compiler generator, PERLUETTE, which accepts such specifications
+ as input is described. The proof technique is discussed.",
+ paper = "Gaud80.pdf"
+}
+
+\end{chunk}
+
\index{Geuvers, Herman}
\begin{chunk}{axiom.bib}
@article{Geuv00,
@@ 16188,6 +17444,96 @@ when shown in factored form.
\end{chunk}
+\index{Gonthier, Georges}
+\index{Mahboubi, Assia}
+\index{Rideau, Laurence}
+\index{Tassi, Enrico}
+\index{Thery, Laurent}
+\begin{chunk}{axiom.bib}
+@inproceedings{Gont07,
+ author = "Gonthier, Georges and Mahboubi, Assia and Rideau, Laurence
+ and Tassi, Enrico and Thery, Laurent",
+ title = {{A Modular Formalisation of Finite Group Theory}},
+ booktitle = "Theorem Proving in Higher Order Logics",
+ publisher = "Springer",
+ pages = "86101",
+ year = "2007"
+}
+
+\end{chunk}
+
+\index{Gonthier, Georges}
+\begin{chunk}{axiom.bib}
+@article{Gont08,
+ author = "Gonthier, Georges",
+ title = {{Formal Proof  The Four Color Theorem}},
+ journal = "Notices Amer. Math. Soc.",
+ volume = "55",
+ number = "11",
+ pages = "13821393",
+ year = "2008"
+}
+
+\end{chunk}
+
+\index{Gonthier, Georges}
+\index{Mahboubi, Assia}
+\begin{chunk}{axiom.bib}
+@article{Gont10,
+ author = "Gonthier, Georges and Mahboubi, Assia",
+ title = {{An Introduction to Small Scale Reflection in Coq}},
+ journal = "J. Formaliz. Reason.",
+ volume = "3",
+ number = "2",
+ pages = "95152",
+ year = "2010"
+}
+
+\end{chunk}
+
+\index{Gonthier, Georges}
+\begin{chunk}{axiom.bib}
+@inproceedings{Gont11,
+ author = "Gonthier, Georges",
+ title = {{Advances in the Formalization of the Odd Order Theorem}},
+ booktitle = "Interactive Theorem Proving",
+ publisher = "Springer",
+ pages = "2",
+ year = "2011"
+}
+
+\end{chunk}
+
+\index{Gonthier, Georges}
+\begin{chunk}{axiom.bib}
+@inproceedings{Gont11a,
+ author = "Gonthier, Georges",
+ title = {{PointFree, SetFree Concrete Linear Algebra}},
+ booktitle = "Interactive Theorem Proving",
+ publisher = "Springer",
+ pages = "103118",
+ year = "2011"
+}
+
+\end{chunk}
+
+\index{Gonthier, Georges}
+\index{Ziliani, Beta}
+\index{Nanevski, Aleksandar}
+\index{Dreyer, Derek}
+\begin{chunk}{axiom.bib}
+@inproceedings{Gont11b,
+ author = "Gonthier, Georges and Ziliani, Beta and Nanevski, Aleksandar
+ and Dreyer, Derek",
+ title = {{How to make Ad Hoc Proof Automation less Ad Hoc}},
+ booktitle = "Int. Conf. on Functional Programming",
+ publisher = "ACM",
+ pages = "163175",
+ year = "2011"
+}
+
+\end{chunk}
+
\index{Gordon, Michael J.}
\index{Milner, Arthur J.}
\index{Wadsworth, Christopher P.}
@@ 16278,6 +17624,47 @@ when shown in factored form.
\end{chunk}
+\index{Goto, Kazushige}
+\index{van de Geijn, Robert A.}
+\begin{chunk}{axiom.bib}
+@article{Goto19,
+ author = "Goto, Kazushige and van de Geijn, Robert A.",
+ title = {{Anatomy of HighPerformance Matrix Multiplication}},
+ journal = "Transactions on Mathematical Software",
+ volume = "V",
+ number = "N",
+ year = "2019",
+ link = "\url{https://www.cs.utexas.edu/~flame/pubs/GotoTOMS_revision.pdf}",
+ abstract =
+ "We present the basic principles which underlie the high
+ performance implementation of the matrix multiplication that is
+ part of the widely used GotoBLAS library. Design decisions are
+ justified by successively refining a model of architectures with
+ multilevel memories. A simple but effective algorithm for
+ executing this operation results. Implementations on a broad
+ selection of architectures are shown to achieve nearpeak
+ performance.",
+ paper = "Goto19.pdf"
+}
+
+\end{chunk}
+
+\index{Grabowski, Adam}
+\index{Kornilowicz, Artur}
+\index{Naumowicz, Adam}
+\begin{chunk}{axiom.bib}
+@article{Grab10,
+ author = "Grabowski, Adam and Kornilowicz, Artur and Naumowicz, Adam",
+ title = {{Mizar in a Nutshell}},
+ journal = "J. Formaliz. Reason.",
+ volume = "3",
+ number = "2",
+ pages = "153245",
+ year = "2010"
+}
+
+\end{chunk}
+
\index{Gravel, Katherine}
\index{Jananthan, Hayden}
\index{Kepner, Jeremy}
@@ 16329,6 +17716,22 @@ when shown in factored form.
\end{chunk}
\index{Griesmer, J.H.}
+\begin{chunk}{axiom.bib}
+@article{Grie76,
+ author = "Griesmer, James",
+ title = {{Symbolic Mathematical Computation: A Survey}},
+ journal = "SIGSAM Bulletin",
+ volume = "10",
+ number = "2",
+ pages = "3032",
+ year = "1976",
+ paper = "Grie76.pdf",
+ keywords = "printed, axiomref, DONE"
+}
+
+\end{chunk}
+
+\index{Griesmer, J.H.}
\index{Jenks, R.D.}
\index{Yun, D.Y.Y}
\begin{chunk}{axiom.bib}
@@ 16350,6 +17753,59 @@ when shown in factored form.
\end{chunk}
+\index{Grossman, Dan}
+\begin{chunk}{axiom.bib}
+@inproceedings{Gros02,
+ author = "Grossman, Dan",
+ title = {{Existential Types for Imperative Languages}},
+ booktitle = "Euro. Symp. on Prog. Langs. and Systems",
+ publisher = "SpringerVerlag",
+ pages = "2135",
+ year = "2002",
+ isbn = "3540433635",
+ abstract =
+ "We integrate existential types into a strongly typed Clike
+ language. In particular, we show how a bad combination of
+ existential types, mutation, and aliasing can cause a subtle
+ violation of type safety. We explore two independent ways to
+ strengthen the type system to restore safety. One restricts the
+ mutation of existential packages. The other restricts the types of
+ aliases of existential packages. We use our framework to explain why
+ other languages with existential types are safe.",
+ paper = "Gros02.pdf"
+}
+
+\end{chunk}
+
+\index{Grossman, Dan}
+\begin{chunk}{axiom.bib}
+@article{Gros06,
+ author = "Grossman, Dan",
+ title = {{Quantified Types for Imperative Languages}},
+ journal = "Trans. on Prog. Lang. and Systems",
+ volume = "28",
+ number = "3",
+ year = "2006",
+ pages = "429475",
+ abstract =
+ "We describe universal types, existential types, and type
+ constructors in Cyclone, a stronglytyped Clike language. We show
+ how the language naturally supports firstclass polymorphism and
+ polymorphic recursion while requiring an acceptable amount of
+ explicit type information. More importantly, we consider the
+ soundness of type variables in the presence of Cstyle mutation
+ and the addressof operator. For polymorphic references, we
+ describe a solution more natural for the C level than the MLstyle
+ ``value restriction''. For existential types, we discover and
+ subsequently avoid a subtle unsoundness issue resulting from the
+ addressof operator. We develop a formal abstract machine and
+ typesafety proof that captures the essence of type variables at
+ the C level.",
+ paper = "Gros06.pdf"
+}
+
+\end{chunk}
+
\index{Gurevich, Yuri}
\begin{chunk}{axiom.bib}
@article{Gure12,
@@ 16361,12 +17817,12 @@ when shown in factored form.
year = "2012",
abstract =
"We attempt to put the title problem and the ChurchTuring thesis into
 a proper perspective and to clarify some common misconcep tions
+ a proper perspective and to clarify some common misconceptions
related to Turing’s analysis of computation. We examine two approaches
to the title problem, one wellknown among philosophers and another
among logicians.",
paper = "Gure12.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 16434,6 +17890,54 @@ when shown in factored form.
\end{chunk}
+\index{Hales, Thomas C.}
+\begin{chunk}{axiom.bib}
+@article{Hale07,
+ author = "Hales, Thomas C.",
+ title = {{The Jordan Curve Theorem, Formally and Informally}},
+ journal = "Amer. Math. Monthly",
+ volume = "114",
+ number = "10",
+ pages = "882894",
+ year = "2007"
+}
+
+\end{chunk}
+
+\index{Hales, Thomas C.}
+\index{Harrison, John}
+\index{McLaughlin, Sean}
+\index{Nipkow, Tobias}
+\index{Obua, Steven}
+\index{Zumkeller, Roland}
+\begin{chunk}{axiom.bib}
+@article{Hale10,
+ author = "Hales, Thomas C. and Harrison, John and McLaughlin, Sean
+ and Nipkow, Tobias and Obua, Steven and Zumkeller,
+ Roland",
+ title = {{A Revision of the Proof of the Kepler Conjecture}},
+ journal = "Discrete and Computational Geometry",
+ volume = "44",
+ number = "1",
+ pages = "134",
+ year = "2010",
+ abstract =
+ "The Kepler conjecture asserts that no packing of congruent balls
+ in three-dimensional Euclidean space has density greater than that
+ of the facecentered cubic packing. The original proof, announced
+ in 1998 and published in 2006, is long and complex. The process of
+ revision and review did not end with the publication of the proof.
+ This article summarizes the current status of a longterm
+ initiative to reorganize the original proof into a more
+ transparent form and to provide a greater level of certification
+ of the correctness of the computer code and other details of the
+ proof. A final part of this article lists errata in the original
+ proof of the Kepler conjecture.",
+ paper = "Hale10.pdf"
+}
+
+\end{chunk}
+
\index{Hammack, Richard}
\begin{chunk}{axiom.bib}
@book{Hamm18,
@@ 16583,6 +18087,87 @@ when shown in factored form.
\end{chunk}
+\index{Harrison, John}
+\begin{chunk}{axiom.bib}
+@inproceedings{Harr96a,
+ author = "Harrison, John",
+ title = {{HOL Light: A Tutorial Introduction}},
+ booktitle = "First Int. Conf. on Formal Methods in ComputerAided Design",
+ publisher = "unknownn",
+ pages = "265269",
+ year = "1996"
+}
+
+\end{chunk}
+
+\index{Harrison, John}
+\begin{chunk}{axiom.bib}
+@article{Harr09a,
+ author = "Harrison, John",
+ title = {{A Formalized Proof of Dirichlet's Theorem on Primes in
+ Arithmetic Progression}},
+ journal = "J. Formaliz. Reason.",
+ volume = "2",
+ number = "1",
+ pages = "6383",
+ year = "2009"
+}
+
+\end{chunk}
+
+\index{Harrison, John}
+\begin{chunk}{axiom.bib}
+@article{Harr09b,
+ author = "Harrison, John",
+ title = {{Formalizing an Analytic Proof of the Prime Number Theorem}},
+ journal = "J. Automated Reasoning",
+ volume = "43",
+ pages = "243261",
+ year = "2009"
+}
+
+\end{chunk}
+
+\index{Harrison, John}
+\begin{chunk}{axiom.bib}
+@misc{Harr13,
+ author = "Harrison, John",
+ title = {{A Survey of Automated Theorem Proving}},
+ year = "2013",
+ link = "\url{https://www.lektorium.tv/lecture/14805}"
+}
+
+\end{chunk}
+
+\index{Hartmanis, J.}
+\index{Stearns, R.E.}
+\begin{chunk}{axiom.bib}
+@article{Hart63,
+ author = "Hartmanis, J. and Stearns, R.E.",
+ title = {{On the Computational Complexity of Algorithms}},
+ journal = "Trans. American Mathematical Society",
+ volume = "117",
+ pages = "285306",
+ year = "1963",
+ paper = "Hart63.pdf"
+}
+
+\end{chunk}
+
+\index{Hartmanis, Juris}
+\begin{chunk}{axiom.bib}
+@misc{Hart95,
+ author = "Hartmanis, Juris",
+ title = {{On Computational Complexity and the Nature of Computer
+ Science}},
+ year = "1995",
+ comment = "Turing Award Lecture",
+ paper = "Hart95.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
\index{Harvey, David}
\index{van der Hoeven, Joris}
\begin{chunk}{axiom.bib}
@@ 16721,6 +18306,155 @@ when shown in factored form.
\end{chunk}
+\index{Hearn, Anthony C.}
+\begin{chunk}{axiom.bib}
+@article{Hear71,
+ author = "Hearn, Anthony C.",
+ title = {{Applications of Symbol Manipulation in Theoretical Physics}},
+ journal = "Communications of the ACM",
+ volume = "14",
+ number = "8",
+ pages = "511516",
+ year = "1971",
+ paper = "Hear71.pdf"
+}
+
+\end{chunk}
+
+\index{Hearn, Anthony C.}
+\begin{chunk}{axiom.bib}
+@article{Hear72,
+ author = "Hearn, Anthony C.",
+ title = {{An Improved NonModular Polynomial GCD Algorithm}},
+ journal = "ACM SIGSAM Bulletin",
+ volume = "23",
+ pages = "1015",
+ year = "1972",
+ abstract =
+ "An improved nonmodular algorithm for the calculation of the
+ greatest common divisor of two multivariate polynomials is
+ presented.",
+ paper = "Hear72.pdf"
+}
+
+\end{chunk}
+
+\index{Hearn, Anthony C.}
+\begin{chunk}{axiom.bib}
+@book{Hear73,
+ author = "Hearn, Anthony C.",
+ title = {{REDUCE2 Users Manual}},
+ comment = "Computing Physics Group",
+ publisher = "University of Utah",
+ year = "1973"
+}
+
+\end{chunk}
+
+\index{Hearn, Anthony C.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Hear74,
+ author = "Hearn, Anthony C.",
+ title = {{A Mode Analysing Algebraic Manipulation Program}},
+ booktitle = "Proc. 1974 annual ACM Conference. Vol 2",
+ publisher = "ACM",
+ year = "1974"
+}
+
+\end{chunk}
+
+\index{Hearn, Anthony C.}
+\begin{chunk}{axiom.bib}
+@article{Hear79,
+ author = "Hearn, Anthony C.",
+ title = {{Nonmodular computation of polynomial GCDs using Trial Division}},
+ journal = "LNCS",
+ volume = "72",
+ pages = "227239",
+ year = "1979",
+ abstract =
+ "This paper describes a new algorithm for the determination of the
+ GCD of two multivariate polynomials by nonmodular means.",
+ paper = "Hear79.pdf"
+}
+
+\end{chunk}
+
+\index{Hebisch, Waldemar}
+\begin{chunk}{axiom.bib}
+@misc{Hebi07,
+ author = "Hebisch, Waldemar",
+ title = {{FriCAS Project Statement}},
+ year = "2007",
+ link = "\url{http://www.math.uni.wroc.pl/~hebisch/fricas/fricasreg.html}",
+ abstract =
+ "Problem Statement: FriCAS a fork of Axiom project. Its starting
+ point is whsandbox branch of the Axiom project. Axiom project
+ tried to use literate programming methodology and switch emphasis
+ from code to documentation. In practice that means that almost
+ all code is wrapped in so called pamphlet files and must be
+ extracted by noweb tool before use in build process. This causes
+ significant difficulties during development, starting from having
+ long file names, problems of tracking errors to exact source
+ locations and complications in Makefiles. Literate programming
+ practice in Axiom produced little documentation, but makes sources
+ harder to read for programmer. FriCAS will use traditional
+ methodology for new development and gradually convert other files
+ back to traditional form (back because Axiom was originally
+ developed using traditional methodology, and only during
+ transition to open source project files were mechanically
+ converted to ``literate'' form). Axiom project evolved very
+ slowly, many simple fixes were not applied for long time
+ (years). FriCAS will use lightweight development, allowing much
+ faster evolution.
+
+ Short term planned new technical developments: Axiom system is
+ written in multiple languages, main two called Boot and Spad are
+ specific to Axiom. Boot and Spad are translated to Common
+ Lisp. Currently Axiom is fully functional when working on top of
+ Gnu Common Lisp (GCL). In whsandbox branch core functionality
+ works also with other Lisp implementations, and FriCAS should
+ fully support other Lisp implementations. This should increase
+ acceptance of FriCAS, because on some platforms GCL is hard to
+ build, on some does not work at all and also Lisp developers
+ frequently find other Lisp implementation preferable. The Spad
+ language used for mathematical code in Axiom is strongly
+ typed. Axiom system caches type information between builds 
+ during build it uses type information from previous build. This
+ makes modifications to Axiom code very difficult, because after
+ changing type in source old type information is used in many
+ cases, causing spurious type errors. FriCAS will use only type
+ information from sources eliminating such problems. Many modern
+ mathematical codes to get better speed are written in C or
+ C++. FriCAS will interface to such codes to gain speed
+ advantage. Currently Axiom system offers its own user interface,
+ but it is somewhat dated -- mostly text based, graphics has old
+ looks and feel. FriCAS adds hooks which make adding alternative user
+ interfaces easier.
+
+ Longer term plans: The compiler for Spad language included in
+ Axiom is buggy and has serious performance problems  new
+ compiler is needed. New mathematical algorithms. Assertion
+ support.",
+}
+
+\end{chunk}
+
+\index{Hebisch, Waldemar}
+\begin{chunk}{axiom.bib}
+@misc{Hebi16,
+ author = "Hebisch, Waldemar",
+ title = {{Integration in terms of exponential integrals and
+ incomplete gamma functions}},
+ year = "2016",
+ link = "\url{http://www.math.uni.wroc.pl/~hebisch/other/icms.pdf}",
+ comments = "slides",
+ paper = "Hebi16.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
\index{van Heijenoort, Jean}
\begin{chunk}{axiom.bib}
@book{Heij67,
@@ 16764,7 +18498,7 @@ when shown in factored form.
languages. In our opinion its brevity and simple semantics make
$\mu$Kanren uniquely elegant.",
paper = "Hema13.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 16842,76 +18576,58 @@ when shown in factored form.
\end{chunk}
\index{Hearn, Anthony C.}
\begin{chunk}{axiom.bib}
@article{Hear71,
 author = "Hearn, Anthony C.",
 title = {{Applications of Symbol Manipulation in Theoretical Physics}},
 journal = "Communications of the ACM",
 volume = "14",
 number = "8",
 pages = "511516",
 year = "1971",
 paper = "Hear71.pdf"
}

\end{chunk}

\index{Hearn, Anthony C.}
+\index{von Henke, F.W.}
+\index{Dold, A.}
+\index{Ruess, H.}
+\index{Schwier, D.}
+\index{Strecker, M.}
\begin{chunk}{axiom.bib}
@article{Hear72,
 author = "Hearn, Anthony C.",
 title = {{An Improved NonModular Polynomial GCD Algorithm}},
 journal = "ACM SIGSAM Bulletin",
 volume = "23",
 pages = "1015",
 year = "1972",
+@article{Henk94,
+ author = "von Henke, F.W. and Dold, A. and Ruess, H. and Schwier, D.
+ and Strecker, M.",
+ title = {{Construction and Deduction Methods for the Formal
+ Development of Software}},
+ journal = "LNCS",
+ volume = "1009",
+ year = "1994",
abstract =
 "An improved nonmodular algorithm for the calculation of the
 greatest common divisor of two multivariate polynomials is
 presented.",
 paper = "Hear72.pdf"
}

\end{chunk}

\index{Hearn, Anthony C.}
\begin{chunk}{axiom.bib}
@book{Hear73,
 author = "Hearn, Anthony C.",
 title = {{REDUCE2 Users Manual}},
 comment = "Computing Physics Group",
 publisher = "University of Utah",
 year = "1973"
}

\end{chunk}

\index{Hearn, Anthony C.}
\begin{chunk}{axiom.bib}
@inproceedings{Hear74,
 author = "Hearn, Anthony C.",
 title = {{A Mode Analysing Algebraic Manipulation Program}},
 booktitle = "Proc. 1974 annual ACM Conference. Vol 2",
 publisher = "ACM",
 year = "1974"
}
+ "In this paper we present an approach towards a framework based on
+ the type theory ECC (Extended Calculus of Constructions) in which
+ specifications, programs and operators for modular development by
+ stepwise refinement can be formally described and reasoned
+ about. We show that generic software development steps can be
+ expressed as higherorder functions and demonstrate that proofs
+ about their asserted effects can be carried out in the underlying
+ logical calculus.
+
+ For transformations requiring syntactic manipulations of objects,
+ a twolevel system comprising a Meta and an Objectlevel is
+ provided, and it is shown how transformations can be formalized
+ that faithfully represent operators on the object level.",
+ paper = "Henk94.pdf",
+ keywords = "printed"
+}
\end{chunk}
\index{Hearn, Anthony C.}
+\index{von Henke, F.W.}
+\index{Luther, M.}
+\index{Pfeifer, H.}
+\index{Ruess, H.}
+\index{Schwier, D.}
+\index{Strecker, M.}
+\index{Wagner, M.}
\begin{chunk}{axiom.bib}
@article{Hear79,
 author = "Hearn, Anthony C.",
 title = {{Nonmodular computation of polynomial GCDs using Trial Division}},
+@article{Henk96,
+ author = "von Henke, F.W. and Luther, M. and Pfeifer, H. and Ruess, H.
+ and Schwier, D. and Strecker, M. and Wagner, M.",
+ title = {{The TYPELAB Specification and Verification Environment}},
journal = "LNCS",
 volume = "72",
 pages = "227239",
 year = "1979",
 abstract =
 "This paper describes a new algorithm for the determination of the
 GCD of two multivariate polynomials by nonmodular means.",
 paper = "Hear79.pdf"
+ volume = "1101",
+ pages = "604607",
+ year = "1996",
+ paper = "Henk96.pdf",
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 17077,6 +18793,20 @@ when shown in factored form.
\end{chunk}
+\index{Holzl, Johannes}
+\index{Heller, Armin}
+\begin{chunk}{axiom.bib}
+@inproceedings{Holz11,
+ author = "Holzl, Johannes and Heller, Armin",
+ title = {{Three Chapters of Measure Theory in Isabelle / HOL}},
+ booktitle = "Interactive Theorem Proving",
+ publisher = "Springer",
+ pages = "135151",
+ year = "2011"
+}
+
+\end{chunk}
+
\index{Horozal, Fulya}
\index{Iacob, Alin}
\index{Jucovschi, Constantin}
@@ 17199,6 +18929,71 @@ when shown in factored form.
\end{chunk}
+\index{Huet, Gerard}
+\index{Saibi, Amokrane}
+\begin{chunk}{axiom.bib}
+@inproceedings{Huet00,
+ author = "Huet, Gerard and Saibi, Amokrane",
+ title = {{Constructive Category Theory}},
+ booktitle = "Proof, Language, and Interaction: Essays in Honour of
+ Robin Milner",
+ publisher = "MIT Press",
+ pages = "235275",
+ year = "2000"
+}
+
+\end{chunk}
+
+\index{Hughes, John}
+\begin{chunk}{axiom.bib}
+@misc{Hugh19,
+ author = "Hughes, John",
+ title = {{How to Specify it!}},
+ year = "2019",
+ link = "\url{https://www.dropbox.com/s/tx2b84kae4bw1p4/paper.pdf}",
+ abstract =
+ "Propertybased testing tools test software against a
+ specification, rather than a set of examples. This tutorial paper
+ presents five generic approaches to writing such specifications
+ (for purely functional code). We discuss costs, benefits, and
+ bugfinding power of each approach, with reference to a simple
+ example with eight buggy variants. The lessons learned should help
+ the reader to develop effective property-based tests in the future.",
+ paper = "Hugh19.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
+\index{Hutton, Graham}
+\begin{chunk}{axiom.bib}
+@article{Hutt99,
+ author = "Hutton, Graham",
+ title = {{A Tutorial on the Universality and Expressiveness of
+ Fold}},
+ journal = "J. Functional Programming",
+ volume = "9",
+ number = "4",
+ pages = "355372",
+ year = "1999",
+ abstract =
+ "In functional programming, {\sl fold} is a standard operator that
+ encapsulates a simple pattern of recursion for processing
+ lists. This article is a tutorial on two key aspects of the fold
+ operator for lists. First of all, we emphasize the use of the
+ universal property of fold both as a proof principle that avoids
+ the need for inductive proofs, and as a definition principle that
+ guides the transformation of recursive functions into definitions
+ using fold. Secondly, we show that even though the pattern of
+ recursion encapsulated by fold is simple, in a language with
+ tuples and functions as firstclass values the fold operator has
+ greater expressive power than might first be expected.",
+ paper = "Hutt99.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{I} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{chunk}{axiom.bib}
@@ 17244,6 +19039,32 @@ when shown in factored form.
\end{chunk}
+\index{Innes, Sean}
+\index{Wu, Nicolas}
+\begin{chunk}{axiom.bib}
+@inproceedings{Inne19,
+ author = "Innes, Sean and Wu, Nicolas",
+ title = {{Tic Tak Types}},
+ booktitle = "Int. Workshop on Type Driven Development",
+ publisher = "ACM",
+ year = "2019",
+ abstract =
+ "TicTacToe is a simple, familiar, classic game enjoyed by
+ many. This pearl is designed to give a flavour of the world of
+ dependent types to the uninitiated functional programmer. We cover
+ a journey from TicTakTerrible implementations in the harsh world
+ of virtually untyped {\sl Strings}, through the safe haven of
+ vectors that know their own length, and into a TicTacTitanium
+ version that is too strongly typed for its own good. Along the way
+ we discover something we knew all along: types are great, but in
+ moderation. This lesson is quickly put to use in a more complex
+ recursive version.",
+ paper = "Inne19.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{J} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Jammer, Max}
@@ 17257,6 +19078,43 @@ when shown in factored form.
\end{chunk}
+\index{Jedynak, Wojciech}
+\index{Biernacka, Malgorzata}
+\index{Biernacki, Dariusz}
+\begin{chunk}{axiom.bib}
+@inproceedings{Jedy13,
+ author = "Jedynak, Wojciech and Biernacka, Malgorzata and
+ Biernacki, Dariusz",
+ title = {{An Operational Foundation for the Tactic Language of Coq}},
+ booktitle = "Proc. 15th Symp. on Principles and Practices of
+ Declarative Programming",
+ publisher = "ACM",
+ pages = "2536",
+ year = "2013",
+ isbn = "9781450321549",
+ abstract =
+ "We introduce a semantic toolbox for Ltac, the tactic language of
+ the popular Coq proof assistant. We present three formats of
+ operational semantics, each of which has its use in the practice
+ of tactic programming: a bigstep specification in the form of
+ natural semantics, a model of implementation in the form of an
+ abstract machine, and a smallstep characterization of computation
+ in the form of reduction semantics. The three semantics are
+ provably equivalent and have been obtained via offtheshelf
+ derivation techniques of the functional correspondence and the
+ syntactic correspondence. We also give examples of Ltac programs
+ and discuss some of the issues that the formal semantics help to
+ clarify.
+
+ With this work we hope to enhance the operational understanding of
+ Ltac as well as to set up a framework to reason about Coq scripts
+ and to build tools supporting tactic programming based on rigorous
+ semantics.",
+ paper = "Jedy13.pdf"
+}
+
+\end{chunk}
+
\index{Jeffrey, David J.}
\index{Rich, Albert D.}
\begin{chunk}{axiom.bib}
@@ 17324,8 +19182,162 @@ when shown in factored form.
\end{chunk}
+\index{Jones, Neil D.}
+\index{Schmidt, David A.}
+\begin{chunk}{axiom.bib}
+@article{Jone80,
+ author = "Jones, Neil D. and Schmidt, David A.",
+ title = {{Compiler Generation from Denotational Semantics}},
+ journal = "LNCS",
+ volume = "54",
+ pages = "7093",
+ year = "1980",
+ abstract =
+ "A methodology is described for generating provably correct
+ compilers from denotational definitions of programming
+ languages. An application is given to produce compilers into STM
+ code (an STM or state transition machine is a flowchartlike
+ program, lowlevel enough to be translated into efficient code on
+ conventional computers). First, a compiler $\phi:LAMC\rightarrow
+ STM$ from a lambda calculus dialect is defined. Any denotational
+ defintion $\Delta$ of language $L$ defines a map
+ $\over{\rightarrow}{\Delta}:L\rightarrow LAMC$, so
+ $\over{\rightarrow}{\Delta}\circ \phi$ compiles $L$ into STM
+ code. Correctness follows from the correctness of $\phi$.
+
+ The algebraic framework of Morris, ADJ, etc. is used. The set of
+ STMs is given an algebraic structure so any
+ $\over{\rightarrow}{\Delta} \circ \phi$ may be specified by giving
+ a derived operator on STM for each syntax rule of $L$.
+
+ This approach yields quite redundant object programs, so the paper
+ ends by describing two flow analytic optimization methods. The
+ first analyzes an alreadyproduced STM to obtain information about
+ its runtime behaviour which is used to optimize the STM. The
+ second analyzer the generated compiling scheme to determine
+ runtime properties of object programs in general which a compiler
+ can use to produce less redundant STMs.",
+ paper = "Jone80.pdf"
+}
+
+\end{chunk}
+
+\index{Jones, Neil D.}
+\index{Madsen, Michael}
+\begin{chunk}{axiom.bib}
+@article{Jone80a,
+ author = "Jones, Neil D. and Madsen, Michael",
+ title = {{AttributeInfluenced LR Parsing}},
+ journal = "LNCS",
+ volume = "54",
+ pages = "393407",
+ year = "1980",
+ abstract =
+ "Methods are described which make it possible, when given an
+ arbitrary attribute grammar (or AG),
+ \begin{enumerate}
+ \item to analyze the AG to determine which of its attributes may
+ be computed during LR parsing,
+ \item to augment the parser with instructions and data structures
+ to compute many attributes during parsing,
+ \item to use attribute values to assist the parsing process
+ (e.g. to use symbol table information to decide whether P(X) is an
+ array element or a function call).
+ \end{enumerate}",
+ paper = "Jone80a.pdf"
+}
+
+\end{chunk}
+
+\index{Jung, Ralf}
+\index{Jourdan, JacquesHenri}
+\index{Krebbers, Robbert}
+\index{Dreyer, Derek}
+\begin{chunk}{axiom.bib}
+@inproceedings{Jung18,
+ author = "Jung, Ralf and Jourdan, JacquesHenri and
+ Krebbers, Robbert and Dreyer, Derek",
+ title = {{RustBelt: Securing the Foundations of the Rust Programming
+ Language}},
+ booktitle = "POPL '18",
+ publisher = "ACM",
+ year = "2018",
+ abstract =
+ "Rust is a new systems programming language that promises to
+ overcome the seemingly fundamental tradeoff between highlevel
+ safety guarantees and lowlevel control over resource
+ management. Unfortunately, none of Rust's safety claims have been
+ formally proven, and there is good reason to question whether they
+ actually hold. Specifically, Rust employs a strong,
+ ownership-based type system, but then extends the expressive
+ power of this core type system through libraries that internally
+ use unsafe features. In this paper, we give the first formal (and
+ machinechecked) safety proof for a language representing a
+ realistic subset of Rust. Our proof is extensible in the sense
+ that, for each new Rust library that uses unsafe features, we can
+ say what verification condition it must satisfy in order for it to
+ be deemed a safe extension to the language. We have carried out
+ this verification for some of the most important libraries that
+ are used throughout the Rust ecosystem.",
+ paper = "Jung18.pdf",
+ keywords = "printed"
+
+}
+
+\end{chunk}
+
\subsection{K} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Kahrs, S.}
+\index{Sannella, D.}
+\index{Tarlecki, A.}
+\begin{chunk}{axiom.bib}
+@article{Kahr94,
+ author = "Kahrs, S. and Sannella, D. and Tarlecki, A.",
+ title = {{Interfaces and Extended ML}},
+ journal = "SIGPLAN Notices",
+ volume = "29",
+ number = "8",
+ pages = "111118",
+ year = "1994",
+ abstract =
+ "This is a position paper giving our views on the uses and makeup
+ of module interfaces. The position espoused is inspired by our
+ work on the Extended ML (EML) formal software development
+ framework and by ideas in the algebraic foundations of
+ specification and formal development. The present state of
+ interfaces in EML is outlined and set in the context of plans for
+ a more general EMLlike framework with axioms in interfaces taken
+ from an arbitrary logical system formulated as an
+ {\sl institution}. Some more speculative plans are sketched
+ concerning the simultaneous use of multiple institutions in
+ specification and development.",
+ paper = "Kahr94.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kahrs, Stefan}
+\begin{chunk}{axiom.bib}
+@techreport{Kahr95,
+ author = "Kahrs, Stefan",
+ title = {{On the Static Analysis of Extended ML}},
+ type = "technical report",
+ institution = "Lab for Foundations of Comp Sci. Univ. Edinburgh",
+ number = "Research Note",
+ year = "1995",
+ abstract =
+ "This is a short note describing differences in static analysis of
+ EML, as defined in [KST94] and SML, as defined in [MTH90] and
+ [MT91]. It is intended for use by people who are building an EML
+ parser/typechecker by modifying an existing SML compiler.",
+ paper = "Kahr95.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Kahrs, Stefan}
\index{Sannella, Donald}
\index{Tarlecki, Andrzej}
@@ 17342,7 +19354,8 @@ when shown in factored form.
Extended ML language. It is based on the published semantics of
Standard ML in an attempt to ensure compatibility between the two
languages.",
 paper = "Kahr97.pdf"
+ paper = "Kahr97.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 17366,8 +19379,8 @@ when shown in factored form.
specification language for use with a given programming language.
Consequently the lessons learned go far beyond our original aim of
designing a specification language for ML.",
 paper = "Karh98.pdf",
 keywords = "printed"
+ paper = "Kahr98.pdf",
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 17409,6 +19422,22 @@ when shown in factored form.
\end{chunk}
+\index{Kamareddine, Fairouz}
+\index{Laan, Twan}
+\index{Nederpelt, Rob}
+\begin{chunk}{axiom.bib}
+@book{Kama05,
+ author = "Kamareddine, Fairouz and Laan, Twan and Nederpelt, Rob",
+ title = {{A Modern Perspective on Type Theory}},
+ comment = "Applied Logic Series 29",
+ publisher = "Kluwer Academic Publishers",
+ isbn = "1402023359",
+ year = "2005",
+ paper = "Kama05.pdf"
+}
+
+\end{chunk}
+
\index{Kaminski, Paul}
\begin{chunk}{axiom.bib}
@techreport{Kami12,
@@ 17642,6 +19671,21 @@ when shown in factored form.
\end{chunk}
+\index{Kornilowicz, Artur}
+\begin{chunk}{axiom.bib}
+@article{Korn07,
+ author = "Kornilowicz, Artur",
+ title = {{A Proof of the Jordan Curve Theorem via the Brouwer Fixed
+ Point Theorem}},
+ journal = "Mechanized Mathematics and Its Applications",
+ volume = "6",
+ number = "1",
+ pages = "33--40",
+ year = "2007"
+}
+
+\end{chunk}
+
\index{Kotelnikov, Evgenii}
\index{Kovacs, Laura}
\index{Reger, Giles}
@@ 17787,7 +19831,7 @@ when shown in factored form.
on the part of the user in order to ensure that the results are
correct. This burden can be shifted away from the user by
providing a library of exact analysis in which the computer
 handles the error estimates. We provide an implementaiton of the
+ handles the error estimates. We provide an implementation of the
exact real numbers in the Coq proof assistant. This improves on
the earlier Coqimplementation by O'Connor in two ways: we use
dyadic rationals built from the machine integers and we optimize
@@ 17801,6 +19845,22 @@ when shown in factored form.
\end{chunk}
+\index{Kreitz, Christoph}
+\begin{chunk}{axiom.bib}
+@inbook{Krei98,
+ author = "Kreitz, Christoph",
+ title = {{Program Synthesis}},
+ booktitle = "Automated Deduction  A Basis for Applications (Vol III)",
+ publisher = "Springer",
+ year = "1998",
+ chapter = "5",
+ pages = "105--134",
+ isbn = "9789401704373",
+ comment = "Applied Logic Series, volume 10",
+ paper = "Krei98.pdf"
+}
+
+\end{chunk}
\index{Kreitz, Christoph}
\index{Rahli, Vincent}
@@ 17909,6 +19969,39 @@ when shown in factored form.
\end{chunk}
+\index{Leroy, Xavier}
+\begin{chunk}{axiom.bib}
+@techreport{Lero92,
+ author = "Leroy, Xavier",
+ title = {{Polymorphic Typing of an Algorithmic Language}},
+ type = "research report",
+ institution = "INRIA",
+ number = "N1778",
+ year = "1992",
+ abstract =
+ "The polymorphic type discipline, as in the ML language, fits well
+ within purely applicative languages, but does not extend naturally
+ to the main feature of algorithmic languages: inplace update of
+ data structures. Similar typing difficulties arise with other
+ extensions of applicative languages: logical variables,
+ communication channels, continuation handling. This work studies
+ (in the setting of relational semantics) two new approaches to the
+ polymorphic typing of these nonapplicative features. The first
+ one relies on a restriction of generalization over types (the
+ notion of dangerous variables), and on a refined typing of
+ functional values (closure typing). The resulting type system is
+ compatible with the ML core language, and is the most expressive
+ type system for ML with imperative features so far. The second
+ approach relies on switching to ``by-name'' semantics for the
+ constructs of polymorphism, instead of the usual ``by-value''
+ semantics. The resulting language differs from ML, but lends
+ itself easily to polymorphic typing. Both approaches smoothly
+ integrate nonapplicative features and polymorphic typing.",
+ paper = "Lero92.pdf"
+}
+
+\end{chunk}
+
\begin{chunk}{axiom.bib}
@misc{lion137,
author = "Unknown",
@@ 17976,6 +20069,179 @@ when shown in factored form.
\end{chunk}
+\index{Loh, Andres}
+\index{McBride, Conor}
+\index{Swierstra, Wouter}
+\begin{chunk}{axiom.bib}
+@article{Lohx01,
+ author = "Loh, Andres and McBride, Conor and Swierstra, Wouter",
+ title = {{A Tutorial Implementation of a Dependently Typed Lambda
+ Calculus}},
+ journal = "Fundamenta Informaticae",
+ volume = "XXI",
+ pages = "1001--1031",
+ year = "2001",
+ abstract =
+ "We present the type rules for a dependently typed core calculus
+ together with a straightforward implementation in Haskell. We
+ explicitly highlight the changes necessary to shift from a
+ simplytyped lambda calculus to a dependently typed lambda
+ calculus. We also describe how to extend our core language with
+ data types and write several small example programs. The article
+ is accompanied by an executable interpreter and example code that
+ allows immediate experimentation with the system we describe.",
+ paper = "Lohx01.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Luo, Zhaohui}
+\begin{chunk}{axiom.bib}
+@phdthesis{Loux90,
+ author = "Luo, Zhaohui",
+ title = {{An Extended Calculus of Constructions}},
+ school = "University of Edinburgh",
+ year = "1990",
+ abstract =
+ "This thesis presents and studies a unifying theory of dependent
+ types ECC Extended Calculus of Constructions. ECC integrates
+ CoquandHuet's (impredicative) calculus of constructions and
+ MartinLof's (predicative) type theory with universes, and turns
+ out to be a strong and expressive calculus for formalization of
+ mathematics, structured proof development and program specification.
+
+ The meta theory of ECC is studied and we show that the calculus
+ has good metatheoretic properties. The main proof theoretic
+ result is the {\sl strong normalization theorem} which makes
+ explicit the predicativity of the predicative universes. The
+ strong normalization result shows the proof theoretic consistency
+ of the calculus; in particular, it implies the consistency of the
+ embedded intuitionistic higherorder logic and the decidability of
+ the theory. The metatheoretic results establish the theoretical
+ foundations both for pragmatic applications in theorem proving and
+ program specification and for computer implementations of the
+ theory. ECC has been implemented in the proof development system
+ LEGO developed by Pollack.
+
+ In ECC, dependent $\Sigma$ types are nonpropositional types
+ residing in the predicative universes and propositions are lifted
+ as higherlevel types as well. This solves the known difficulty
+ that adding strong $\Sigma$ types to an impredicative system
+ results in logical paradox and enables $\Sigma$ types to be used
+ to express the intuitionistic notion of subsets. $\Sigma$ types
+ together with type universes hence provide useful abstraction and
+ module mechanisms for abstract description of mathematical
+ theories and basic mechanisms for program specification and
+ adequate formalization of abstract mathematics (e.g. abstract
+ algebras and notions in category theory). A notion of (abstract)
+ mathematical theory can be described and leads to a promising
+ approach to {\sl abstract reasoning} and {\sl structured
+ reasoning}. Program specifications can be expressed by $\Sigma$
+ types, using propositions in the embedded logic to describe
+ program properties (for example, by an equality reflection result,
+ computational equality can be modeled by the propositional
+ Leibniz's equality definable in the theory). These developments
+ allow comprehensive structuring of formal or rigorous development
+ of proofs and programs.
+
+ Also discussed is how the calculus can be understood
+ settheoretically. We explain an $\omegaSet$ (realizability)
+ model of the theory. In particular, propositions can be
+ interpreted as partial equivalence relations and the predicative
+ type universes as corresponding to large set universes.",
+ paper = "Loux90.pdf"
+}
+
+\end{chunk}
+
+\index{Luo, Zhaohui}
+\begin{chunk}{axiom.bib}
+@article{Loux08,
+ author = "Luo, Zhaohui",
+ title = {{Coercions in a Polymorphic Type System}},
+ journal = "Math. Struct. in Comp. Science",
+ volume = "18",
+ pages = "729--751",
+ year = "2008",
+ abstract =
+ "We incorporate the idea of coercive subtyping, a theory of
+ abbreviation for dependent type theories, into the polymorphic
+ type system in functional programming languages. The traditional
+ type system with letpolymorphism is extended with argument
+ coercions and function coercions, and a corresponding type
+ inference algorithm is presented and proved to be sound and complete.",
+ paper = "Loux08.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Luo, Zhaohui}
+\begin{chunk}{axiom.bib}
+@article{Loux12,
+ author = "Luo, Zhaohui",
+ title = {{Formal Semantics in Modern Type Theories with Coercive Semantics}},
+ journal = "Linguistics and Philosophy",
+ volume = "35",
+ pages = "491--513",
+ year = "2012",
+ abstract =
+ "In the formal semantics based on modern type theories, common
+ nouns are interpreted as types, rather than as predicates on
+ entities as in Montague's semantics. This brings about important
+ advantages in linguistic interpretations but also leads to a
+ limitation of expressive power because there are fewer operations
+ on types as compared with those on predicates. The theory of
+ coercive subtyping adequately extends the modern type theories
+ and, as shown in this paper, plays a very useful role in making
+ type theories more expressive for formal semantics. It not only
+ gives a satisfactory solution to the basic problem of 'multiple
+ categorisations' caused by interpreting common nouns as types, but
+ provides a powerful formal framework to model interesting
+ linguistic phenomena such as copredication, whose formal treatment
+ has been found difficult in a Montagovian setting. In particular,
+ we show how to formally introduce dottypes in a type theory with
+ coercive subtyping and study some typetheoretic constructs that
+ provide useful representational tools for reference transfers and
+ multiple word meanings in formal lexical semantics.",
+ paper = "Loux12.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Luther, Marko}
+\index{Strecker, Martin}
+\begin{chunk}{axiom.bib}
+@misc{Luth98,
+ author = "Luther, Marko and Strecker, Martin",
+ title = {{A Guided Tour through TYPELAB}},
+ year = "1998",
+ abstract =
+ "This report gives a survey of TYPELAB, a specification and
+ verification environment that integrates interactive proof
+ development and automated proof search. TYPELAB is based on a
+ constructive type theory, the Calculus of Constructions, which can
+ be understood as a combination of a typed $\lambda$-calculus and
+ an expressive higherorder logic. Distinctive features of the type
+ system are dependent function types for modeling polymorphism and
+ dependent record types for encoding specifications and
+ mathematical theories. After presenting an extended example which
+ demonstrates how program development by stepwise refinement of
+ specifications can be carried out, the theory underlying the
+ prover component of TYPELAB is described in detail. A calculus
+ with metavariables and explicit substitutions is introduced, and
+ the metatheoretic properties of this calculus are
+ analyzed. Furthermore, it is shown that this calculus provides an
+ adequate foundation for automated proof search in fragments of the
+ logic.",
+ paper = "Luth98.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Ly, Kim Quyen}
\begin{chunk}{axiom.bib}
@misc{Lyxx15,
@@ 18048,7 +20314,7 @@ when shown in factored form.
code. Measurements indicate that a 20 per cent speed improvement
can generally be achieved.",
paper = "Maxx90.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 18078,6 +20344,20 @@ when shown in factored form.
\end{chunk}
+\index{Madsen, Ole Lehrmann}
+\begin{chunk}{axiom.bib}
+@article{Mads80,
+ author = "Madsen, Ole Lehrmann",
+ title = {{On Defining Semantics by means of Extended Attribute Grammars}},
+ journal = "LNCS",
+ volume = "54",
+ pages = "259--299",
+ year = "1980",
+ paper = "Mads80.pdf"
+}
+
+\end{chunk}
+
\index{Mahboubi, Assia}
\begin{chunk}{axiom.bib}
@article{Mahb13,
@@ 18170,6 +20450,101 @@ when shown in factored form.
\end{chunk}
+\index{Mhamdi, Tarek}
+\index{Hasan, Osman}
+\index{Tahar, Sofiene}
+\begin{chunk}{axiom.bib}
+@inproceedings{Mham11,
+ author = "Mhamdi, Tarek and Hasan, Osman and Tahar, Sofiene",
+ title = {{Formalization of Entropy Measure in HOL}},
+ booktitle = "Interactive Theorem Proving",
+ publisher = "Springer",
+ pages = "233--248",
+ year = "2011"
+}
+
+\end{chunk}
+
+\index{de Moura, Leonardo}
+\index{Avigad, Jeremy}
+\index{Kong, Soonho}
+\index{Roux, Cody}
+\begin{chunk}{axiom.bib}
+@misc{Mour15,
+ author = "de Moura, Leonardo and Avigad, Jeremy and Kong, Soonho
+ and Roux, Cody",
+ title = {{Elaboration in Dependent Type Theory}},
+ link = "\url{https://arxiv.org/pdf/1505.04324.pdf}",
+ year = "2015",
+ abstract =
+ "To be usable in practice, interactive theoremprovers need to
+ provide convenient and efficient means of writing expressions,
+ definitions, and proofs. This involves inferring information that
+ is often left implicit in an ordinary mathematical text, and
+ resolving ambiguities in mathematical expressions. We refer to the
+ process of passing from a quasi-formal and partially-specified
+ expression to a completely precise formal one as {\sl
+ elaboration}. We describe an elaboration algorithm for dependent
+ type theory that has been implemented in the Lean theorem
+ prover. Lean's elaborator supports higherorder unification, type
+ class inference, ad hoc overloading, insertion of coercions, the
+ use of tactics, and the computational reduction of terms. The
+ interactions between these components are subtle and complex, and
+ the elaboration algorithm has been carefully designed to balance
+ efficiency and usability. We describe the central design goals,
+ and the means by which they are achieved.",
+ paper = "Mour15.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
+\index{de Moura, Leonardo}
+\begin{chunk}{axiom.bib}
+@misc{Mour16a,
+ author = "de Moura, Leonardo",
+ title = {{The Lean Theorem Prover}},
+ link = "\url{https://www.youtube.com/watch?v=69ytTKfSSgc}",
+ conference = "PLSE '16",
+ comment = "video",
+ year = "2016",
+ keywords = "DONE"
+}
+
+\end{chunk}
+
+\index{de Moura, Leonardo}
+\index{Kong, Soonho}
+\index{Avigad, Jeremy}
+\index{van Doorn, Floris}
+\index{von Raumer, Jakob}
+\begin{chunk}{axiom.bib}
+@misc{Mour19,
+ author = "de Moura, Leonardo and Kong, Soonho and Avigad, Jeremy
+ and van Doorn, Floris and von Raumer, Jakob",
+ title = {{The Lean Theorem Prover (system description)}},
+ link = "\url{http://florisvandoorn.com/papers/lean_description.pdf}",
+ year = "2019",
+ abstract =
+ "Lean is a new open source theorem prover being developed at
+ Microsoft Research and Carnegie Mellon University, with a small
+ trusted kernel based on dependent type theory. It aims to bridge the
+ gap between interactive and automated theorem proving, by situating
+ automated tools and methods in a framework that supports user
+ interaction and the construction of fully specified axiomatic
+ proofs. Lean is an ongoing and longterm effort, but it already
+ provides many useful components, integrated development
+ environments, and a rich API which can be used to embed it into
+ other systems. It is currently being used to formalize category
+ theory, homotopy type theory, and abstract algebra. We describe the
+ project goals, system architecture, and main features, and we
+ discuss applications and continuing work.",
+ paper = "Mour19.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
\index{McKenna, Brian}
\begin{chunk}{axiom.bib}
@misc{Mcke14,
@@ 18324,6 +20699,36 @@ when shown in factored form.
\end{chunk}
\index{Moses, Joel}
+\index{Yun, David Y.Y.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Mose73,
+ author = "Moses, Joel and Yun, David Y.Y.",
+ title = {{The EZ GCD Algorithm}},
+ booktitle = "ACM Annual Conference",
+ year = "1973",
+ publisher = "ACM",
+ abstract =
+ "This paper presents a preliminary report on a new algorithm for
+ computing the Greatest Common Divisor (GCD) of two multivariate
+ polynomials over the integers. The algorithm is strongly
+ influenced by the method used for factoring multivariate
+ polynomials over the integers. It uses an extension of the Hensel
+ lemma approach originally suggested by Zassenhaus for factoring
+ univariate polynomials over the integers. We point out that the
+ cost of the Modular GCD algorithm applied to sparse multivariate
+ polynomials grows at least exponentially in the number of
+ variables appearing in the GCD. This growth is largely independent
+ of the number of terms in the GCD. The new algorithm, called the
+ EZ (Extended Zassenhaus) GCD Algorithm, appears to have a
+ computing bound which in most cases is polynomial in
+ them. Especially difficult cases for the EZ GCD Algorithm are
+ described. Applications of the algorithm to the computation of
+ contents and squarefree decomposition of polynomials is indicated."
+}
+
+\end{chunk}
+
+\index{Moses, Joel}
\index{Zippel, Richard}
\begin{chunk}{axiom.bib}
@article{Mose79,
@@ 18419,7 +20824,7 @@ when shown in factored form.
project HiLite, which involves AdaCore and Altran Praxis together
with several industrial users and research labs.",
paper = "Moyx10.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 18573,8 +20978,129 @@ when shown in factored form.
\end{chunk}
+\index{Murthy, Chetan R.}
+\begin{chunk}{axiom.bib}
+@techreport{Murt91,
+ author = "Murthy, Chetan R.",
+ title = {{Classical Proofs as Programs: How, What and Why}},
+ type = "technical report",
+ institution = "Cornell University",
+ number = "TR911215",
+ year = "1991",
+ abstract =
+ "We recapitulate Friedman's conservative extension result of
+ (suitable) classical over constructive systems for $\prod_2^0$
+ sentences, viewing it in two lights: as a translation of programs
+ from an almostfunctional language (with $C$) back to its
+ functional core, and as a translation of a constructive logic for
+ a functional language to a classical logic for an
+ almostfunctional language. We investigate the computational
+ properties of the translation and of classical proofs and
+ characterize the classical proofs which give constructions in
+ concrete, computational terms, rather than logical terms. We
+ characterize different versions of Friedman's translation as
+ translating slightly different almostfunctional languages to a
+ functional language, thus giving a general method for arriving at
+ a sound reduction semantics for an almostfunctional language with
+ a mixture of eager and lazy constructors and destructors, as well
+ as integers, pairs, unions, etc. Finally, we describe how to use
+ classical reasoning in a disciplined manner in giving classical
+ (yet constructivizable) proofs of sentences of greater complexity
+ than $\prod_2^0$. This direction offers the possibility of
+ applying classical reasoning to more general programming problems.",
+ paper = "Murt91.pdf"
+}
+
+\end{chunk}
+
\subsection{N} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Nathanson, Melvyn B.}
+\begin{chunk}{axiom.bib}
+@article{Nath08,
+ author = "Nathanson, Melvyn B.",
+ title = {{Desperately Seeking Mathematical Truth}},
+ journal = "Notices of the American Math. Society",
+ volume = "55",
+ number = "7",
+ pages = "773",
+ year = "2008"
+}
+
+\end{chunk}
+
+\index{Naur, Peter}
+\begin{chunk}{axiom.bib}
+@misc{Naur85,
+ author = "Naur, Peter",
+ title = {{Programming as Theory Building}},
+ link = "\url{http://pages.cs.wisc.edu/~remzi/Naur.pdf}",
+ paper = "Naur85.pdf",
+ keywords = "DONE"
+}
+
+\end{chunk}
+
+\index{Necula, George Ciprian}
+\begin{chunk}{axiom.bib}
+@phdthesis{Necu98,
+ author = "Necula, George Ciprian",
+ title = {{Compiling with Proofs}},
+ school = "Carnegie Mellon University",
+ year = "1998",
+ link = "\url{https://www.cs.cmu.edu/~rwh/theses/necula.pdf}",
+ abstract =
+ "One of the major challenges of building software systems is to
+ ensure that the various components fit together in a welldefined
+ manner. This problem is exacerbated by the recent advent of
+ software components whose origin is unknown or inherently
+ untrusted, such as mobile code or user extensions for operating
+ system kernels or database servers. Such extensions are useful for
+ implementing an efficient interaction model between a client and a
+ server because several data exchanges between them can be saved at
+ the cost of a single code exchange.
+
+ In this dissertation, I propose to tackle such system integrity
+ and security problems with techniques from mathematical logic and
+ programming language semantics. I propose a framework, called
+ {\sl proofcarrying code}, in which the extension provider sends
+ along with the extension code a representation of a formal proof
+ that the code meets certain safety and correctness
+ requirements. Then, the code receiver can ensure the safety of
+ executing the extension by validating the attached proof. The
+ major advantages of proofcarrying code are that it requires a
+ simple trusted infrastructure and that it does not impose runtime
+ penalties for the purpose of ensuring safety.
+
+ In addition to the concept of proofcarrying code, this
+ dissertation contributes the idea of certifying compilation. A
+ {\sl certifying compiler} emits, in addition to optimized target
+ code, function specifications and loop invariants that enable a
+ theoremproving agent to prove nontrivial properties of the
+ target code, such as type safety. Such a certifying compiler,
+ along with a proofgenerating theorem prover, is not only a
+ convenient producer of proofcarrying code but also a powerful
+ softwareengineering tool. The certifier also acts as an effective
+ referee for the correctness of each compilation, thus simplifying
+ considerably compiler testing and maintenance.
+
+ A complete system for proofcarrying code must also contain a
+ {\sl proofgenerating theorem prover} for the purpose of producing
+ the attached proofs of safety. This dissertation shows how
+ standard decision procedures can be adapted so that they can
+ produce detailed proofs of the proved predicates and also how
+ these proofs can be encoded compactly and checked
+ efficiently. Just like for the certifying compiler, a
+ proofgenerating theorem prover has significant software
+ engineering advantages over a traditional prover. In this case, a
+ simple proof checker can ensure the soundness of each successful
+ proving task and indirectly assist in testing and maintenance of
+ the theorem prover.",
+ paper = "Necu98.pdf"
+}
+
+\end{chunk}
+
\index{Nederpelt, R.}
\index{Kamareddine, F.}
\begin{chunk}{axiom.bib}
@@ 18639,6 +21165,19 @@ when shown in factored form.
\end{chunk}
\index{Nipkow, Tobias}
+\index{Paulson, Lawrence C.}
+\index{Wenzel, Markus}
+\begin{chunk}{axiom.bib}
+@book{Nipk02a,
+ author = "Nipkow, Tobias and Paulson, Lawrence C. and Wenzel, Markus",
+ title = {{Isabelle/HOL: A Proof Assistant for Higher-Order Logic}},
+ publisher = "Springer",
+ year = "2002"
+}
+
+\end{chunk}
+
+\index{Nipkow, Tobias}
\index{Tabacznyj, Christophe}
\index{Paulson, Lawrence C.}
\index{Chaieb, Amine}
@@ 18656,6 +21195,120 @@ when shown in factored form.
\end{chunk}
+\index{Noonan, Matt}
+\begin{chunk}{axiom.bib}
+@inproceedings{Noon18,
+ author = "Noonan, Matt",
+ title = {{Ghosts of Departed Proofs (Functional Pearl)}},
+ booktitle = "Haskell '18",
+ publisher = "ACM",
+ isbn = "9781450358354",
+ year = "2018",
+ abstract =
+ "Library authors often are faced with a design choice: should a
+ function with preconditions be implemented as a partial function,
+ or by returning a failure condition on incorrect use? Neither
+ option is ideal. Partial functions lead to frustrating runtime
+ errors. Failure conditions must be checked at the usesite,
+ placing an unfair tax on the users who have ensured that the
+ function's preconditions were correctly met.
+
+ In this paper, we introduce an API design concept called `ghosts
+ of departed proofs' based on the following observation:
+ sophisticated preconditions can be encoded in Haskell's type
+ system with no runtime overhead, by using proofs that inhabit
+ phantom type parameters attached to new type wrappers. The user
+ expresses correctness arguments by constructing proofs to inhabit
+ these phantom types. Critically, this technique allows the library
+ {\sl user} to decide when and how to validate that the API's
+ preconditions are met.
+
+ The 'ghost of departed proofs' approach to API design can achieve
+ many of the benefits of dependent types and refinement types, yet
+ only requires some minor and wellunderstood extensions to Haskell
+ 2010. We demonstrate the utility of this approach through a series
+ of case studies, showing how to enforce novel invariants for
+ lists, maps, graphs, shared memory regions, and more.",
+ paper = "Noon18.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
+\index{Norell, Ulf}
+\begin{chunk}{axiom.bib}
+@phdthesis{Nore07,
+ author = "Norell, Ulf",
+ title = {{Towards a Practical Programming Language Based on
+ Dependent Type Theory}},
+ school = "Chalmers University",
+ year = "2007",
+ link = "\url{http://www.cse.chalmers.se/~ulfn/papers/thesis.pdf}",
+ abstract =
+ "Dependent type theories have a long history of being used for
+ theorem proving. One aspect of type theory which makes it very
+ powerful as a proof language is that it mixes deduction with
+ computation. This also makes type theory a good candidate for
+ programming  the strength of the type system allows properties
+ of programs to be stated and established, and the computational
+ properties provide semantics for the programs.
+
+ This thesis is concerned with bridging the gap between the
+ theoretical presentations of type theory and the requirements of
+ practical programming languages. Although there are many
+ challenging research problems left to solve before we have an
+ industrial scale programming language based on type theory, this
+ thesis takes us a good step along the way.
+
+ In functional programming languages pattern matching provides a
+ concise notation for defining functions. In dependent type theory,
+ pattern matching becomes even more powerful, in that inspecting
+ the value of a particular term can reveal information about the
+ types and values of other terms. In this thesis we give a type
+ checking algorithm for definitions by pattern matching in type
+ theory, supporting overlapping patterns, and pattern matching on
+ intermediate results using the {\sl with} rule.
+
+ Traditional presentations of type theory suffer from rather
+ verbose notation, cluttering programs and proofs with, for
+ instance, explicit type information. One solution to this problem
+ is to allow terms that can be inferred automatically to be
+ omitted. This is usually implemented by inserting metavariables in
+ place of the omitted terms and using unification to solve these
+ metavariables during type checking. We present a type checking
+ algorithm for a theory with metavariables and prove its soundness
+ independent of whether the metavariables are solved or not.
+
+ In any programming language it is important to be able to
+ structure large programs into separate units or modules and limit
+ the interaction between these modules. In this thesis we present a
+ simple, but powerful module system for a dependently typed
+ language. The main focus of the module system is to manage the
+ name space of a program, and an important characteristic is a
+ clear separation between the module system and the type checker,
+ making it largely independent of the underlying language.
+
+ As a side track, not directly related to the use of type theory
+ for programming, we present a connection between type theory and
+ a firstorder logic theorem prover. This connection saves the user
+ the burden of proving simple, but tedious firstorder theorems by
+ leaving them for the prover. We use a transparent translation to
+ firstorder logic which makes the proofs constructed by the
+ theorem prover human readable. The soundness of the connection is
+ established by a general metatheorem.
+
+ Finally we put our work into practice in the implementation of a
+ programming language, Agda, based on type theory. As an
+ illustrating example, we show how to program a simple certified
+ prover for equations in a commutative monoid, which can be used
+ internally in Agda. Much more impressive examples have been done
+ by others, showing that the ideas developed in this thesis are
+ viable in practice.",
+ paper = "Nore07.pdf"
+}
+
+\end{chunk}
+
\index{Norrish, Michael}
\index{Slind, Konrad}
\begin{chunk}{axiom.bib}
@@ 18679,7 +21332,7 @@ when shown in factored form.
serving in a separate capacity as a useful representation medium
for persistent, hierarchical logical theories.",
paper = "Norr02.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 18706,6 +21359,46 @@ when shown in factored form.
\end{chunk}
+\index{O'Connor, Russell}
+\begin{chunk}{axiom.bib}
+@inproceedings{Ocon05,
+ author = "O'Connor, Russell",
+ title = {{Essential Incompleteness of Arithmetic Verified by Coq}},
+ booktitle = "Theorem Proving in Higher Order Logics",
+ publisher = "Springer",
+ pages = "245--260",
+ year = "2005"
+}
+
+\end{chunk}
+
+\index{Odersky, Martin}
+\index{Zenger, Christoph}
+\index{Zenger, Matthias}
+\begin{chunk}{axiom.bib}
+@misc{Oder01,
+ author = "Odersky, Martin and Zenger, Christoph and
+ Zenger, Matthias",
+ title = {{Colored Local Type Inference}},
+ year = "2001",
+ abstract =
+ "We present a type system for a language based on $F_{\le}$, which
+ allows certain type annotations to be elided in actual
+ programs. Local type inference determines types by a combination
+ of type propagation and local constraint solving, rather than by
+ global constraint solving. We refine the previously existing local
+ type inference system of Pierce and Turner by allowing partial
+ type information to be propagated. This is expressed by coloring
+ types to indicate propagation directions. Propagating partial type
+ information allows us to omit type annotations for the visitor
+ pattern, the analogue of pattern matching in languages without sum
+ types.",
+ paper = "Oder01.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\begin{chunk}{axiom.bib}
@inproceedings{Oisd18,
author = "Anonymous",
@@ 18825,6 +21518,111 @@ when shown in factored form.
\subsection{P} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Page, William}
+\begin{chunk}{axiom.bib}
+@misc{Page18,
+ author = "Page, William",
+ title = {{MathAction Front Page}},
+ year = "2018",
+ link = "\url{http://axiom-wiki.newsynthesis.org/FrontPage}",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Page, William}
+\begin{chunk}{axiom.bib}
+@misc{Page18a,
+ author = "Page, William",
+ title = {{MathAction Facebook Page}},
+ year = "2018",
+ link =
+ "\url{https://www.facebook.com/httpaxiomwikinewsynthesisorg229826785723}",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Paraskevopoulou, Zoe}
+\index{Appel, Andrew W.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Para19,
+ author = "Paraskevopoulou, Zoe and Appel, Andrew W.",
+ title = {{Closure Conversion is Safe for Space}},
+ booktitle = "Inter. Conf. on Functional Programming",
+ publisher = "ACM",
+ year = "2019",
+ abstract =
+ "We formally prove that closure conversion with flat environments
+ for CPS lambda calculus is correct (preserves semantics) and safe
+ for time and space, meaning that produced code preserves the time
+ and space required for execution of the source program.
+
+ We give a cost model to pre and postclosureconversion code by
+ formalizing profiling semantics that keep track of the time and
+ space resources needed for the execution of a program, taking
+ garbage collection into account. To show preservation of time and
+ space we set up a general 'garbage-collection compatible' binary
+ logical relation that establishes invariants on resource
+ consumption of the related programs, along with functional
+ correctness. Using this framework, we show semantics preservation
+ and space and time safety for terminating source programs, and
+ divergence preservation and space safety for diverging source
+ programs.
+
+ This is the first formal proof of spacesafety of a
+ closureconversion transformation. The transformation and the
+ proof are parts of the CertiCoq compiler pipeline from Coq
+ (Gallina) through CompCert Clight to assembly language. Our
+ results are mechanized in the Coq proof assistant.",
+ paper = "Para19.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Parigot, Michel}
+\begin{chunk}{axiom.bib}
+@article{Pari92,
+ author = "Parigot, Michel",
+ title = {{$\lambda\mu$Calculus: An Algorithmic Interpretation of
+ Classical Natural Deduction}},
+ journal = "LNCS",
+ volume = "624",
+ pages = "190--201",
+ year = "1992",
+ paper = "Pari92.pdf"
+}
+
+\end{chunk}
+
+\index{Parigot, Michel}
+\begin{chunk}{axiom.bib}
+@article{Pari92a,
+ author = "Parigot, Michel",
+ title = {{Recursive Programming with Proofs}},
+ journal = "Theoretical Computer Science",
+ volume = "94",
+ pages = "335--356",
+ year = "1992",
+ paper = "Pari92a.pdf"
+}
+
+\end{chunk}
+
\index{Parisse, Bernard}
\begin{chunk}{axiom.bib}
@misc{Pari19,
@@ 18843,6 +21641,44 @@ when shown in factored form.
\end{chunk}
+\index{Patterson, Daniel}
+\index{Ahmed, Amal}
+\begin{chunk}{axiom.bib}
+@inproceedings{Patt19,
+ author = "Patterson, Daniel and Ahmed, Amal",
+ title = {{The Next 700 Compiler Correctness Theorems}},
+ booktitle = "Inter. Conf. on Functional Programming",
+ publisher = "ACM",
+ year = "2019",
+ abstract =
+ "Compiler correctness is an old problem, with results stretching
+ back beyond the last halfcentury. Founding the field, John
+ McCarthy and James Painter set out to build 'a completely
+ trustworthy compiler'. And yet, until quite recently, even despite
+ truly impressive verification efforts, the theorems being proved
+ were only about the compilation of whole programs, a theoretically
+ quite appealing but practically unrealistic simplification. For a
+ compiler correctness theorem to assure complete trust, the theorem
+ must reflect the reality of how the compiler will be used.
+
+ There has been much recent work on more realistic 'compositional'
+ compiler correctness aimed at proving correct compilation of
+ components while supporting linking with components compiled from
+ different languages using different compilers. However, the
+ variety of theorems, stated in remarkably different ways, raises
+ questions about what researchers even mean by a 'compiler is
+ correct'. In this pearl, we develop a new framework with which to
+ understand compiler correctness theorems in the presence of
+ linking, and apply it to understanding and comparing this
+ diversity of results. In doing so, not only are we better able to
+ assess their relative strengths and weaknesses, but gain insight
+ into what we as a community should expect from compiler
+ correctness theorems of the future.",
+ paper = "Patt19.pdf"
+}
+
+\end{chunk}
+
\index{Paulson, Lawrence C.}
\begin{chunk}{axiom.bib}
@inbook{Paul90b,
@@ 19035,45 +21871,82 @@ when shown in factored form.
\end{chunk}
\subsection{Q} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Pressler, Ron}
+\begin{chunk}{axiom.bib}
+@misc{Pres19,
+ author = "Pressler, Ron",
+ title = {{Correctness and Complexity}},
+ year = "2019",
+ link = "\url{https://pron.github.io/posts/correctness-and-complexity}"
+}
\index{Deutsch, David}
+\end{chunk}
+
+\index{Propp, James}
\begin{chunk}{axiom.bib}
@article{Deut85,
 author = "Deutsch, David",
 title = {{Quantum Theory, the ChurchTuring Principle and the
 Universal Quantum Computer}},
 journal = "Proc. Royal Society of London",
 volume = "400",
 pages = "97117",
 year = "1985",
+@misc{Prop13,
+ author = "Propp, James",
+ title = {{Real Analysis in Reverse}},
+ year = "2013",
+ link = "\url{https://arxiv.org/pdf/1204.4483.pdf}",
abstract =
 "It is argued that underlying the ChurchTuring hypothesis there
 is an implicit physical assertion. Here, this assertion is
 presented explicitly as a physical principle: 'every finitely
 realizable physical system can be perfectly simulated by a
 universal model computing machine operating by finite
 means'. Classical physics and the universal Turing machine,
 because the former is continuous and the latter discrete, do not
 obey the principle, at leeast in the strong form above. A class of
 model computing machines that is the quantum generalization of the
 class of Turing machines is described, and it is shown that
 quantum theory and the 'universal quantum computer' are compatible
 with the principle. Computing machines resembling the universal
 quantum computer could, in principle, be built and would have many
 remarkable properties not reproducible by any Turing
 machine. These do not include the computation of nonrecursive
 functions, but they do include 'quantum parallelism', a method by
 which certain probabilistic tasks can be performed faster by a
 universal quantum computer than by any classical restriction of
 it. The intuitive explanation of these properties places an
 intolerable strain on all interpretations of quantum theory other
 than Everett's. Some of the numerous connections between the
 quantum theory of computation and the rest of physics are
 explored. Quantum complexity theory allows a physically more
 reasonable definition of the 'complexity' or 'knowledge' in a
 physical system than does classical complexity theory.",
 paper = "Deut85.pdf",
+ "Many of the theorems of real analysis, against the background of
+ the ordered field axioms, are equivalent to Dedekind completeness,
+ and hence can serve as completeness axioms for the reals. In the
+ course of demonstrating this, the article offers a tour of some
+ lessfamiliar ordered fields, provides some of the relevant
+ history, and considers pedagogical implications.",
+ paper = "Prop13.pdf"
+}
+
+\end{chunk}
+
+\subsection{Q} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\index{Qiu, Xiaokang}
+\index{Garg, Pranav}
+\index{Stefanescu, Andrei}
+\index{Madhusudan, P.}
+\begin{chunk}{axiom.bib}
+@misc{Qiux13,
+ author = "Qiu, Xiaokang and Garg, Pranav and Stefanescu, Andrei and
+ Madhusudan, P.",
+ title = {{Natural Proofs for Structure, Data, and Separation}},
+ year = "2013",
+ link = "\url{http://madhu.cs.illinois.edu/dryad_full_version.pdf}",
+ abstract =
+ "We propose {\sl natural proofs} for reasoning with programs that
+ manipulate datastructures against complex specifications 
+ specifications that describe the structure of the heap, the data
+ stored within it, and separation and framing of
+ substructures. Natural proofs are a subclass of proofs that are
+ amenable to completely automated reasoning, that provide sound but
+ incomplete procedures, and that capture common reasoning tactics
+ in program verification. We develop a dialect of separation logic
+ over heaps, called DRYAD, with recursive definitions that avoids
+ explicit quantification. We develop ways to reason with heaplets
+ using classical logic over the theory of sets, and develop natural
+ proofs for reasoning using proof tactics involving disciplined
+ unfoldings and formula abstractions. Natural proofs are encoded
+ into decidable theories of firstorder logic so as to be
+ discharged using SMT solvers.
+
+ We also implement the technique and show that a large class of
+ more than 100 correct programs that manipulate datastructures are
+ amenable to full functional correctness using the proposed natural
+ proof method. These programs are drawn from a variety of sources
+ including standard datastructures, the SchorrWaite algorithm for
+ garbage collection, a large number of lowlevel C routines from
+ the Glib library, the OpenBSD library and the Linux kernel, and
+ routines from a secure verified OSbrowser project. Our work is
+ the first that we know of that can handle such a wide range of
+ full functional verification properties of heaps automatically,
+ given pre/post and loop invariant annotations. We believe that
+ this work paves the way for the deductive verification technology
+ to be used by programmers who do not (and need not) understand the
+ internals of the underlying logic solvers, significantly
+ increasing their applicability in building reliable systems.",
+ paper = "Qiux13.pdf",
keywords = "printed"
}
@@ 19175,6 +22048,52 @@ when shown in factored form.
\end{chunk}
+\index{Rado, Tibor}
+\begin{chunk}{axiom.bib}
+@article{Rado61,
+ author = "Rado, Tibor",
+ title = {{On NonComputable Functions}},
+ journal = "Bell System Technical Journal",
+ volume = "41",
+ number = "3",
+ year = "1961",
+ abstract =
+ "The construction of noncomputable functions used in this paper
+ is based on the principle that a finite, non-empty set of
+ non-negative integers has a largest element. Also, this principle
+ is used only for sets which are exceptionally well-defined by
+ current standards. No enumeration of computable functions is used,
+ and in this sense the diagonal process is not employed. Thus, it
+ appears that an apparently self-evident principle, of constant use
+ in every area of mathematics, yields non-constructive entities.",
+ paper = "Rado61.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Raskovsky, Martin}
+\index{Collier, Phil}
+\begin{chunk}{axiom.bib}
+@article{Rask80,
+ author = "Raskovsky, Martin and Collier, Phil",
+ title = {{From Standard to Implementation Denotational Semantics}},
+ journal = "LNCS",
+ volume = "54",
+ pages = "94139",
+ year = "1980",
+ abstract =
+ "We are developing a compiler compiler. It takes as input the
+ formal definition of a programming language in Denotational
+ Semantics and produces as output a fairly efficient compiler
+ written in a system programming language which in turn will
+ produce code for a real machine. This work mainly deals with the
+ code generation parts.",
+ paper = "Rask80.pdf"
+}
+
+\end{chunk}
+
\index{Raja, Amar}
\index{Rayner, Matthew}
\index{Sexton, Alan}
@@ 19265,6 +22184,33 @@ when shown in factored form.
\index{Reynolds, John C.}
\begin{chunk}{axiom.bib}
+@inproceedings{Reyn83,
+ author = "Reynolds, John C.",
+ title = {{Types, Abstraction and Parametric Polymorphism}},
+ booktitle = "Information Processing 83",
+ publisher = "Elsevier Science Publishers",
+ year = "1983",
+ abstract =
+ "We explore the thesis that type structure is a syntactic
+ discipline for maintaining levels of abstraction. Traditionally,
+ this view has beeen formalized algebraically, but the algebraic
+ approach fails to encompass higherorder functions. For this
+ purpose, it is necessary to generalize homomorphic functions to
+ relations; the result is an ``abstraction'' theorem that is
+ applicable to the typed lambda calculus and various extensions,
+ including userdefined types.
+
+ Finally, we consider polymorphic functions, and show that the
+ abstraction theorem captures Strachey's concept of parametric, as
+ opposed to ad hoc, polymorphism.",
+ paper = "Reyn83.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Reynolds, John C.}
+\begin{chunk}{axiom.bib}
@misc{Reyn94,
author = "Reynolds, John C.",
title = {{An Introduction to the Polymorphic Lambda Calculus}},
@@ 19470,6 +22416,65 @@ when shown in factored form.
\subsection{S} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Saibi, Amokrane}
+\begin{chunk}{axiom.bib}
+@inproceedings{Saib97,
+ author = "Saibi, Amokrane",
+ title = {{Typing Algorithm in Type Theory with Inheritance}},
+ booktitle = "Symp. on Principles of Programming Languages",
+ publisher = "ACM",
+ pages = "292--301",
+ year = "1997"
+}
+
+\end{chunk}
+
+\index{Saibi, Amokrane}
+\begin{chunk}{axiom.bib}
+@phdthesis{Saib99,
+ author = "Saibi, Amokrane",
+ title = {{Outils G\'en\'eriques de mod\'elisation et de
+ d\'emonstration pour la Formalisation des Math\'ematiques
+ en th\'eorie des Types, Application \`a la th\'eorie des
+ cat\'egories}},
+ school = "University of Paris 6",
+ year = "1999"
+}
+
+\end{chunk}
+
+\index{SanchezStern, Alex}
+\index{Alhessi, Yousef}
+\index{Saul, Lawrence}
+\index{Lerner, Sorin}
+\begin{chunk}{axiom.bib}
+@misc{Sanc19,
+ author = "SanchezStern, Alex and Alhessi, Yousef and Saul, Lawrence
+ and Lerner, Sorin",
+ title = {{Generating Correctness Proofs with Neural Networks}},
+ year = "2019",
+ link = "\url{https://arxiv.org/pdf/1907.07794.pdf}",
+ abstract =
+ "Foundational verification allows programmers to build software
+ which has been empirically shown to have high levels of assurance
+ in a variety of important domains. However, the cost of producing
+ foundationally verified software remains prohibitively high for
+ most projects, as it requires significant manual effort by highly
+ trained experts. In this paper we present Proverbot9001 a proof
+ search system using machine learning techniques to produce proofs
+ of software correctness in interactive theorem provers. We
+ demonstrate Proverbot9001 on the proof obligations from a large
+ practical proof project, the CompCert verified C compiler, and
+ show that it can effectively automate what was previously manual
+ proofs, automatically solving 15.77\% of proofs in our test
+ dataset. This corresponds to an over 3X improvement over the prior
+ state of the art machine learning technique for generating proofs
+ in Coq.",
+ paper = "Sanc19.pdf"
+}
+
+\end{chunk}
+
\index{Sannella, Donald}
\begin{chunk}{axiom.bib}
@article{Sann86,
@@ 19487,6 +22492,31 @@ when shown in factored form.
\end{chunk}
\index{Sannella, Donald}
+\begin{chunk}{axiom.bib}
+@misc{Sann86a,
+ author = "Sannella, Donald",
+ title = {{Formal Specification of ML Programs}},
+ link =
+ "\url{http://www.lfcs.inf.ed.ac.uk/reports/86/ECSLFCS8615/ECSLFCS86.15.ps}",
+ year = "1986",
+ abstract =
+ "These notes were written to accompany lectures on program
+ specification which formed part of a course on functional
+ programming in ML. Functions can be specified using a
+ specification language obtained by extending ML with
+ (nonexecutable) firstorder axioms. Simple inductive proofs
+ suffice to show that an ML function satisfies such a
+ specification. This approach can also be used to specify and
+ verify larger programs built from smaller pieces using ML's
+ modularisation facilities. Examples are used to illustrate the
+ methods discussed.",
+ paper = "Sann86a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Sannella, Donald}
\index{Tarlecki, Andrzej}
\begin{chunk}{axiom.bib}
@article{Sann87,
@@ 19567,6 +22597,188 @@ when shown in factored form.
\end{chunk}
+\index{Sannella, D.}
+\index{Tarlecki, A.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Sann91,
+ author = "Sannella, D. and Tarlecki, A.",
+ title = {{Formal Program Development in Extended ML for the Working
+ Programmer}},
+ booktitle = "3rd BCS/FACS Workshop on Refinement",
+ publisher = "Springer",
+ pages = "99--130",
+ year = "1991",
+ abstract =
+ "Extended ML is a framework for the formal development of programs
+ in the Standard ML programming language from highlevel
+ specifications of their required input/output behavior. It
+ strongly supports the development of modular programs consisting
+ of an interconnected collection of generic and reusable units. The
+ Extended ML framework includes a methodology for formal program
+ development which establishes a number of ways of proceeding from
+ a given specification of a programming task towards a
+ program. Each such step gives rise to one or more proof
+ obligations which must be proved in order to establish the
+ correctness of that step. This paper is intended as a user-oriented
+ summary of the Extended ML language and methodology. Theoretical
+ technicalities are avoided whenever possible, with emphasis placed
+ on the practical aspects of formal program development. An
+ extended example of a complete program development in Extended ML
+ is included.",
+ paper = "Sann91.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Sannella, D.}
+\index{Tarlecki, A.}
+\begin{chunk}{axiom.bib}
+@article{Sann91a,
+ author = "Sannella, Donald and Tarlecki, Andrzej",
+ title = {{Extended ML: Past, Present and Future}},
+ journal = "LNCS",
+ volume = "534",
+ pages = "297--322",
+ year = "1991",
+ abstract =
+ "An overview of past, present and future work on the Extended ML
+ formal program development framework is given, with emphasis on
+ two topics of current active research: the semantics of the
+ Extended ML specification language, and tools to support formal
+ program development.",
+ paper = "Sann91a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Sannella, Donald}
+\index{Tarlecki, Andrzej}
+\begin{chunk}{axiom.bib}
+@article{Sann97,
+ author = "Sannella, Donald and Tarlecki, Andrzej",
+ title = {{Essential Concepts of Algebraic Specification and Program
+ Development}},
+ journal = "Formal Aspects of Computing",
+ volume = "9",
+ pages = "229--269",
+ year = "1997",
+ abstract =
+ "The main ideas underlying work on the modeltheoretic foundations
+ of algebraic specification and formal program development are
+ presented in an informal way. An attempt is made to offer an
+ overall view, rather than new results, and to focus on the basic
+ motivation behind the technicalities presented elsewhere.",
+ paper = "Sann97.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Sannella, D.}
+\index{Tarlecki, A.}
+\begin{chunk}{axiom.bib}
+@article{Sann99,
+ author = "Sannella, Donald and Tarlecki, Andrzej",
+ title = {{Algebraic Methods for Specification and Formal Development
+ of Programs}},
+ journal = "ACM Computing Surveys",
+ volume = "31",
+ year = "1999",
+ paper = "Sann99.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
+\index{Sannella, Donald}
+\index{Tarlecki, Andrzej}
+\begin{chunk}{axiom.bib}
+@book{Sann12,
+ author = "Sannella, Donald and Tarlecki, Andrzej",
+ title = {{Foundations of Algebraic Specification and Formal Software
+ Development}},
+ publisher = "Springer",
+ year = "2012",
+ isbn = "978-3-642-17336-3",
+ paper = "Sann12.pdf"
+}
+
+\end{chunk}
+
+\index{Sanuki, Masaru}
+\index{Inaba, Daiju}
+\index{Sasaki, Tateaki}
+\begin{chunk}{axiom.bib}
+@inproceedings{Sanu15,
+ author = "Sanuki, Masaru and Inaba, Daiju and Sasaki, Tateaki",
+ title = {{Computation of GCD of Sparse Multivariate Polynomials by
+ Extended Hensel Construction}},
+ booktitle = "17th Int. Symp. on Symbolic and Numeric Algorithms for
+ Scientific Computing",
+ publisher = "IEEE",
+ year = "2015",
+ abstract =
+ "Let $F(x,u_1,\ldots,u_i)$ be a squarefree multivariate polynomial
+ in main variable $x$ and subvariables $u_1\ldots u_i$. We say
+ that the leading coefficient (LC) of $F$ is singular if it
+ vanishes at the origin of the subvariables. A representative
+ algorithm for nonsparse multivariate polynomial GCD is the EZGCD
+ algorithm, which is based on the generalized Hensel construction
+ (GHC). In order to apply the GHC easily, we requires 1) the LC of
+ $F$ is nonsingular, 2) $F(x,0,\ldots,0)$ is squarefree, and 3)
+ the initial Hensel factor of GCD is ``lucky''. These requirements
+ are usually satisfied by the ``nonzero substitution'', i.e. to
+ shift the origin of subvariables. However, the nonzero
+ substitution may cause a drastic increase of the number of terms
+ of $F$ if $F$ is sparse. In 1993, Sasaki and Kako proposed the
+ extended Hensel construction (EHC) which does not perform the
+ nonzero substitution even if the LC is singular. Using the EHC,
+ Inaba implemented an algorithm of multivariate polynomial
+ factorization and verified that it is very useful for sparse
+ polynomials. In this paper, we apply the EHC for the computation
+ of GCD of sparse multivariate polynomials. In order to find a
+ lucky initial factor, we utilize the weighting of subvariables,
+ etc. Our naive implementation in Maple shows that our algorithm is
+ comparable in performance to Maple's GCD routine based on the
+ sparse interpolation.",
+ paper = "Sanu15.pdf"
+}
+
+\end{chunk}
+
+\index{Sarkar, Dipanwita}
+\index{Waddell, Oscar}
+\index{Dybvig, R. Kent}
+\begin{chunk}{axiom.bib}
+@inproceedings{Sark04,
+ author = "Sarkar, Dipanwita and Waddell, Oscar and Dybvig, R. Kent",
+ title = {{A Nanopass Infrastructure for Compiler Education}},
+ booktitle = "9th ACM SIGPLAN",
+ publisher = "ACM",
+ pages = "201--212",
+ year = "2004",
+ isbn = "1-58113-905-5",
+ abstract =
+ "A compiler structured as a small number of monolithic passes is
+ difficult to understand and difficult to maintain. The steep
+ learning curve is daunting, and even experienced developers find
+ that modifying existing passes is difficult and often introduces
+ subtle and tenacious bugs. These problems are especially
+ frustrating when the developer is a student in a compiler
+ class. An attractive alternative is to structure a compiler as a
+ collection of many finegrained passes, each of which performs a
+ single task. This structure aligns the implementation of a
+ compiler with its logical organization, simplifying development,
+ testing, and debugging. This paper describes the methodology and
+ tools comprising a framework for constructing such compilers.",
+ paper = "Sark04.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\begin{chunk}{axiom.bib}
@article{SCSCP10,
author = "Unknown",
@@ 19576,30 +22788,21 @@ when shown in factored form.
volume = "44",
number = "4",
year = "2010",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
\index{Shallit, Jeffrey}
\index{Sorenson, Jonathan}
+\index{Schnoebelen, Ph.}
\begin{chunk}{axiom.bib}
@article{Shal94,
 author = "Shallit, Jeffrey and Sorenson, Jonathan",
 title = {{Analysis of a LeftShift Binary GCD Algorithm}},
 journal = "Journal of Symbolic Computation",
 volume = "17",
 number = "6",
 pages = "473486",
 year = "1994",
 abstract =
 "We introduce a new leftshift binary algorithm, LSBGCD, for
 computing the greatest common divisor of two integers, and we
 provide an analysis of the worstcase behavior of the
 algorithm. The analysis depends on a theorem of Ramharter about
 the extremal behavior of certain continuants.",
 paper = "Shal94.pdf",
 keywords = "printed"
+@article{Schn02,
+ author = "Schnoebelen, Ph.",
+ title = {{The Complexity of Temporal Logic Model Checking}},
+ journal = "Advances in Modal Logic",
+ volume = "4",
+ pages = "1--44",
+ year = "2002",
+ paper = "Schn02.pdf"
}
\end{chunk}
@@ 19641,6 +22844,57 @@ when shown in factored form.
\end{chunk}
+\index{Selsam, Daniel}
+\begin{chunk}{axiom.bib}
+@misc{Sels19,
+ author = "Selsam, Daniel",
+ title = {{CS240H: A Standalone Proofchecker for the Lean Theorem Prover}},
+ year = "2019",
+ link = "\url{http://www.scs.stanford.edu/16wi-cs240h/projects/selsam.pdf}",
+ comment = "\url{https://github.com/dselsam/tc}",
+ paper = "Sels19.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Shankar, Natarjan}
+\index{Owre, Sam}
+\begin{chunk}{axiom.bib}
+@inproceedings{Shan00,
+ author = "Shankar, Natarjan and Owre, Sam",
+ title = {{Principles and Pragmatics of Subtyping in PVS}},
+ booktitle = "Recent Trends in Algebraic Development Techniques",
+ publisher = "Springer",
+ pages = "37--52",
+ year = "2000"
+}
+
+\end{chunk}
+
+\index{Shallit, Jeffrey}
+\index{Sorenson, Jonathan}
+\begin{chunk}{axiom.bib}
+@article{Shal94,
+ author = "Shallit, Jeffrey and Sorenson, Jonathan",
+ title = {{Analysis of a LeftShift Binary GCD Algorithm}},
+ journal = "Journal of Symbolic Computation",
+ volume = "17",
+ number = "6",
+ pages = "473--486",
+ year = "1994",
+ abstract =
+ "We introduce a new leftshift binary algorithm, LSBGCD, for
+ computing the greatest common divisor of two integers, and we
+ provide an analysis of the worstcase behavior of the
+ algorithm. The analysis depends on a theorem of Ramharter about
+ the extremal behavior of certain continuants.",
+ paper = "Shal94.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Shivers, Olin}
\begin{chunk}{axiom.bib}
@techreport{Shiv90,
@@ 19868,29 +23122,6 @@ when shown in factored form.
\end{chunk}
\index{Stratford, Jonathan}
\index{Davenport, James H.}
\begin{chunk}{axiom.bib}
@article{Stra08,
 author = "Stratford, Jonathan and Davenport, James H.",
 title = {{Unit Knowledge Management}},
 journal = "LNCS",
 volume = "5144",
 year = "2008",
 abstract =
 "In 9, various observations on the handling of (physical) units in
 OpenMath were made. In this paper, we update those observations,
 and make some comments based on a working unit converter that,
 because of its OpenMathbased design, is modular, extensible, and
 reflective. We also note that some of the issues in an effective
 converter, such as the rules governing abbreviations, being more
 linguistic than mathematical, do not lend themselves to easy
 expression in OpenMath.",
 paper = "Stra08.pdf"
}

\end{chunk}

\index{Steenkiste, Peter}
\index{Hennessy, John}
\begin{chunk}{axiom.bib}
@@ 19929,6 +23160,106 @@ when shown in factored form.
\end{chunk}
+\index{Stratford, Jonathan}
+\index{Davenport, James H.}
+\begin{chunk}{axiom.bib}
+@article{Stra08,
+ author = "Stratford, Jonathan and Davenport, James H.",
+ title = {{Unit Knowledge Management}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "In 9, various observations on the handling of (physical) units in
+ OpenMath were made. In this paper, we update those observations,
+ and make some comments based on a working unit converter that,
+ because of its OpenMathbased design, is modular, extensible, and
+ reflective. We also note that some of the issues in an effective
+ converter, such as the rules governing abbreviations, being more
+ linguistic than mathematical, do not lend themselves to easy
+ expression in OpenMath.",
+ paper = "Stra08.pdf"
+}
+
+\end{chunk}
+
+\index{Strecker, M.}
+\index{Luther, M.}
+\index{von Henke, F.}
+\begin{chunk}{axiom.bib}
+@inbook{Stre98,
+ author = "Strecker, M. and Luther, M. and von Henke, F.",
+ title = {{Interactive and Automated Proof Construction in Type Theory}},
+ publisher = "Springer",
+ chapter = "3",
+ pages = "73--96",
+ isbn = "978-94-017-0435-9",
+ year = "1998",
+ abstract =
+ "This chapter gives a survey of TYPELAB, a specification and
+ verification environment that integrates interactive proof
+ development and automated proof search. TYPELAB is based on a
+ constructive type theory, the Calculus of Constructions, which can
+ be understood as a combination of a typed $\lambda$calculus and
+ an expressive higherorder logic. Distinctive features of the type
+ system are dependent function types ($\Pi$ types) for modeling
+ polymorphism and dependent record types ($\Sigma$ types) for
+ encoding specifications and mathematical theories.",
+ paper = "Stre98.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Strecker, Martin}
+\begin{chunk}{axiom.bib}
+@phdthesis{Stre99,
+ author = "Strecker, Martin",
+ title = {{Construction and Deduction in Type Theories}},
+ school = "Universit{\"a}t Ulm",
+ year = "1999",
+ abstract =
+ "This dissertation is concerned with interactive proof
+ construction and automated proof search in type theories, in
+ particular the Calculus of Constructions and its subsystems.
+
+ Type theories can be conceived as expressive logics which combine
+ a functional programming language, strong typing and a higher-order
+ logic. They are therefore a suitable formalism for specification
+ and verification systems. However, due to their expressiveness, it
+ is difficult to provide appropriate deductive support for type
+ theories. This dissertation first examines general methods for
+ proof construction in type theories and then explores how these
+ methods can be refined to yield proof search procedures for
+ specialized fragments of the language.
+
+ Proof development in type theories usually requires the
+ construction of a term having a given type in a given context. For
+ the term to be constructed, a {\sl metavariable} is introduced
+ which is successively instantiated in the course of the proof. A
+ naive use of metavariables leads to problems, such as
+ noncommutativity of reduction and instantiation and the
+ generation of illtyped terms during reduction. For solving these
+ problems, a calculus with {\sl explicit substitutions} is
+ introduced, and it is shown that this calculus preserves
+ properties such as strong normalisation and decidability of typing.
+
+ In order to obtain a calculus appropriate for proof search, the
+ usual natural deduction presentation of type theories is replaced
+ by a {\sl sequent style presentation}. It is shown that the
+ calculus thus obtained is correct with respect to the original
+ calculus. Completeness (proved with a cutelimination argument) is
+ shown for all predicative fragments of the lambda cube.
+
+ This dissertation concludes with a discussion of some techniques
+ that make proof search practically applicable, such as unification
+ and pruning of the proof search space by exploiting
+ impermutabilities of the sequent calculus.",
+ paper = "Stre99.pdf"
+}
+
+\end{chunk}
+
\index{Storjohann, Arne}
\begin{chunk}{axiom.bib}
@inproceedings{Stor97,
@@ 19943,6 +23274,19 @@ when shown in factored form.
\end{chunk}
+\index{Strub, PierreYves}
+\begin{chunk}{axiom.bib}
+@inproceedings{Stru10,
+ author = "Strub, PierreYves",
+ title = {{Coq Modulo Theory}},
+ booktitle = "19th Annual Conf. on Computer Science Logic",
+ publisher = "Springer",
+ pages = "549643",
+ year = "2010"
+}
+
+\end{chunk}
+
\index{Stump, Aaron}
\begin{chunk}{axiom.bib}
@misc{Stum18,
@@ 19979,8 +23323,73 @@ when shown in factored form.
\end{chunk}
+\index{Swierstra, Wouter}
+\index{Baanen, Tim}
+\begin{chunk}{axiom.bib}
+@inproceedings{Swie19,
+ author = "Swierstra, Wouter and Baanen, Tim",
+ title = {{A Predicate Transformer Semantics for Effects}},
+ booktitle = "Inter. Conf. on Functional Programming",
+ publisher = "ACM",
+ year = "2019",
+ abstract =
+ "Reasoning about programs that use effects can be much harder than
+ reasoning about their pure counterparts. This paper presents a
+ predicate transformer semantics for a variety of effects,
+ including exceptions, state, nondeterminism, and general
+ recursion. The predicate transformer semantics gives rise to a
+ refinement relation that can be used to relate a program to its
+ specification, or even calculate effectful programs that are
+ correct by construction.",
+ paper = "Swie19.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Swords, Sol}
+\index{Davis, Jared}
+\begin{chunk}{axiom.bib}
+@article{Swor11,
+ author = "Swords, Sol and Davis, Jared",
+ title = {{Bit-Blasting ACL2 Theorems}},
+ journal = "EPTCS",
+ volume = "70",
+ pages = "84102",
+ year = "2011",
+ abstract =
+ "Interactive theorem proving requires a lot of human
+ guidance. Proving a property involves (1) figuring out why it
+ holds, then (2) coaxing the theorem prover into believing it. Both
+ steps can take a long time. We explain how to use GL, a framework
+ for proving finite ACL2 theorems with BDD and SATbased
+ reasoning. This approach makes it unnecessary to deeply understand
+ why a property is true, and automates the process of admitting it
+ as a theorem. We use GL at Centaur Technology to verify execution
+ units for x86 Integer, MMX, SSE, and floatingpoint arithmetic.",
+ paper = "Swor11.pdf"
+}
+
+\end{chunk}
+
\subsection{T} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Tait, William W.}
+\begin{chunk}{axiom.bib}
+@article{Tait86,
+ author = "Tait, William W.",
+ title = {{Truth and Proof: The Platonism of Mathematics}},
+ link = "\url{logic.harvard.edu/EFI_Tait_PlatonisminMathematics.pdf}",
+ journal = "Synthese",
+ volume = "69",
+ pages = "341370",
+ year = "1986",
+ paper = "Tait86.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
\index{Tan, Yong Kiam}
\index{Myreen, Magnus O.}
\index{Kumar, Ramana}
@@ 20200,6 +23609,118 @@ when shown in factored form.
\end{chunk}
+\index{Tennent, R.D.}
+\begin{chunk}{axiom.bib}
+@article{Tenn76,
+ author = "Tennent, R.D.",
+ title = {{The Denotational Semantics of Programming Languages}},
+ journal = "Communications of the ACM",
+ volume = "19",
+ number = "8",
+ pages = "437453",
+ year = "1976",
+ abstract =
+ "This paper is a tutorial introduction to the theory of
+ programming language semantics developed by D. Scott and
+ C. Strachey. The application of the theory to formal language
+ specification is demonstrated and other applications are
+ surveyed. The first language considered, LOOP, is very elementary
+ and its definition merely introduces the notion and methodology of
+ the approach. Then the semantic concepts of environments, stores,
+ and continuations are introduced to model classes of programming
+ language features and the underlying mathematical theory of
+ computation due to Scott is motivated and outlined. Finally, the
+ paper presents a formal definition of the language GEDANKEN.",
+ paper = "Tenn76.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Thatcher, James W.}
+\index{Wagner, Eric G.}
+\index{Wright, Jesse B.}
+\begin{chunk}{axiom.bib}
+@article{That80,
+ author = "Thatcher, James W. and Wagner, Eric G. and Wright, Jesse B.",
+ title = {{More on Advice on Structuring Compilers and Proving Them
+ Correct}},
+ journal = "LNCS",
+ volume = "54",
+ pages = "165188",
+ year = "1980",
+ paper = "That80.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Tofte, Mads}
+\begin{chunk}{axiom.bib}
+@phdthesis{Toft88,
+ author = "Tofte, Mads",
+ title = {{Operational Semantics and Polymorphic Type Inference}},
+ school = "Univ. of Edinburgh",
+ year = "1988",
+ abstract =
+ "Three languages with polymorphic type disciplines are discussed,
+ namely the $\lambda$calculus with Milner's polymorphic type
+ discipline; a language with imperative features (polymorphic
+ references); and a skeletal module language with structures,
+ signatures and functors. In each of the two first cases we show
+ that the type inference system is consistent with an operational
+ dynamic semantics.
+
+ On the module level, polymorphic types correspond to
+ signatures. There is a notion of principal signatures. Socalled
+ signature checking is the module level equivalent of type
+ checking. In particular, there exists an algorithm which either
+ fails or produces a principal signature.",
+ paper = "Toft88.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Tofte, Mads}
+\begin{chunk}{axiom.bib}
+@misc{Toft96,
+ author = "Tofte, Mads",
+ title = {{Essentials of Standard ML Modules}},
+ year = "1996",
+ abstract =
+ "The following notes give an overview of Standard ML Module
+ system.
+
+ Part 1 gives an introduction to ML Modules aimed at the reader who
+ is familiar with a functional programming language but has little
+ or no experience with ML programming.
+
+ Part 2 is a halfday practical intended to give the reader an
+ opportunity to modify a small, but nontrivial piece of software
+ using functors, signatures and structures.",
+ paper = "Toft96.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{van Tonder, Rijnard}
+\index{Le Goues, Claire}
+\begin{chunk}{axiom.bib}
+@inproceedings{Tond19,
+ author = "van Tonder, Rijnard and Le Goues, Claire",
+ title = {{Lightweight MultiLanguage Syntax Transformation with
+ Parser Parser Combinators}},
+ booktitle = "PLDI",
+ publisher = "ACM",
+ isbn = "9781450367127",
+ year = "2019",
+ paper = "Tond19.pdf"
+}
+
+\end{chunk}
+
\index{Torlak, Emina}
\begin{chunk}{axiom.bib}
@misc{Torl17,
@@ 20243,6 +23764,113 @@ when shown in factored form.
\end{chunk}
+\index{Troelstra, A.S.}
+\index{van Dalen, Dirk}
+\begin{chunk}{axiom.bib}
+@book{Troe88,
+ author = "Troelstra, A.S. and van Dalen, Dirk",
+ title = {{Constructivism in Mathematics, Vol 2}},
+ publisher = "North-Holland",
+ year = "1988"
+}
+
+\end{chunk}
+
+\index{Tsuji, Kuniaki}
+\begin{chunk}{axiom.bib}
+@article{Tsuj09,
+ author = "Tsuji, Kuniaki",
+ journal = "Journal of Symbolic Computation",
+ title = {{An Improved EZGCD Algorithm for Multivariate Polynomials}},
+ volume = "44",
+ number = "1",
+ year = "2009",
+ pages = "99110",
+ abstract =
+ "The EZGCD algorithm often has a badzero problem, which has a
+ remarkable influence on polynomials with higherdegree terms. In
+ this paper, by applying special ideals, the EZGCD algorithm for
+ sparse polynomials is improved. This improved algorithm greatly
+ reduces computational complexity because of the sparseness of
+ polynomials. The author expects that the use of these ideals will
+ be useful as a resolution for obtaining a GCD of sparse
+ multivariate polynomials with higherdegree terms.",
+ paper = "Tsuj09.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Turing, A. M.}
+\begin{chunk}{axiom.bib}
+@misc{Turi36,
+ author = "Turing, A. M.",
+ title = {{On Computable Numbers, with an Application to the
+ Entscheidungsproblem}},
+ year = "1936",
+ link =
+ "\url{https://www.cs.virginia.edu/~robins/Turing_Paper_1936.pdf}",
+ abstract =
+ "The ``computable'' numbers may be described briefly as the real
+ numbers whose expressions as a decimal are calculable by finite
+ means. Although the subject of this paper is ostensibly the
+ computable {\sl numbers}, it is almost equally easy to define and
+ investigate computable functions of an integral variable or a real
+ or computable variable, computable predicates, and so forth. The
+ fundamental problems involved are, however, the same in each case,
+ and I have chosen the computable numbers for explicit treatment as
+ involving the least cumbrous technique. I hope shortly to give an
+ account of the relations of the computable numbers, functions, and
+ so forth to one another. This will include a development of the
+ theory of functions of a real variable expressed in terms of
+ computable numbers. According to my definition, a number is
+ computable if its decimal can be written down by a machine.",
+ paper = "Turi36.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Turing, A. M.}
+\begin{chunk}{axiom.bib}
+@misc{Turi47,
+ author = "Turing, A. M.",
+ title = {{Lecture to the London Mathematical Society on 20 February
+ 1947}},
+ year = "1947",
+ link = "\url{http://www.vordenker.de/downloads/turingvorlesung.pdf}",
+ paper = "Turi47.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
+\index{Turing, A. M.}
+\begin{chunk}{axiom.bib}
+@misc{Turi48,
+ author = "Turing, A. M.",
+ title = {{Intelligent Machinery}},
+ year = "1948",
+ link = "\url{https://weightagnostic.github.io/papers/turning1948.pdf}",
+ abstract =
+ "The possible ways in which machinery might be made to show
+ intelligent behaviour are discussed. The analogy with the human
+ brain is used as a guiding principle. It is pointed out that the
+ potentialities of the human intelligence can only be realized if
+ suitable education is provided. The investigation mainly centres
+ round an analogous teaching process applied to machines. The idea
+ of an unorganized machine is defined, and it is suggested that the
+ infant human cortex is of this nature. Simple examples of such
+ machines are given, and their education by means of rewards and
+ punishments is discussed. In one case the education process is
+ carried through until the organization is similar to that of an
+ ACE.",
+ paper = "Turi48.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
\subsection{U} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\subsection{V} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ 20263,6 +23891,20 @@ when shown in factored form.
\index{Wadler, Philip}
\begin{chunk}{axiom.bib}
+@misc{Wadl00,
+ author = "Wadler, Philip",
+ title = {{Proofs are Programs: 19th Century Logic and 21st Century
+ Computing}},
+ link = "\url{https://homepages.inf.ed.ac.uk/wadler/papers/frege/frege.pdf}",
+ year = "2000",
+ paper = "Wadl00.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
+\index{Wadler, Philip}
+\begin{chunk}{axiom.bib}
@misc{Wadl03,
author = "Wadler, Philip",
title = {{The GirardReynolds Isomorphism}},
@@ 20401,6 +24043,42 @@ when shown in factored form.
\end{chunk}
+\index{Wang, Paul S.}
+\begin{chunk}{axiom.bib}
+@article{Wang80,
+ author = "Wang, Paul S.",
+ title = {{The EEZGCD Algorithm}},
+ journal = "SIGSAM Bulletin",
+ volume = "14",
+ number = "2",
+ pages = "5060",
+ year = "1980",
+ abstract =
+ "An enhanced gcd algorithm based on the EZGCD algorithm is
+ described. Implementational aspects are emphasized. It is
+ generally faster and is particularly suited for computing gcd of
+ sparse multivariate polynomials. The EEZGCD algorithm is
+ characterized by the following features:
+ \begin{enumerate}
+ \item avoiding unlucky evaluations,
+ \item predetermining the correct leading coefficient of the
+ desired gcd,
+ \item using the sparsity of the given polynomials to determine
+ terms in the gcd and
+ \item direct methods for dealing with the ``common divisor problem.''
+ \end{enumerate}
+ The common divisor problem occurs when the gcd has a different
+ common divisor with each of the cofactors. The EZGCD algorithm
+ does a squarefree decomposition in this case. It can be avoided
+ resulting in increased speed. One method is to use parallel p-adic
+ construction of more than two factors. Machine examples with
+ timing data are included.",
+ paper = "Wang80.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Wang, Ke}
\begin{chunk}{axiom.bib}
@misc{Wang19,
@@ 20533,6 +24211,72 @@ when shown in factored form.
\end{chunk}
+\index{Weirich, Stephanie}
+\index{Choudhury, Pritam}
+\index{Voizard, Antoine}
+\index{Eisenberg, Richard A.}
+\begin{chunk}{axiom.bib}
+@misc{Weir19,
+ author = "Weirich, Stephanie and Choudhury, Pritam and Voizard,
+ Antoine and Eisenberg, Richard A.",
+ title = {{A Role for Dependent Types in Haskell (Extended Version)}},
+ link = "\url{https://arxiv.org/pdf/1905.13706.pdf}",
+ year = "2019",
+ abstract =
+ "Modern Haskell supports zerocost coercions, a mechanism where
+ types that share the same runtime representation may be freely
+ converted between. To make sure such conversions are safe and
+ desirable, this feature relies on a mechanism of roles to prohibit
+ invalid coercions. In this work, we show how to integrate roles
+ with dependent type systems and prove, using the Coq proof
+ assistant, that the resulting system is sound. We have designed
+ this work as a foundation for the addition of dependent types to
+ the Glasgow Haskell Compiler, but we also expect that it will be
+ of use to designers of other dependentlytyped languages who might
+ want to adopt Haskell's safe coercion feature.",
+ paper = "Weir19.pdf"
+}
+
+\end{chunk}
+
+\index{Werner, Benjamin}
+\begin{chunk}{axiom.bib}
+@inbook{Wern97,
+ author = "Werner, Benjamin",
+ title = {{Sets in Types, Types in Sets}},
+ booktitle = "Theoretical Aspects of Computer Software",
+ publisher = "Springer",
+ chapter = "unknown",
+ pages = "530546",
+ year = "1997"
+}
+
+\end{chunk}
+
+\index{Wernhard, Christoph}
+\begin{chunk}{axiom.bib}
+@misc{Wern19,
+ author = "Wernhard, Christoph",
+ title = {{PIE  Proving, Interpolating and Eliminating on the Basis
+ of FirstOrder Logic}},
+ year = "2019",
+ link = "\url{https://arxiv.org/pdf/1908.11137.pdf}",
+ abstract =
+ "PIE is a Prologembedded environment for automated reasoning on
+ the basis of firstorder logic. It includes a versatile formula
+ macro system and supports the creation of documents that
+ intersperse macro definitions, reasoner invocations and LaTeX
+ formatted natural language text. Invocation of various reasoners
+ is supported: external provers as well as subsystems of PIE,
+ which include preprocessors, a Prologbased firstorder prover,
+ methods for Craig interpolation and methods for secondorder
+ quantifier elimination.",
+ paper = "Wern19.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Wiedijk, Freek}
\begin{chunk}{axiom.bib}
@article{Wied03b,
@@ 20610,9 +24354,117 @@ when shown in factored form.
\end{chunk}
+\index{Wright, Andrew K.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Wrig95,
+ author = "Wright, Andrew K.",
+ title = {{Simple Imperative Polymorphism}},
+ booktitle = "LISP and Symbolic Computation",
+ publisher = "Kluwer Academic",
+ pages = "242256",
+ year = "1995",
+ abstract =
+ "This paper describes a simple extension of the HindleyMilner
+ polymorphic type discipline to callbyvalue languages that
+ incorporate imperative features like references, exceptions, and
+ continuations. This extension sacrifices the ability to type every
+ purely functional expression that is typable in the HindleyMilner
+ system. In return, it assigns the same type to functional and
+ imperative implementations of the same abstraction. Hence with a
+ module system that separates specifications from implementations,
+ imperative features can be freely used to implement polymorphic
+ specifications. A study of a number of ML programs shows that the
+ inability to type all HindleyMilner typable expressions seldom
+ impacts realistic programs. Furthermore, most programs that are
+ rendered untypable by the new system can be easily repaired.",
+ paper = "Wrig95.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{X} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\index{Xi, Hongwei}
+\begin{chunk}{axiom.bib}
+@book{Xixx19,
+ author = "Xi, Hongwei",
+ title = {{Introduction to Programming in ATS}},
+ publisher = "ATS Trustful Software, Inc",
+ year = "2019",
+ abstract =
+ "As a programming language, ATS is both syntaxrich and
+ featurerich. This book introduces the reader to some core
+ features of ATS, including basic functional programming, simple
+ types, (recursively defined) datatypes, polymorphic types,
+ dependent types, linear types, theorem proving, programming with
+ theorem proving (PwTP), and templatebased programming. Although
+ the reader is not assumed to be familiar with programming in
+ general, the book is likely to be rather dense for someone without
+ considerable programming experience.",
+ paper = "Xixx19.pdf"
+}
+
+\end{chunk}
+
\subsection{Y} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Yallop, Jeremy}
+\index{White, Leo}
+\begin{chunk}{axiom.bib}
+@inproceedings{Yall19,
+ author = "Yallop, Jeremy and White, Leo",
+ title = {{Lambda: The Ultimate Sublanguage (Experience Report)}},
+ booktitle = "Inter. Conf. on Functional Programming",
+ publisher = "ACM",
+ year = "2019",
+ abstract =
+ "We describe our experience teaching an advanced typed functional
+ programming course based around the use of System $F_\omega$ as a
+ programming language.",
+ paper = "Yall19.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
+\index{Yedidia, Adam}
+\index{Aaronson, Scott}
+\begin{chunk}{axiom.bib}
+@article{Yedi16,
+ author = "Yedidia, Adam and Aaronson, Scott",
+ title = {{A Relatively Small Turing Machine Whose Behavior Is
+ Independent of Set Theory}},
+ journal = "Complex Systems",
+ volume = "25",
+ number = "4",
+ pages = "297327",
+ link = "\url{http://www.complexsystems.com/pdf/2555.pdf}",
+ year = "2016",
+ abstract =
+ "Since the definition of the Busy Beaver function in Rado in 1962,
+ an interesting open question has been what is the smallest value of
+ $n$ for which $BB(n)$ is independent of ZFC set theory. Is this
+ $n$ approximately 10, or closer to 1,000,000, or is it even
+ larger? In this paper, we show that it is at most 7,918 by
+ presenting an explicit description of a 7,918state Turing machine
+ $Z$ with 1 tape and a 2symbol alphabet that cannot be proved to
+ run forever in ZFC (even though it presumably does), assuming ZFC
+ is consistent. The machine is based on work of Harvey Friedman on
+ independent statements involving orderinvariant graphs. In doing
+ so, we give the first known upper bound on the highest provable
+ Busy Beaver number in ZFC. We also present a 4,888state Turing
+ machine $G$ that halts if and only if there is a counterexample of
+ Goldbach's conjecure, an at 5,372state Turing machine $R$ that
+ halts if and only if the Riemann hypothesis is false. To create
+ $G$, $R$, and $Z$, we develop and use a higherlevel language,
+ Laconic, which is much more convenient than direct state
+ manipulation.",
+ paper = "Yedi16.pdf"
+}
+
+\end{chunk}
+
\index{Yoshida, Masaaki}
\begin{chunk}{axiom.bib}
@book{Yosh97,
@@ 20627,6 +24479,64 @@ when shown in factored form.
\subsection{Z} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Zavialov, Vladislav}
+\begin{chunk}{axiom.bib}
+@misc{Zavi18,
+ author = "Zavialov, Vladislav",
+ title = {{Why Dependent Haskell is the Future of Software Development}},
+ link = "\url{https://serokell.io/blog/whydependenthaskell}",
+ year = "2018"
+}
+
+\end{chunk}
+
+\index{Zhao, Jinxu}
+\index{Oliveira, Bruno C.D.S}
+\index{Schrijvers, Tom}
+\begin{chunk}{axiom.bib}
+@inproceedings{Zhao19,
+ author = "Zhao, Jinxu and Oliveira, Bruno C.D.S and
+ Schrijvers, Tom",
+ title = {{A Mechanical Formalization of Higher-Ranked Polymorphic
+ Type Inference}},
+ booktitle = "Inter. Conf. on Functional Programming",
+ publisher = "ACM",
+ year = "2019",
+ abstract =
+ "Modern functional programming languages, such as Haskell or
+ OCaml, use sophisticated forms of type inference. While an
+ important topic in the Programming Languages research, there is
+ little work on the mechanization of the metatheory of type
+ inference in theorem provers. In particular we are unaware of any
+ complete formalization of the type inference algorithms that are
+ the backbone of modern functional languages.
+
+ This paper presents the first full mechanical formalization of the
+ metatheory for higherranked polymorphic type inference. The
+ system that we formalize is the bidirectional type system by
+ Dunfield and Krishnaswami (DK). The DK type system has two
+ variants (a declarative and an algorithmic one) that have been
+ manually proven sound, complete and decidable. We present a
+ mechanical formalization in the Abella theorem provers of DK's
+ declarative type system with a novel algorithmic system. We have a
+ few reasons to use a new algorithm. Firstly, our new algorithm
+ employs worklist judgments, which precisely capture the scope of
+ variables and simplify the formalization of scoping in a theorem
+ prover. Secondly, while DK's original formalization comes with
+ very wellwritten manual proofs, there are several details missing
+ and some incorrect proofs, which complicate the task of writing a
+ mechanized proof. Despite the use of a different algorithm we
+ prove the same results as DK, although with significantly
+ different proofs and proof techniques. Since such type inference
+ algorithms are quite subtle and have a complex metatheory,
+ mechanical formalizations are an important advance in type
+ inference research.",
+ paper = "Zhao19.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\section{Proving Axiom Correct  Spring 2018}
\subsection{A} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ 22241,7 +26151,7 @@ when shown in factored form.
proofchecking mathematical texts written in controlled natural
language, and has largely been implemented in this system.",
paper = "Cram14.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 22895,7 +26805,7 @@ when shown in factored form.
link = "\url{www.ams.org/notices/201410/rnotip1249.pdf}",
pages = "12491252",
paper = "Dura14.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 23394,7 +27304,7 @@ when shown in factored form.
Ada programs. We demonstrate the benefits of Why3 and WhyML on
nontrivial examples of program verification.",
paper = "Fill13.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 23954,7 +27864,7 @@ when shown in factored form.
similarities between them will be studied, showing the relationship
between these type theories and other fields of logic.",
paper = "Gual14.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 25135,33 +29045,6 @@ when shown in factored form.
\index{McBride, Conor}
\index{Swierstra, Wouter}
\begin{chunk}{axiom.bib}
@article{Lohx01,
 author = "Loh, Andres and McBride, Conor and Swierstra, Wouter",
 title = {{A Tutorial Implementation of a Dependently Typed Lambda
 Calculus}},
 journal = "Fundamenta Informaticae",
 volume = "XXI",
 year = "2001",
 pages = "10011031",
 abstract =
 "We present the type rules for a dependently typed core calculus
 together with a straightforward implementation in Haskell. We
 explicitly highlight the changes necessary to shift from a
 simplytyped lambda calculus to the dependently typed lambda
 calculus. We also describe how to extend our core language with data
 types and write several small example programs. The article is
 accompanied by an executable interpreter and example code that allows
 immediate experimentation with the system we describe.",
 paper = "Lohx01.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Loh, Andres}
\index{McBride, Conor}
\index{Swierstra, Wouter}
\begin{chunk}{axiom.bib}
@misc{Lohx18,
author = "Loh, Andres and McBride, Conor and Swierstra, Wouter",
title = {{Simply Easy! An Implementation of a Dependently Typed
@@ 25798,7 +29681,7 @@ when shown in factored form.
this toplevel theorem is the most comprehensive formal evidence of a
theorem prover’s soundness to date.",
paper = "Myre14.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 25878,7 +29761,7 @@ when shown in factored form.
i.e. produces code ondemand. All proofs have been performed
inside the HOL4 theorem prover.",
paper = "Myre10.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 25914,7 +29797,7 @@ when shown in factored form.
approach to machinecode verification scales to nontrivial
applications.",
paper = "Myre11.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 25943,7 +29826,7 @@ when shown in factored form.
to understand the operational semantics that gives meanings to the
deep embeddings.",
paper = "Myre12.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 26366,6 +30249,18 @@ when shown in factored form.
\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
+@misc{Pfen04a,
+ author = "Pfenning, Frank",
+ title = {{Lecture Notes on Bidirectional Type Checking}},
+ year = "2004",
+ paper = "Pfen04a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Pfenning, Frank}
+\begin{chunk}{axiom.bib}
@phdthesis{Pfen87,
author = "Pfenning, Frank",
title = {{Proof Transformations in HigherOrder Logic}},
@@ 27407,7 +31302,8 @@ when shown in factored form.
common denominator through complete multivariate partial fractions,
including a dense subset of all intermediate forms.
\end{itemize}",
 paper = "Stou11a.pdf"
+ paper = "Stou11a.pdf",
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 27460,6 +31356,51 @@ when shown in factored form.
\index{Stoutemyer, David R.}
\begin{chunk}{axiom.bib}
+@misc{Stou12a,
+ author = "Stoutemyer, David R.",
+ title = {{Can the Eureqa Symbolic Regression Program, Computer
+ Algebra and Numerical Analysis help each other?}},
+ link = "\url{https://arxiv.org/pdf/1203.1023.pdf}",
+ year = "2012",
+ abstract =
+ "The free Eureqa program has recently received extensive press
+ praise. A representative quote is
+ \begin{quote}
+ There are very clever 'thinking machines' in existence today,
+ such as Watson, the IBM computer that conquered {\sl Jeopardy!}
+ last year. But next to Eureqa, Watson is merely a glorified
+ search engine.
+ \end{quote}
+
+ The program is designed to work with noisy experimental data,
+ searching for then returning a set of result expressions that
+ attempt to optimally trade off conciseness with accuracy.
+
+ However, if the data is generated from a formula for which there
+ exists more concise equivalent formulas, sometimes some of the
+ candidate Eureqa expressions are one or more of those more concise
+ equivalents expressions. If not, perhaps one or more of the
+ returned Eureqa expressions might be a sufficiently accurate
+ approximation that is more concise than the given
+ formula. Moreover, when there is no known closed form expression,
+ the data points can be generated by numerical methods, enabling
+ Eureqa to find expressions that concisely fit those data points
+ with sufficient accuracy. In contrast to typical regression
+ software, the user does not have to explicitly or implicitly
+ provide a specific expression or class of expressions containing
+ unknown constants for the software to determine.
+
+ Is Eureqa useful enough in these regards to provide an additional
+ tool for experimental mathematics, computer algebra users and
+ numerical analysts? Yes, if used carefully. Can computer algebra
+ and numerical methods help Eureqa? Definitely.",
+ paper = "Stou12a.pdf"
+}
+
+\end{chunk}
+
+\index{Stoutemyer, David R.}
+\begin{chunk}{axiom.bib}
@article{Stou13,
author = "Stoutemyer, David R.",
title = {{A Computer Algebra User Interface Manifesto}},
@@ 27500,7 +31441,7 @@ when shown in factored form.
modes in a unified window.
\end{itemize}",
paper = "Stou13.pdf",
 kryeotfd = "printed"
+ kryeotfd = "printed, DONE"
}p
\end{chunk}
@@ 28273,7 +32214,7 @@ when shown in factored form.
rigorous proof, nor absolute certainty, and 'truth' has many shades and
levels.",
paper = "Zeil10.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 28889,7 +32830,7 @@ when shown in factored form.
Components project, which is working towards a verification of the
FeitThompson theorem.",
paper = "Avig12a.pdf",
 keywords = "coercion"
+ keywords = "coercion, printed, DONE"
}
\end{chunk}
@@ 28939,7 +32880,6 @@ when shown in factored form.
@misc{Avig17c,
author = "Avigad, Jeremy and Holzl, Johannes and Serafin, Luke",
title = {{A Formally Verified Proof of the Central Limit Theorem}},
 link = "\url{}",
year = "2017",
abstract =
"We describe a proof of the Central Limit Theorem that has been
@@ 30749,7 +34689,7 @@ when shown in factored form.
with code from a prototype of MathScheme 2.0, the system I am
developing with Bill Farmer and our research group.",
paper = "Care10.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 30805,7 +34745,7 @@ when shown in factored form.
and reflect, as much as possible, the mathematical structure present
in the objects which populate the library.",
paper = "Care11a.pdf",
 keywords = "axiomref, printed"
+ keywords = "axiomref, printed, DONE"
}
\end{chunk}
@@ 30847,7 +34787,7 @@ when shown in factored form.
year = "2011",
link = "\url{http://imps.mcmaster.ca/doc/cicm2011projdesc.pdf}",
paper = "Care11c.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 30868,7 +34808,7 @@ when shown in factored form.
fibered categories are the ideal theoretical tools for this
purpose.",
paper = "Care12.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 31128,9 +35068,29 @@ when shown in factored form.
\end{chunk}
\index{Coquand, Thierry}
\index{Huet, G\'erard}
\begin{chunk}{axiom.bib}
@techreport{Coqu86,
+ author = "Coquand, Thierry",
+ title = {{An Analysis of Girard's Paradox}},
+ year = "1986",
+ institution = "INRIA Centre de Rocquencourt",
+ number = "531",
+ abstract =
+ "We study the consistency of a few formal systems specially some
+ extensions of Church's calculus and the construction system. We
+ show that Church's calculus is not compatible with the notion of
+ secondorder type. We apply this result for showing that the
+ calculus of construction with four levels is inconsistent. We
+ suggest finally some consistent extensions of these two calculi.",
+ paper = "Coqu86.pdf"
+}
+
+\end{chunk}
+
+\index{Coquand, Thierry}
+\index{Huet, G\'erard}
+\begin{chunk}{axiom.bib}
+@techreport{Coqu86a,
author = {Coquand, Thierry and Huet, G\'erard},
title = {{The Calculus of Constructions}},
year = "1986",
@@ 31154,7 +35114,7 @@ when shown in factored form.
theory of a Calculus of Constructions, and prove a strong
normalization theorem showing that all computations terminate.
Finally, we suggest various extensions to stronger calculi.",
 paper = "Coqu86.pdf"
+ paper = "Coqu86a.pdf"
}
\end{chunk}
@@ 31224,6 +35184,21 @@ when shown in factored form.
\end{chunk}
\index{Coquand, Thierry}
+\begin{chunk}{axiom.bib}
+@misc{Coqu96a,
+ author = "Coquand, Thierry",
+ title = {{An Algorithm for TypeChecking Dependent Types}},
+ year = "1996",
+ abstract =
+ "We present a simple typechecker for a language with dependent
+ types and let expressions, with a simple proof of correctness.",
+ paper = "Coqu96a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Coquand, Thierry}
\index{Huet, G\'erard}
\index{Paulin, Christine}
\begin{chunk}{axiom.bib}
@@ 33402,7 +37377,7 @@ when shown in factored form.
OpenMath terms produced by a CAS in the calculus of Coq, as well as
viewing pure Coq terms in a simpler type system that is behind OpenMath.",
paper = "Kome11.pdf",
 keywords = "CASProof, printed"
+ keywords = "CASProof, printed, DONE"
}
\end{chunk}
@@ 33704,7 +37679,7 @@ when shown in factored form.
structuring, is simple and practical. The author's twenty years of
experience writing such proofs is discussed.",
paper = "Lamp14.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 34541,39 +38516,6 @@ when shown in factored form.
\index{Kong, Soonho}
\index{Roux, Cody}
\begin{chunk}{axiom.bib}
@misc{Mour15,
 author = "de Moura, Leonardo and Avigad, Jeremy and Kong, Soonho and
 Roux, Cody",
 title = {{Elaboration in Dependent Type Theory}},
 year = "2015",
 comment = "arXiv:1505.04324v2",
 abstract =
 "To be usable in practice, interactive theorem provers need to provide
 convenient and efficient means of writing expressions, definitions,
 and proofs. This involves inferring information that is often left
 implicit in an ordinary mathematical text, and resolving ambiguities
 in mathematical expressions. We refer to the process of passing from a
 quasiformal and partiallyspecified expression to a completely
 precise formal one as {\sl elaboration}. We describe an elaboration
 algorithms for dependent type theory that has been implemented in the
 Lean theorem prover. Lean's elaborator supports higherorder
 unification, type class inference, ad hoc overloading, insertion of
 coercions, the use of tactics, and the computational reduction of
 terms. The interactions between these components are subtle and
 complex, and the elaboration algorithm has been carefully designed to
 balance efficiency and usability. We describe the central design
 goals, and the means by which they are achieved.",
 paper = "Mour15.pdf",
 keywords = "coercion, printed"
}

\end{chunk}

\index{de Moura, Leonardo}
\index{Avigad, Jeremy}
\index{Kong, Soonho}
\index{Roux, Cody}
\begin{chunk}{axiom.bib}
@misc{Mour16,
author = "de Moura, Leonardo and Avigad, Jeremy and Kong, Soonho and
Roux, Cody",
@@ 34638,7 +38580,7 @@ when shown in factored form.
year = "2013",
link = "\url{http://ceurws.org/Vol1010/paper09.pdf}",
paper = "Neup13.pdf",
 keywords = "CASProof, printed"
+ keywords = "CASProof, printed, DONE"
}
\end{chunk}
@@ 35167,6 +39109,33 @@ when shown in factored form.
\end{chunk}
\index{Pierce, Benjamin C.}
+\index{Turner, David N.}
+\begin{chunk}{axiom.bib}
+@misc{Pier98,
+ author = "Pierce, Benjamin C. and Turner, David N.",
+ title = {{Local Type Inference}},
+ year = "1998",
+ link =
+ "\url{http://www.cis.upenn.edu/~bcpierce/papers/ltitoplas.pdf}",
+ abstract =
+ "We study two partial type inference methods for a language
+ combining subtyping and impredicative polymorphism. Both methods
+ are local in the sense that missing annotations are recovered
+ using only information from adjacent nodes in the syntax tree,
+ without longdistance constraints such as unification
+ variables. One method infers type arguments in polymorphic
+ applications using a local constraint solver. The other infers
+ annotations on bound variables in function abstractions by
+ propagating type constraints downward from enclosing application
+ nodes. We motivate our design choices by a statistical analysis of
+ the uses of type inference in a sizable body of existing ML code.",
+ paper = "Pier98.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Pierce, Benjamin C.}
\begin{chunk}{axiom.bib}
@book{Pier00,
author = "Pierce, Benjamin C.",
@@ 36197,7 +40166,7 @@ when shown in factored form.
new implementation, in particular the new user interface of the
system.",
paper = "Wind14.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 38730,19 +42699,23 @@ College Mathematics Journal Vol 25 No 4 (1994) pp295308
\end{chunk}
\index{Moses, Joel}
\begin{chunk}{ignore}
\bibitem[Moses 76]{Mos76} Moses, Joel
+\begin{chunk}{axiom.bib}
+@inproceedings{Mose76,
+ author = "Moses, Joel",
title = {{An introduction to the Risch Integration Algorithm}},
ACM Proc. 1976 annual conference pp425428
 ref = "00048",
 abstract = "
 Risch's decision procedure for determining the integrability in closed
+ booktitle = "Proc. 1976 annual conference",
+ publisher = "ACM",
+ pages = "425--428",
+ year = "1976",
+ abstract =
+ "Risch's decision procedure for determining the integrability in closed
form of the elementary functions of the calculus is presented via
examples. The exponential and logarithmic cases of the algorithsm had
been implemented for the MACSYMA system several years ago. The
implementation of the algebraic case of the algorithm is the subject
of current research.",
 paper = "Mos76.pdf"
+ paper = "Mose76.pdf"
+}
\end{chunk}
@@ 42619,78 +46592,6 @@ Proc ISSAC 97 pp172175 (1997)
\subsection{B} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Baker, Henry G.}
\begin{chunk}{axiom.bib}
@misc{Bake90,
 author = "Baker, Henry G.",
 title = {{The Nimble Type Inferencer for Common Lisp84}},
 link = "\url{http://home.pipeline.com/~hbaker1/TInference.html}",
 year = "1990",
 abstract =
 "We describe a framework and an algorithm for doing type inference
 analysis on programs written in full Common Lisp84 (Common Lisp
 without the CLOS objectoriented extensions). The objective of type
 inference is to determine tight lattice upper bounds on the range of
 runtime data types for Common Lisp program variables and
 temporaries. Depending upon the lattice used, type inference can also
 provide range analysis information for numeric variables. This lattice
 upper bound information can be used by an optimizing compiler to
 choose more restrictive, and hence more efficient, representations for
 these program variables. Our analysis also produces tighter control
 flow information, which can be used to eliminate redundant tests which
 result in dead code. The overall goal of type inference is to
 mechanically extract from Common Lisp programs the same degree of
 representation information that is usually provided by the programmer
 in traditional stronglytyped languages. In this way, we can provide
 some classes of Common Lisp programs execution time efficiency
 expected only for more stronglytyped compiled languages.

 The Nimble type inference system follows the traditional
 lattice/algebraic data flow techniques [Kaplan80], rather than the
 logical/theoremproving unification techniques of ML [Milner78]. It
 can handle polymorphic variables and functions in a natural way, and
 provides for ``casebased'' analysis that is quite similar to that used
 intuitively by programmers. Additionally, this inference system can
 deduce the termination of some simple loops, thus providing
 surprisingly tight upper lattice bounds for many loop variables.

 By using a higher resolution lattice, more precise typing of primitive
 functions, polymorphic types and case analysis, the Nimble type
 inference algorithm can often produce sharper bounds than
 unificationbased type inference techniques. At the present time,
 however, our treatment of higherorder data structures and functions
 is not as elegant as that of the unification techniques.",
 paper = "Bake90.html",
 keywords = "printed"
}

\end{chunk}

\index{Baker, Henry G.}
\begin{chunk}{axiom.bib}
@article{Bake91,
 author = "Baker, Henry G.",
 title = {{Pragmatic Parsing in Common Lisp}},
 journal = "ACM Lisp Pointers",
 volume = "IV",
 number = "2",
 pages = "315",
 year = "1991",
 abstract =
 "We review META, a classic technique for building recursive descent
 parsers, that is both simple and efficient. While META does not
 handle all possible regular or contextfree grammars, it handles a
 surprisingly large fraction of the grammars encountered by Lisp
 programmers. We show how META can be used to parse streams, strings
 and lists—including Common Lisp's hairy lambda expression parameter
 lists. Finally, we compare the execution time of this parsing method
 to the builtin methods of Common Lisp.",
 paper = "Bake91.pdf",
 keywords = "printed, DONE"
}

\end{chunk}

\index{Basu, Saugata}
\index{Pollack, Richard}
\index{Roy, MarieFrancoise}
@@ 53505,7 +57406,7 @@ ISBN 1581130732 LCCN QA76.95.I57 1999
concepts, in structured generic programming as practiced in
computational mathematical systems.",
paper = "Reis12.pdf",
 keywords = "axiomref, printed"
+ keywords = "axiomref, printed, DONE"
}
\end{chunk}
@@ 57900,7 +61801,7 @@ SIGPLAN Notices, New York: Association for Computing Machiner, Nov 1981
Adalike facility for defining types and packages (those of new
SCRATCHPAD are dynamically constructable, however). One language is
used for both interactive and system programming language use,
 although several freedomes such as abbreviation and optional
+ although several freedoms such as abbreviation and optional
typedeclarations allowed at toplevel are not permitted in system
code. The interactive language (levels 18) is a blend of original
SCRATCHPAD [GRJY75], some proposed extensions [JENK74], work by Loos
@@ 67474,6 +71375,20 @@ National Physical Laboratory. (1982)
\end{chunk}
+\index{Avigad, Jeremy}
+\begin{chunk}{axiom.bib}
+@misc{Avig17d,
+ author = "Avigad, Jeremy and de Moura, Leonardo and Ebner, Gabriel
+ and Ullrich, Sebastian",
+ title = {{An Introduction to Lean}},
+ year = "2017",
+ link = "\url{https://leanprover.github.io/introduction_to_lean/introduction_to_lean.pdf}",
+ paper = "Avig17d.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{B} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Bailey, Anthony}
@@ 68126,7 +72041,8 @@ J. Symbolic Computation (1993) 16, 131145
syntax with implicit arguments and type classes into a fully explicit
type theory. Furthermore, I show how this method facilitates the
implementation of new highlevel language constructs.",
 keywords = "printed"
+ paper = "Brad13.pdf",
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 73568,7 +77484,8 @@ Elsevier. (1967)
intuitionistic algebra, concentrated on issues raised by considering
algebraic structures over the real numbers, and so developed a
handmaiden'of analysis rather than a theory of discrete algebraic
 structures."
+ structures.",
+ paper = "Mine88.pdf"
}
\end{chunk}
diff git a/changelog b/changelog
index 06db22c..6ac2074 100644
 a/changelog
+++ b/changelog
@@ 1,3 +1,13 @@
+20191011 tpd src/axiomwebsite/patches.html 20191011.02.tpd.patch
+20191011 tpd books/bookvolbib add references
+20191011 tpd src/axiomwebsite/patches.html 20191011.01.tpd.patch
+20191011 tpd books/bookvol14.pamphlet add FFT details
+20191011 tpd src/axiomwebsite/index1.html fix links
+20191011 tpd readme add quote
+20191011 tpd add books/ps/bluebayou.png additional file format
+20191011 tpd add books/ps/bluebayou.eps additional file format
+20191011 tpd add books/ps/axiomfront.png additional file format
+20191011 tpd add books/ps/axiomfront.eps additional file format
20190531 tpd src/axiomwebsite/patches.html 20190531.01.tpd.patch
20190531 tpd books/bookvolbib add MODLISP Davenport references
20190531 tpd books/bookvol4 add MODLISP Davenport chapter
diff git a/patch b/patch
index 80e4dc6..9b07188 100644
 a/patch
+++ b/patch
@@ 1,4170 +1,3958 @@
books/bookvolbib added references
Goal: Proving Axiom Sane

\index{Abadi, Martin}
\index{Cardelli, Luca}
\index{Pierce, Benjamin}
\index{Plotkin, Gordon}
\begin{chunk}{axiom.bib}
@inproceedings{Adad89,
 author = "Abadi, Martin and Cardelli, Luca and Pierce, Benjamin
 and Plotkin, Gordon",
 title = {{Dynamic Typing in a Statically Typed Language}},
 booktitle = "16th Principles of Programming Languages",
 publisher = "ACM",
 pages = "213227",
 year = "1989"
 abstract =
 "Statically typed programming languages allow earlier error
 checking, better enforcement of disciplined programming styles,
 and generation of more efficient object code than languages where
 all type consistency checks are performed at run time. However,
 even in statically typed languages, there is often the need to
 deal with data whose type cannot be determined at compile time. To
 handle such situations safely, we propose to add a type Dynamic
 whose values are pairs of a value $v$ and a type tag T where $v$
 has the type denoted by T. Instances of Dynamic are built with an
 explicit tagging construct and inspected with a type safe typecase
 construct.

 This paper explores the syntax, operational semantics, and
 denotational semantics of a simple language including the type
 Dynamic. We give examples of how dynamically typed values can be
 used in programming. Then we discuss an operational semantics for
 our language and obtain a soundness theorem. We present two
 formulations of the denotational semantics of this language and
 relate them to the operational semantics. Finally, we consider the
 implications of polymorphism and some implementation issues.",
 paper = "Abad89.pdf",
 keywords = "printed"
}

\end{chunk}
\index{AitKaci, Hassan}
\begin{chunk}{axiom.bib}
@book{Aitk99,
 author = "AitKaci, Hassan",
 title = {{Warren's Abstract Machine: A Tutorial Reconstruction}},
 publisher = "MIT Press",
 isbn = "0262510588",
 year = "1999",
 link = "\url{http://wambook.sourceforge.net/wambook.pdf}",
 paper = "Aitk99.pdf",
 keywords = "printed"
+\index{Benoit, Alexandre}
+\index{Chyzak, Frederic}
+\index{Darrasse, Alexis}
+\index{Gregoire, Thomas}
+\index{Koutschan, Christoph}
+\index{Mezzarobba, Marc}
+\index{Salvy, Bruno}
+\begin{chunk}{axiom.bib}
+@misc{DDMF19,
+ author = "Benoit, Alexandre and Chyzak, Frederic and Darrasse, Alexis
+ and Gregoire, Thomas and Koutschan, Christoph and
+ Mezzarobba, Marc and Salvy, Bruno",
+ title = {{Digital Dictionary of Mathematical Functions}},
+ year = "2019",
+ link = "\url{ddmf.msr-inria.inria.fr/1.9.1/ddmf}",
+ abstract =
+ "Interactive site on Mathematical Functions with properties,
+ truncated expansions, numerical evaluations, plots, and more. The
+ functions currently presented are elementary functions with
+ special functions of a single variable. More functions -- special
+ functions with parameters, orthogonal polynomials, sequences --
+ will be added with the project advances.",
+ paper = "DDMF19.pdf",
+ keywords = "axiomref"
}
\end{chunk}
\index{Bagnara, Roberto}
\index{Bagnara, Abramo}
\index{Biselli, Fabio}
\index{Chiari, Michele}
\index{Gori, Roberta}
+\index{Stoutemyer, David R.}
\begin{chunk}{axiom.bib}
@misc{Bagn19,
 author = "Bagnara, Roberto and Bagnara, Abramo and Biselli, Fabio
 and Chiari, Michele and Gori, Roberta",
 title = {{Correct Approximation of IEEE 754 FloatingPoint
 Arithmetic for Program Verification}},
 year = "2019",
 link = "\url{https://arxiv.org/abs/1903.06119}",
+@misc{Stou12a,
+ author = "Stoutemyer, David R.",
+ title = {{Can the Eureqa Symbolic Regression Program, Computer
+ Algebra and Numerical Analysis help each other?}},
+ link = "\url{https://arxiv.org/pdf/1203.1023.pdf}",
+ year = "2012",
abstract =
 "Verification of programs using floatingpoint arithmetic is
 challenging on several accounts. One of the difficulties of
 reasoning about such programs is due to the peculiarities of
 floatingpoint arithmetic: rounding errors, infinities,
 nonnumeric objects (NaNs), signed zeros, denormal numbers,
 different rounding modes... One possibility to reason about
 floatingpoint arithmetic is to model a program computation path
 by means of a set of ternary constraints of the form $z=x op y$
 and use constraint propagation techniques to infer new information
 on the variables' possible values. In this setting, we define and
 prove the correctness of algorithms to precisely bound the value
 of one of the variables $x$, $y$, or $z$, starting from the bounds
 known for the other two. We do this for each of the operations and
 for each rounding mode defined by the IEEE 754 binary
 floatingpoint standard, even in the case the rounding mode in
 effect is only partially known. This is the first time that such
 socalled filtering algorithms are defined and their correctness
 is formally proved. This is an important slab for paving the way
 to formal verification of programs that use floatingpoint
 arithmetics.",
 paper = "Bagn19.pdf",
+ "The free Eureqa program has recently received extensive press
+ praise. A representative quote is
+ \begin{quote}
+ There are very clever 'thinking machines' in existence today,
+ such as Watson, the IBM computer that conquered {\sl Jeopardy!}
+ last year. But next to Eureqa, Watson is merely a glorified
+ search engine.
+ \end{quote}
+
+ The program is designed to work with noisy experimental data,
+ searching for then returning a set of result expressions that
+ attempt to optimally trade off conciseness with accuracy.
+
+ However, if the data is generated from a formula for which there
+ exists more concise equivalent formulas, sometimes some of the
+ candidate Eureqa expressions are one or more of those more concise
+ equivalents expressions. If not, perhaps one or more of the
+ returned Eureqa expressions might be a sufficiently accurate
+ approximation that is more concise than the given
+ formula. Moreover, when there is no known closed form expression,
+ the data points can be generated by numerical methods, enabling
+ Eureqa to find expressions that concisely fit those data points
+ with sufficient accuracy. In contrast to typical regression
+ software, the user does not have to explicitly or implicitly
+ provide a specific expression or class of expressions containing
+ unknown constants for the software to determine.
+
+ Is Eureqa useful enough in these regards to provide an additional
+ tool for experimental mathematics, computer algebra users and
+ numerical analysts? Yes, if used carefully. Can computer algebra
+ and numerical methods help Eureqa? Definitely.",
+ paper = "Stou12a.pdf"
+}
+
+\end{chunk}
+
+\index{Barthe, Gilles}
+\begin{chunk}{axiom.bib}
+@article{Bart85,
+ author = "Barthe, Gilles",
+ title = {{Implicit Coercions in Type Systems}},
+ journal = "LNCS",
+ volume = "1158",
+ pages = "1--15",
+ year = "1996",
+ abstract =
+ "We propose a notion of pure type system with implicit
+ coercions. In our framework, judgements are extended with a
+ context of coerions $\Delta$ and the application rule is modified
+ so as to allow coercions to be left implicit. The setting supports
+ multiple inheritance and can be applied to all type theories with
+ $\Pi$types. One originality of our work is to propose a
+ computational interpretation of implict coercions. In this paper,
+ we demonstrate how this interpretation allows a strict control on
+ the logical properties of pure type systems with implicit coercions.",
+ paper = "Bart85.pdf",
keywords = "printed"
}
\end{chunk}
\index{Bakel, Steffan van}
+\index{Barthe, Gilles}
+\index{Ruys, Mark}
+\index{Barendregt, Henk}
\begin{chunk}{axiom.bib}
@article{Bake93,
 author = "Bakel, Steffan van",
 title = {{Principal Type Schemes for the Strict Type Assignment System}},
 journal = "J. Logic and Computation",
 volume = "3",
 number = "6",
 pages = "643670",
 year = "1993",
+@article{Bart85a,
+ author = "Barthe, Gilles and Ruys, Mark and Barendregt, Henk",
+ title = {{A TwoLevel Approach Towards Lean ProofChecking}},
+ journal = "LNCS",
+ volume = "1158",
+ pages = "16--35",
+ year = "1996",
abstract =
 "We study the strict type assignment system, a restriction on the
 intersection type discipline and prove that it has the principal
 type property. W define, for a term $M$, the principal pair (of
 basis and type). We specify three operations on pairs, and prove
 that all pairs deducible for $M$ can be obtained from the
 principal one by these operations, and that these map deducible
 pairs to deducible pairs.",
 paper = "Bake93.pdf",
+ "We present a simple and effective methodology for equational
+ reasoning in proof checkers. The method is based on a twolevel
+ approach distinguishing between syntax and semantics of
+ mathematical theories. The method is very general and can be
+ carried out in any system with inductive and oracle types. The
+ potential of our twolevel approach is illustrated by some
+ examples developed in Lego.",
+ paper = "Bart85a.pdf",
keywords = "printed"
}
\end{chunk}
@inbook{Boye72,
 author = "Boyer, Robert S. and Moore, J Strother",
 title = {{The Sharing of Structure in Theorem Proving Programs}},
 booktitle = "Machine Intelligence 7",
 pages = "110116",
 year = "1972",
+\index{Luo, Zhaohui}
+\begin{chunk}{axiom.bib}
+@article{Loux08,
+ author = "Luo, Zhaohui",
+ title = {{Coercions in a Polymorphic Type System}},
+ journal = "Math. Struct. in Comp. Science",
+ volume = "18",
+ pages = "729--751",
+ year = "2008",
abstract =
 "We describe how clauses in resolution programs can be represented
 and used without applying substitutions or consing lists of
 literals. The amount of space required by our representation of a
 clause is independent of the number of literals in the clause and
 the depth of function nesting. We introduce the concept of the
 value of an expression in a binding environment which we use to
 standardize clauses apart and share the structure of parents in
 representing the resolvent. We present unification and resolution
 algorithms for our representation. Some data comparing our
 representation to more conventional ones is given.",
 paper = "Boye72.pdf",
+ "We incorporate the idea of coercive subtyping, a theory of
+ abbreviation for dependent type theories, into the polymorphic
+ type system in functional programming languages. The traditional
+ type system with letpolymorphism is extended with argument
+ coercions and function coercions, and a corresponding type
+ inference algorithm is presented and proved to be sound and complete.",
+ paper = "Loux08.pdf",
keywords = "printed"
}
\end{chunk}
\index{Clark, K.L.}
\index{Tarnlund, S.A.}
+\index{Berger, U.}
+\index{Schwichtenberg, H.}
\begin{chunk}{axiom.bib}
@book{Clar82,
 author = "Clark, K.L. and Tarnlund, S.A.",
 title = {{Logic Programming}},
 publisher = "Academic Press",
 year = "1982",
 isbn = "0121755207"
+@article{Berg85,
+ author = "Berger, U. and Schwichtenberg, H.",
+ title = {{The Greatest Common Divisor: A Case Study for Program
+ Extraction from Classical Proofs}},
+ journal = "LNCS",
+ volume = "1158",
+ pages = "36--46",
+ year = "1996",
+ paper = "Berg85.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Daly, Timothy}
\begin{chunk}{axiom.bib}
@misc{Daly18a,
 author = "Daly, Timothy",
 title = {{Proving Axiom Sane Talk}},
 comment = "International Conference on Mathematical Software",
 journal = "LNCS",
 volume = "10931",
 year = "2018",
 paper = "Daly18a.pdf"
}

\index{Dzamonja, Mirna}
+\index{Luo, Zhaohui}
\begin{chunk}{axiom.bib}
@misc{Dzam18,
 author = "Dzamonja, Mirna",
 title =
 {{A New Foundational Crisis in Mathematics, Is it really happening?}},
 link = "\url{https://arxiv.org/pdf/1802.06221.pdf}",
 year = "2018",
+@article{Loux12,
+ author = "Luo, Zhaohui",
+ title = {{Formal Semantics in Modern Type Theories with Coercive Semantics}},
+ journal = "Linguistics and Philosophy",
+ volume = "35",
+ pages = "491--513",
+ year = "2012",
abstract =
 "The article reconsiders the position of the foundations of
 mathematics after the discovery of HoTT. Discussion that this
 discovery has generated in the community of mathematicians,
 philosophers and computer scientists might indicate a new crisis
 in the foundation of mathematics. By examining the mathematical
 facts behind HoTT and their relation with the existing
 foundations, we conclude that the present crisis is not one. We
 reiterate a pluralist vision of the foundations of mathematics.

 The article contains a short survey of the mathematical and
 historical background needed to understand the main tenets of the
 fundational issues.",
 paper = "Dzam18.pdf",
+ "In the formal semantics based on modern type theories, common
+ nouns are interpreted as types, rather than as predicates on
+ entities as in Montague's semantics. This brings about important
+ advantages in linguistic interpretations but also leads to a
+ limitation of expressive power because there are fewer operations
+ on types as compared with those on predicates. The theory of
+ coercive subtyping adequately extends the modern type theories
+ and, as shown in this paper, plays a very useful role in making
+ type theories more expressive for formal semantics. It not only
+ gives a satisfactory solution to the basic problem of 'multiple
+ categorisations' caused by interpreting common nouns as types, but
+ provides a powerful formal framework to model interesting
+ linguistic phenomena such as copredication, whose formal treatment
+ has been found difficult in a Montagovian setting. In particular,
+ we show how to formally introduce dottypes in a type theory with
+ coercive subtyping and study some typetheoretic constructs that
+ provide useful representational tools for reference transfers and
+ multiple word meanings in formal lexical semantics.",
+ paper = "Loux12.pdf",
keywords = "printed"
}
\end{chunk}

\index{Giannini, Paola}
+
+\index{Jedynak, Wojciech}
+\index{Biernacka, Malgorzata}
+\index{Biernacki, Dariusz}
\begin{chunk}{axiom.bib}
@techreport{Gian85,
 author = "Giannini, Paola",
 title = {{Type Checking and Type Deduction Techniques for
 Polymorphic Programming Languages}},
 type = "technical report",
 institution = "Carnegie Mellon University",
 number = "CMUCS85187",
 year = "1985",
 abstract =
 "In this paper we present some of the syntactic issues that arise
 in polymorphic programming languages. In particular we examine
 type checking and deduction in two different polymorphic type
 strucutres: the parametric lambdacalculus (with let construct)
 and the polymorphic or secondorder lambdacalculus. In both
 approaches the behavior of types is formalized with type inference
 rules. Examples of programming languages following those
 approaches are presented and some of their specific problems
 studied.",
 paper = "Gian85.pdf",
 keywords = "printed"
+@inproceedings{Jedy13,
+ author = "Jedynak, Wojciech and Biernacka, Malgorzata and
+ Biernacki, Dariusz",
+ title = {{An Operational Foundation for the Tactic Language of Coq}},
+ booktitle = "Proc. 15th Symp. on Principles and Practices of
+ Declarative Programming",
+ publisher = "ACM",
+ pages = "25--36",
+ year = "2013",
+ isbn = "9781450321549",
+ abstract =
+ "We introduce a semantic toolbox for Ltac, the tactic language of
+ the popular Coq proof assistant. We present three formats of
+ operational semantics, each of which has its use in the practice
+ of tactic programming: a bigstep specification in the form of
+ natural semantics, a model of implementation in the form of an
+ abstract machine, and a smallstep characterization of computation
+ in the form of reduction semantics. The three semantics are
+ provably equivalent and have been obtained via offtheshelf
+ derivation techniques of the functional correspondence and the
+ syntactic correspondence. We also give examples of Ltac programs
+ and discuss some of the issues that the formal semantics help to
+ clarify.
+
+ With this work we hope to enhance the operational understanding of
+ Ltac as well as to set up a framework to reason about Coq scripts
+ and to build tools supporting tactic programming based on rigorous
+ semantics.",
+ paper = "Jedy13.pdf"
}
\end{chunk}
\begin{chunk}{axiom.bib}
@article{Harp92,
 author = "Harper, Robert",
 title = {{Constructing Type Systems over an Operational Semantics}},
 journal = "J. Symbolic Computation",
 volume = "14",
 pages = "7184",
 year = "1992",
+\index{Asperti, Andrea}
+\index{Ricciotti, Wilmer}
+\index{Coen, Claudio Sacerdoti}
+\index{Tassi, Enrico}
+@article{Aspe12b,
+ author = "Asperti, Andrea and Ricciotti, Wilmer and
+ Coen, Claudio Sacerdoti and Tassi, Enrico",
+ title = {{Formal Metatheory of Programming Languages in the Matita
+ Interactive Theorem Prover}},
+ journal = "Journal of Automated Reasoning",
+ volume = "49",
+ number = "3",
+ pages = "427--451",
+ year = "2012",
abstract =
 "Type theories in the sense of MartinLof and the NuPRL system are
 based on taking as primitive a typefree programming language
 given by an operational semantics, and defining types as partial
 equivalence relations on the set of closed terms. The construction
 of a type system is based on a general form of inductive
 definition that may either be taken as acceptable in its own
 right, or further explicated in terms of other patterns of
 induction. One suc account, based on a general theory of
 inductively defined relations, was given by Allen. An alternative
 account, based on an essentially set theoretic argument, is
 presented.",
 paper = "Harp92.pdf",
+ "This paper is a report about the use of Matita, an interactive
+ theorem prover under development at the University of Bologna, for
+ the solution of the POPLmark Challenges, part 1a. We provide three
+ different formalizations, including two direct solutions using
+ pure de Bruijn and locally nameless encodings of bound variables,
+ and a formalization using named variables, obtained by means of a
+ sound translation to the locally nameless encoding. According to
+ this experience, we also discuss some of the proof principles used
+ in our solutions, which have led to the development of a
+ generalized inversion tactic for Matita.",
+ paper = "Aspe12b.pdf",
keywords = "printed"
}
\end{chunk}
+\index{Strecker, Martin}
\begin{chunk}{axiom.bib}
@misc{lion137,
 author = "Unknown",
 title = {{Thoughts in Free Time}},
 link = "\url{https://lion137.blogspot.com/2019/02/fundamentalalgorithmspolyomialgcd.html}",
 comment = "\url{https://github.com/lion137/Fundamental_Algorithsms}",
 year = "2019"
+@phdthesis{Stre99,
+ author = "Strecker, Martin",
+ title = {{Construction and Deduction in Type Theories}},
+ school = "Universit{\"a}t Ulm",
+ year = "1999",
+ abstract =
+ "This dissertation is concerned with interactive proof
+ construction and automated proof search in type theories, in
+ particular the Calculus of Constructions and its subsystems.
+
+ Type theories can be conceived as expressive logics which combine
+ a functional programming language, strong typing and a higher-order
+ logic. They are therefore a suitable formalism for specification
+ and verification systems. However, due to their expressiveness, it
+ is difficult to provide appropriate deductive support for type
+ theories. This dissertation first examines general methods for
+ proof construction in type theories and then explores how these
+ methods can be refined to yield proof search procedures for
+ specialized fragments of the language.
+
+ Proof development in type theories usually requires the
+ construction of a term having a given type in a given context. For
+ the term to be constructed, a {\sl metavariable} is introduced
+ which is successively instantiated in the course of the proof. A
+ naive use of metavariables leads to problems, such as
+ noncommutativity of reduction and instantiation and the
+ generation of illtyped terms during reduction. For solving these
+ problems, a calculus with {\sl explicit substitutions} is
+ introduced, and it is shown that this calculus preserves
+ properties such as strong normalisation and decidability of typing.
+
+ In order to obtain a calculus appropriate for proof search, the
+ usual natural deduction presentation of type theories is replaced
+ by a {\sl sequent style presentation}. It is shown that the
+ calculus thus obtained is correct with respect to the original
+ calculus. Completeness (proved with a cutelimination argument) is
+ shown for all predicative fragments of the lambda cube.
+
+ This dissertation concludes with a discussion of some techniques
+ that make proof search practically applicable, such as unification
+ and pruning of the proof search space by exploiting
+ impermutabilities of the sequent calculus.",
+ paper = "Stre99.pdf"
+}
+
+\end{chunk}
+
+\index{Luo, Zhaohui}
+\begin{chunk}{axiom.bib}
+@phdthesis{Loux90,
+ author = "Luo, Zhaohui",
+ title = {{An Extended Calculus of Constructions}},
+ school = "University of Edinburgh",
+ year = "1990",
+ abstract =
+ "This thesis presents and studies a unifying theory of dependent
+ types ECC Extended Calculus of Constructions. ECC integrates
+ CoquandHuet's (impredicative) calculus of constructions and
+ MartinLof's (predicative) type theory with universes, and turns
+ out to be a strong and expressive calculus for formalization of
+ mathematics, structured proof development and program specification.
+
+ The meta theory of ECC is studied and we show that the calculus
+ has good metatheoretic properties. The main proof theoretic
+ result is the {\sl strong normalization theorem} which makes
+ explicit the predicativity of the predicative universes. The
+ strong normalization result shows the proof theoretic consistency
+ of the calculus; in particular, it implies the consistency of the
+ embedded intuitionistic higherorder logic and the decidability of
+ the theory. The metatheoretic results establish the theoretical
+ foundations both for pragmatic applications in theorem proving and
+ program specification and for computer implementations of the
+ theory. ECC has been implemented in the proof development system
+ LEGO developed by Pollack.
+
+ In ECC, dependent $\Sigma$ types are nonpropositional types
+ residing in the predicative universes and propositions are lifted
+ as higherlevel types as well. This solves the known difficulty
+ that adding strong $\Sigma$ types to an impredicative system
+ results in logical paradox and enables $\Sigma$ types to be used
+ to express the intuitionistic notion of subsets. $\Sigma$ types
+ together with type universes hence provide useful abstraction and
+ module mechanisms for abstract description of mathematical
+ theories and basic mechanisms for program specification and
+ adequate formalization of abstract mathematics (e.g. abstract
+ algebras and notions in category theory). A notion of (abstract)
+ mathematical theory can be described and leads to a promising
+ approach to {\sl abstract reasoning} and {\sl structured
+ reasoning}. Program specifications can be expressed by $\Sigma$
+ types, using propositions in the embedded logic to describe
+ program properties (for example, by an equality reflection result,
+ computational equality can be modeled by the propositional
+ Leibniz's equality definable in the theory). These developments
+ allow comprehensive structuring of formal or rigorous development
+ of proofs and programs.
+
+ Also discussed is how the calculus can be understood
+ settheoretically. We explain an $\omegaSet$ (realizability)
+ model of the theory. In particular, propositions can be
+ interpreted as partial equivalence relations and the predicative
+ type universes as corresponding to large set universes.",
+ paper = "Loux90.pdf"
+}
+
+\end{chunk}
+
+\index{von Henke, F.W.}
+\index{Luther, M.}
+\index{Pfeifer, H.}
+\index{Ruess, H.}
+\index{Schwier, D.}
+\index{Strecker, M.}
+\index{Wagner, M.}
+\begin{chunk}{axiom.bib}
+@article{Henk96,
+ author = "von Henke, F.W. and Luther, M. and Pfeifer, H. and Ruess, H.
+ and Schwier, D. and Strecker, M. and Wagner, M.",
+ title = {{The TYPELAB Specification and Verification Environment}},
+ journal = "LNCS",
+ volume = "1101",
+ pages = "604--607",
+ year = "1996",
+ paper = "Henk96.pdf"
}
\end{chunk}

\index{Norrish, Michael}
\index{Slind, Konrad}
\begin{chunk}{axiom.bib}
@article{Norr02,
 author = "Norrish, Michael and Slind, Konrad",
 title = {{A Thread of HOL Development}},
 journal = "Computer Journal",
 volume = "45",
 number = "1",
 pages = "3745",
 year = "2002",
+
+\index{von Henke, F.W.}
+\index{Dold, A.}
+\index{Ruess, H.}
+\index{Schwier, D.}
+\index{Strecker, M.}
+\begin{chunk}{axiom.bib}
+@article{Henk94,
+ author = "von Henke, F.W. and Dold, A. and Ruess, H. and Schwier, D.
+ and Strecker, M.",
+ title = {{Construction and Deduction Methods for the Formal
+ Development of Software}},
+ journal = "LNCS",
+ number = "1009",
+ year = "1994",
abstract =
 "The HOL system is a mechanized proof assistant for higher order
 logic that has been under continuous development since the
 mid-1980s, by an ever-changing group of developers and external
 contributors. We give a brief overview of various implementations
 of the HOL logic before focusing on the evolution of certain
 important features available in a recent implementation. We also
 illustrate how the module system of Standard ML provided security
 and modularity in the construction of the HOL kernel, as well as
 serving in a separate capacity as a useful representation medium
 for persistent, hierarchical logical theories.",
 paper = "Norr02.pdf",
+ "In this paper we present an approach towards a framework based on
+ the type theory ECC (Extended Calculus of Constructions) in which
+ specifications, programs and operators for modular development by
+ stepwise refinement can be formally described and reasoned
+ about. We show that generic software development steps can be
+ expressed as higherorder functions and demonstrate that proofs
+ about their asserted effects can be carried out in the underlying
+ logical calculus.
+
+ For transformations requiring syntactic manipulations of objects,
+ a twolevel system comprising a Meta and an Objectlevel is
+ provided, and it is shown how transformations can be formalized
+ that faithfully represent operators on the object level.",
+ paper = "Henk94.pdf",
keywords = "printed"
}
+}
\end{chunk}
\index{Paulson, Lawrence C.}
+\index{Kamareddine, Fairouz}
+\index{Laan, Twan}
+\index{Nederpelt, Rob}
\begin{chunk}{axiom.bib}
@inbook{Paul90b,
 author = "Paulson, Lawrence C.",
 title = {{Designing a Theorem Prover}},
 booktitle = "Handbook of Logic in Computer Science, Volume 2",
 publisher = "Oxford University Press",
 pages = "415475",
 year = "1992",
 paper = "Paul90b.pdf",
 keywords = "printed"
+@book{Kama05,
+ author = "Kamareddine, Fairouz and Laan, Twan and Nederpelt, Rob",
+ title = {{A Modern Perspective on Type Theory}},
+ comment = "Applied Logic Series 29",
+ publisher = "Kluwer Academic Publishers",
+ isbn = "1402023359",
+ year = "2005",
+ paper = "Kama05.pdf"
}
\end{chunk}
\index{Robinson, J.A.}
\index{Sibert, E.E.}
+\index{Andrews, Peter B.}
\begin{chunk}{axiom.bib}
@techreport{Robi80,
 author = "Robinson, J.A. and Sibert, E.E.",
 title = {{Loglisp: An Alternative to Prolog}},
 type = "technical report",
 institution = "University of Syracuse",
 number = "807",
 year = "1980",
 paper = "Robi80.pdf",
 keywords = "printed"
+@book{Andr02,
+ author = "Andrews, Peter B.",
+ title = {{An Introduction to Mathematical Logic and Type Theory: To
+ Truth Through Proof}},
+ comment = "Applied Logic Series 27",
+ publisher = "Springer",
+ year = "2002",
+ isbn = "9789401599344",
+ paper = "Andr02.pdf"
}
\end{chunk}

\index{Demers, Alan}
\index{Donahue, James}
+
+\index{Strecker, M.}
+\index{Luther, M.}
+\index{von Henke, F.}
\begin{chunk}{axiom.bib}
@inproceedings{Deme80,
 author = "Demers, Alan and Donahue, James",
 title = {{Type Completeness as a Language Principle}},
 booktitle = "POPL 80",
 publisher = "ACM",
 pages = "234244",
 year = "1980",
+@inbook{Stre98,
+ author = "Strecker, M. and Luther, M. and von Henke, F.",
+ title = {{Interactive and Automated Proof Construction in Type Theory}},
+ publisher = "Springer",
+ chapter = "3",
+ pages = "73--96",
+ isbn = "9789401704359",
+ year = "1998",
abstract =
 "The problem of Von Neumann languages is that their changeable
 parts have so little expressive power  John Backus",
 paper = "Deme80.pdf",
 keywords = "printed"
+ "This chapter gives a survey of TYPELAB, a specification and
+ verification environment that integrates interactive proof
+ development and automated proof search. TYPELAB is based on a
+ constructive type theory, the Calculus of Constructions, which can
+ be understood as a combination of a typed $\lambda$calculus and
+ an expressive higherorder logic. Distinctive features of the type
+ system are dependent function types ($\Pi$ types) for modeling
+ polymorphism and dependent record types ($\Sigma$ types) for
+ encoding specifications and mathematical theories.",
+ paper = "Stre98.pdf"
}

+
\end{chunk}
\index{Jammer, Max}
+\index{Kreitz, Christoph}
\begin{chunk}{axiom.bib}
@book{Jamm66,
 author = "Jammer, Max",
 title = {{The Conceptual Development of Quantum Mechanics}},
 year = "1996",
 publisher = "McGrawHill"
+@inbook{Krei98,
+ author = "Kreitz, Christoph",
+ title = {{Program Synthesis}},
+ booktitle = "Automated Deduction  A Basis for Applications (Vol III)",
+ publisher = "Springer",
+ year = "1998",
+ chapter = "5",
+ pages = "105--134",
+ isbn = "9789401704373",
+ comment = "Applied Logic Series, volume 10",
+ paper = "Krei98.pdf"
}
\end{chunk}
\index{Mackie, Ian}
\index{Pinto, Jorge Sousa}
+\index{Wadler, Philip}
\begin{chunk}{axiom.bib}
@article{Mack02,
 author = "Mackie, Ian and Pinto, Jorge Sousa",
 title = {{Encoding Linear Logic with Interaction Combinators}},
 journal = "Information and Computation",
 volume = "176",
 pages = "153186",
 year = "2002",
 abstract =
 "The purpose of this paper is to demonstrate how Lafont's
 interaction combinators, a system of three symbols and six
 interaction rules, can be used to encode linear
 logic. Specifically, we give a translation of the multiplicative,
 exponential, and additive fragments of linear logic together with
 a strategy for cutelimination which can be faithfully
 simulated. Finally, we show briefly how this encoding can be used
 for evaluating $\lambda$terms. In addition to offering a very
 simple, perhaps the simplest, system of rewriting for linear logic
 and the $\lambda$calculus, the interaction net implementation
 that we present has been shown by experimental testing to offer a
 good level of sharing in terms of the number of cutelimination
 steps (resp. $\beta$reduction steps). In particular it performs
 better than all extant finite systems of interaction nets.",
 paper = "Mack02.pdf",
+@misc{Wadl00,
+ author = "Wadler, Philip",
+ title = {{Proofs are Programs: 19th Century Logic and 21st Century
+ Computing}},
+ link = "\url{https://homepages.inf.ed.ac.uk/wadler/papers/frege/frege.pdf}",
+ year = "2000",
+ paper = "Wadl00.pdf",
keywords = "printed"
}
\end{chunk}
+\index{Wang, Paul S.}
\begin{chunk}{axiom.bib}
@inproceedings{Oisd18,
 author = "Anonymous",
 title = {{Solving Rings in Agda}},
 booktitle = "Proc. ACM Program. Lang.",
 publisher = "ACM",
 year = "2018",
+@article{Wang80,
+ author = "Wang, Paul S.",
+ title = {{The EEZGCD Algorithm}},
+ journal = "SIGSAM Bulletin",
+ volume = "14",
+ number = "2",
+ pages = "50--60",
+ year = "1980",
abstract =
 "We present a new library which automates the construction of
 equivalence proofs between polynomials over commutative rings and
 semirings in the programming language Agda [Norell and Chapman
 2008]. It is significantly faster than Agda's existing solver. We
 use reflection to provide a simple interface to the solver, and
 demonstrate how to use the constructed proofs to provide
 stepbystep solutions.",
 paper = "Oisd18.pdf",
+ "An enhanced gcd algorithm based on the EXGCD algorithm is
+ described. Implementational aspects are emphasized. It is
+ generally faster and is particularly suited for computing gcd of
+ sparse multivariate polynomials. The EEZGCD algorithm is
+ characterized by the following features:
+ \begin{enumerate}
+ \item avoiding unlucky evaluations,
+ \item predetermining the correct leading coefficient of the
+ desired gcd,
+ \item using the sparsity of the given polynomials to determine
+ terms in the gcd and
+ \item direct methods for dealing with the ``common divisor problem.''
+ \end{enumerate}
+ The common divisor problem occurs when the gcd has a different
+ common divisor with each of the cofactors. The EZGCD algorithm
+ does a squarefree decomposition in this case. It can be avoided
+ resulting in increased speed. One method is to use parallel padic
+ construction of more than two factors. Machine examples with
+ timing data are included.",
+ paper = "Wang80.pdf",
keywords = "printed"
}
\end{chunk}
\index{Harvey, David}
\index{van der Hoeven, Joris}
+\index{Tsuji, Kuniaki}
\begin{chunk}{axiom.bib}
@misc{Harv19,
 author = "Harvey, David and van der Hoeven, Joris",
 title = {{Integer Multiplication in Time O(n log n)}},
 link = "\url{https://hal.archivesouvertes.fr/hal.02070778/document}",
 year = "2019",
 abstract =
 "We present an algorithm that computes the product of two
 n-bit integers in O(n log n) bit operations",
 paper = "Harv19.pdf"
+@article{Tsuj09,
+ author = "Tsuji, Kuniaki",
+ journal = "Journal of Symbolic Computation",
+ title = {{An Improved EZGCD Algorithm for Multivariate Polynomials}},
+ volume = "44",
+ number = "1",
+ year = "2009",
+ pages = "99--110",
+ abstract =
+ "The EZGCD algorithm often has a badzero problem, which has a
+ remarkable influence on polynomials with higherdegree terms. In
+ this paper, by applying special ideals, the EZGCD algorithm for
+ sparse polynomials is improved. This improved algorithm greatly
+ reduces computational complexity because of the sparseness of
+ polynomials. The author expects that the use of these ideals will
+ be useful as a resolution for obtaining a GCD of sparse
+ multivariate polynomials with higherdegree terms.",
+ paper = "Tsuj09.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Dunfield, Joshua}
\index{Krishnaswami, Neelakantan R.}
+\index{Sanuki, Masaru}
+\index{Inaba, Daiju}
+\index{Sasaki, Tateaki}
\begin{chunk}{axiom.bib}
@misc{Dunf13,
 author = "Dunfield, Joshua and Krishnaswami, Neelakantan R.",
 title = {{Complete and Easy Bidirectional Typechecking for HigherRank
 Polymorphism}},
 link = "\url{https://arxiv.org/pdf/1306.6032.pdf}",
 year = "2013",
 abstract =
 "Bidirectional typechecking, in which terms either synthesize a
 type or are checked against a known type, has become popular for
 its scalability (unlike DamasMilner type inference, bidirectional
 typing remains decidable even for very expressive type systems),
 its error reporting, and its relative ease of
 implementation. Following design principles from proof theory,
 bidirectional typing can be applied to many type constructs. The
 principles underlying a bidirectional approach to polymorphism,
 however, are less obvious. We give a declarative, bidirectional
 account of higherrank polymorphism, grounded in proof theory;
 this calculus enjoys many properties such as $\eta$reduction and
 predictability of annotations. We give an algorithm for
 implementing the declarative system; our algorithm is remarkably
 simple and wellbehaved, despite being both sound and complete.",
 paper = "Dunf13.pdf",
 keywords = "printed"
}

+@inproceedings{Sanu15,
+ author = "Sanuki, Masaru and Inaba, Daiju and Sasaki, Tateaki",
+ title = {{Computation of GCD of Sparse Multivariate Polynomials by
+ Extended Hensel Construction}},
+ booktitle = "17th Int. Symp. on Symbolic and Numeric Algorithms for
+ Scientific Computing",
+ publisher = "IEEE",
+ year = "2015",
+ abstract =
+ "Let $F(x,u_1,\ldots,u_i)$ be a squarefree multivariate polynomial
+ in main variable $x$ and subvariables $u_1\ldots u_i$. We say
+ that the leading coefficient (LC) of $F$ is singular if it
+ vanishes at the origin of the subvariables. A representative
+ algorithm for nonsparse multivariate polynomial GCD is the EZGCD
+ algorithm, which is based on the generalized Hensel construction
+ (GHC). In order to apply the GHC easily, we requires 1) the LC of
+ $F$ is nonsingular, 2) $F(x,0,\ldots,0)$ is squarefree, and 3)
+ the initial Hensel factor of GCD is ``lucky''. These requirements
+ are usually satisfied by the ``nonzero substitution'', i.e. to
+ shift the origin of subvariables. However, the nonzero
+ substitution may cause a drastic increase of the number of terms
+ of $F$ if $F$ is sparse. In 1993, Sasaki and Kako proposed the
+ extended Hensel construction (EHC) which does not perform the
+ nonzero substitution even if the LC is singular. Using the EHC,
+ Inaba implemented an algorithm of multivariate polynomial
+ factorization and verified that it is very useful for sparse
+ polynomials. In this paper, we apply the EHC for the computation
+ of GCD of sparse multivariate polynomials. In order to find a
+ lucky initial factor, we utilize the weighting of subvariables,
+ etc. Our naive implementation in Maple shows that our algorithm is
+ comparable in performance to Maple's GCD routine based on the
+ sparse interpolation.",
+ paper = "Sanu15.pdf"
+}
+
\end{chunk}
\index{Parisse, Bernard}
+\index{Griesmer, J.H.}
\begin{chunk}{axiom.bib}
@misc{Pari19,
 author = "Parisse, Bernard",
 title = {{Computing Huge Groebner Basis like Cyclic10 over
 $\mathbb{Q}$ with Giac}},
 link = "\url{https://hal.archivesouvertes.fr/hal02081648}",
 year = "2019",
 abstract =
 "We present a short description on how to finetune the
 modular algorithm implemented in the Giac computer algebra system
 to reconstruct large Groebner basis over $\mathbb{Q}$. The
 classical cyclic10 benchmark will serve as example.",
 paper = "Pari19.pdf"
+@article{Grie76,
+ author = "Griesmer, James",
+ title = {{Symbolic Mathematical Computation: A Survey}},
+ journal = "SIGSAM Bulletin",
+ volume = "10",
+ number = "2",
+ pages = "30--32",
+ year = "1976",
+ paper = "Grie76.pdf",
+ keywords = "axiomref"
}
\end{chunk}
\index{Denes, Maxime}
\index{Mortberg, Anders}
\index{Siles, Vincent}
+\index{Luther, Marko}
+\index{Strecker, Martin}
\begin{chunk}{axiom.bib}
@misc{Dene19,
 author = "Denes, Maxime and Mortberg, Anders and Siles, Vincent",
 title = {{A Refinementbased Approach to Computational Algebra in COQ}},
 year = "2019",
 link = "\url{www.cse.chalmers.se/~mortberg/papers/coqeal.pdf}",
+@misc{Luth98,
+ author = "Luther, Marko and Strecker, Martin",
+ title = {{A Guided Tour through TYPELAB}},
+ year = "1998",
abstract =
 "We describe a stepbystep approach to the implementation and
 formal verification of efficient algebraic algorithms. Formal
 specifications are expressed on rich data types which are suitable
 for deriving essential theoretical properties. These
 specifications are then refined to concrete implementations on
 more efficient data structures and linked to their abstract
 counterparts. We illustrate this methodology on key applications:
 matrix rank computation, Winograd's fast matrix product,
 Karatsuba's polynomial multiplication, and the gcd of multivariate
 polynomials.",
 paper = "Dene19.pdf",
 keywords = "printed",
+ "This report gives a survey of TYPELAB, a specification and
+ verification environment that integrates interactive proof
+ development and automated proof search. TYPELAB is based on a
+ constructive type theory, the Calculus of Constructions, which can
+ be understood as a combination of a typed $\lambda$-calculus and
+ an expressive higherorder logic. Distinctive features of the type
+ system are dependent function types for modeling polymorphism and
+ dependent record types for encoding specifications and
+ mathematical theories. After presenting an extended example which
+ demonstrates how program development by stepwise refinement of
+ specifications can be carried out, the theory underlying the
+ prover component of TYPELAB is described in detail. A calculus
+ with metavariables and explicit substitutions is introduced, and
+ the metatheoretic properties of this calculus are
+ analyzed. Furthermore, it is shown that this calculus provides an
+ adequate foundation for automated proof search in fragments of the
+ logic.",
+ paper = "Luth98.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Ly, Kim Quyen}
+\index{Ehrig, H.}
+\index{Kreowski, H.J.}
+\index{Mahr, B.}
+\index{Padawitz, P.}
\begin{chunk}{axiom.bib}
@misc{Lyxx15,
 author = "Ly, Kim Quyen",
 title = {{Formalization in Coq of Polynomial Interpretations on
 Rationals}},
+@article{Ehri82,
+ author = "Ehrig, H. and Kreowski, H.J. and Mahr, B. and Padawitz, P.",
+ title = {{Algebraic Implementation of Abstract Data Types}},
+ journal = "Theoretical Computer Science",
+ volume = "20",
+ pages = "209--263",
+ year = "1982",
+ abstract =
+ "Starting with a review of the theory of algebraic specifications
+ in the sense of the ADJgroup a new theory for algebraic
+ implementation of abstract data types is presented.
+
+ While main concepts of this new theory were given already at
+ several conferences this paper provides the full theory of
+ algebraic implementations developed in Berlin except of complexity
+ considerations which are given in a separate paper. This new
+ concept of algebraic implementations includes implementations for
+ algorithms in specific programming languages and on the other hand
+ it meets also the requirements for stepwise refinement of
+ structured programs and software systems as introduced by Dijkstra
+ and Wirth. On the syntactical level an algebraic implementation
+ corresponds to a system of recursive programs while the semantical
+ level is defined by algebraic constructions, called SYNTHESIS,
+ RESTRICTION and IDENTIFICATION. Moreover the concept allows
+ composition of implementations and a rigorous study of
+ correctness. The main results of the paper are different kinds of
+ correctness criteria which are applied to a number of illustrating
+ examples including the implementation of sets by hashtables.
+ Algebraic implementations of larger systems like a histogram or a
+ parts system are given in separate case studies which, however,
+ are not included in this paper.",
+ paper = "Ehri82.pdf"
+}
+
+\end{chunk}
+
+\index{de Moura, Leonardo}
+\index{Avigad, Jeremy}
+\index{Kong, Soonho}
+\index{Roux, Cody}
+\begin{chunk}{axiom.bib}
+@misc{Mour15,
+ author = "de Moura, Leonardo and Avigad, Jeremy and Kong, Soonho
+ and Roux, Cody",
+ title = {{Elaboration in Dependent Type Theory}},
+ link = "\url{https://arxiv.org/pdf/1505.04324.pdf}",
year = "2015",
 link = "\url{https://www.di.ens.fr/~quyen/publication/ly10.pdf}",
 paper = "Lyxx15.pdf",
+ abstract =
+ "To be usable in practice, interactive theoremprovers need to
+ provide convenient and efficient means of writing expressions,
+ definitions, and proofs. This involves inferring information that
+ is often left implicit in an ordinary mathematical text, and
+ resolving ambiguities in mathematical expressions. We refer to the
+ process of passing from a quasiformal and partiallyspecified
+ expression to a completely precise formal one as {\sl
+ elaboration}. We describe an elaboration algorithm for dependent
+ type theory that has been implemented in the Lean theorem
+ prover. Lean's elaborator supports higherorder unification, type
+ class inference, ad hoc overloading, insertion of coercions, the
+ use of tactics, and the computational reduction of terms. The
+ interactions between these components are subtle and complex, and
+ the elaboration algorithm has been carefully designed to balance
+ efficiency and usability. We describe the central design goals,
+ and the means by which they are achieved.",
+ paper = "Mour15.pdf",
keywords = "printed"
}
\end{chunk}
\index{Kovacs, Laura}
\index{Voronkov, Andrei}
\begin{chunk}{axiom.bib}
@misc{Kova13,
 author = "Kovacs, Laura and Voronkov, Andrei",
 title = {{FirstOrder Theorem Proving and Vampire}},
 year = "2013",
 link = "\url{http://www.cse.chalmers.se/~laurako/pub/CAV13_Kovacs.pdf}",
+\index{de Moura, Leonardo}
+\index{Kong, Soonho}
+\index{Avigad, Jeremy}
+\index{van Doorn, Floris}
+\index{von Raumer, Jakob}
+@misc{Mour19,
+ author = "de Moura, Leonardo and Kong, Soonho and Avigad, Jeremy
+ and van Doorn, Floris and von Raumer, Jakob",
+ title = {{The Lean Theorem Prover (system description)}},
+ link = "\url{http://florisvandoorn.com/papers/lean_description.pdf}",
+ year = "2019",
abstract =
 "In this paper we give a short introduction in firstorder theorem
 proving and the use of the theorem prover Vampire. We discuss the
 superposition calculus and explain the key concepts of saturation
 and redundancy elimination, present saturation algorithms and
 preprocessing, and demonstrate how these concepts are implemented
 in Vampire. Further, we also cover more recent topics and features
 of Vampire designed for advanced applications, including
 satisfiability checking, theory reasoning, interpolation,
 consequence elimination, and program analysis.",
 paper = "Kova13.pdf",
 keywords = "printed"
+ "Lean is a new open source theorem prover being developed at
+ Microsoft Research and Carnegie Mellon University, with a small
+ trusted kernel based on dependent type theory. It aims to bridge the
+ gap between interactive and automated theorem proving, by situating
+ automated tools and methods in a framework that supports user
+ interaction and the construction of fully specified axiomatic
+ proofs. Lean is an ongoing and longterm effort, but it already
+ provides many useful components, integrated development
+ environments, and a rich API which can be used to embed it into
+ other systems. It is currently being used to formalize category
+ theory, homotopy type theory, and abstract algebra. We describe the
+ project goals, system architecture, and main features, and we
+ discuss applications and continuing work.",
+ paper = "Mour19.pdf",
+ keywords = "printed, DONE"
}
\end{chunk}
\index{Kotelnikov, Evgenii}
\index{Kovacs, Laura}
\index{Reger, Giles}
\index{Voronkov, Andrei}
+\index{Dybjer, Peter}
\begin{chunk}{axiom.bib}
@inproceedings{Kote16,
 author = "Kotelnikov, Evgenii and Kovacs, Laura and Reger, Giles and
 Voronkov, Andrei",
 title = {{The Vampire and the FOOL}},
 booktitle = "SIGPLAN Conf. on Certified Programs and Proofs",
 year = "2016",
 publisher = "ACM",
 pages = "3748",
+@article{Dybj94,
+ author = "Dybjer, Peter",
+ title = {{Inductive Families}},
+ journal = "Formal Aspects of Computing",
+ volume = "6",
+ number = "4",
+ pages = "440--465",
+ year = "1994",
abstract =
 "This paper presents new features recently implemented in the
 theorem prover Vampire, namely support for firstorder logic with
 a first class boolean sort (FOOL) and polymorphic arrays. In
 addition to having a first class boolean sort, FOOL also contains
 ifthenelse and letin expressions. We argue that presented
 extensions facilitate reasoningbased program analysis, both by
 increasing the expressivity of firstorder reasoners and by gains
 in efficiency.",
 paper = "Kote16.pdf",
+ "A general formulation of inductive and recursive definitions in
+ MartinLof's type theory is presented. It extends Backhouse's
+ 'DoItYourself Type Theory' to include inductive definitions of
+ families of sets and definitions of functions by recursion on the
+ way elements of such sets are generated. The formulation is in
+ natural deduction and is intended to be a natural generalization
+ to type theory of MartinLof's theory of iterated inductive
+ definitions of predicate logic.
+
+ Formal criteria are given for correct formation and introduction
+ rules of a new set former capturing definition by strictly
+ positive, iterated, generalized induction. Moreover, there is an
+ inversion principle for deriving elimination and equality rules
+ from the formation and introduction rules. Finally, there is an
+ alternative schematic presentation of definition by recursion.
+
+ The resulting theory is a flexible and powerful language for
+ programming and constructive mathematics. We hint at the wealth of
+ possible applications by showing several basic examples: predicate
+ logic, generalized induction, and a formalization of the untyped
+ lambda calculus.",
+ paper = "Dybj94.pdf",
keywords = "printed"
}
\end{chunk}
+\end{chunk}
\index{Storjohann, Arne}
+\index{Selsam, Daniel}
\begin{chunk}{axiom.bib}
@inproceedings{Stor97,
 author = "Storjohann, Arne",
 title = {{A Solution to the extended GCD problem with applications}},
 booktitle = "ISSAC '97",
 publisher = "ACM",
 year = "1997",
 paper = "Stor97.pdf",
+@misc{Sels19,
+ author = "Selsam, Daniel",
+ title = {{CS240H: A Standalone Proofchecker for the Lean Theorem Prover}},
+ year = "2019",
+ link = "\url{http://www.scs.stanford.edu/16wi-cs240h/projects/selsam.pdf}",
+ paper = "Sels19.pdf",
keywords = "printed"

}
\end{chunk}
\index{Corless, Robert}
\index{Postma, Erik}
\index{Stoutemyer, David}
+\index{Birkhoff, Garrett}
\begin{chunk}{axiom.bib}
@inproceedings{Corl11,
 author = "Corless, Robert and Postma, Erik and Stoutemyer, David",
 title = {{GCD of Multivariate Approximate Polynomials using
 Beautification with the Subtractive Algorithm}},
 booktitle = "Int. Workshop on SymbolicNumeric Computation",
 publisher = "ACM",
 year = "2011",
 paper = "Corl11.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Cheng, Howard}
\index{Labahn, George}
\index{Zhou, Wei}
\begin{chunk}{axiom.bib}
@article{Chen08,
 author = "Cheng, Howard and Labahn, George and Zhou, Wei",
 title = {{Computing Polynomial LCD and GCD in Lagrange Basis}},
 journal = "Communications in Computer Algebra",
 volume = "42",
 number = "3",
 pages = "129130",
 year = "2008",
 abstract =
 "We discuss the verification of mathematical software solving
 polynomial systems symbolically by way of triangular
 decomposition. Standard verification techniques are highly
 resource consuming and apply only to polynomial systems which are
 easy to solve. We exhibit a new approach which manipulates
 constructible sets represented by regular systems. We provide
 comparative benchmarks of different verification procedures
 applied to four solvers on a large set of wellknown polynomial
 systems. Our experimental results illustrate the high efficiency
 of our new approach. In particular, we are able to verify
 triangular decomposition of polynomial systems which are not easy
 to solve.",
 paper = "Chen08.pdf",
 keywords = "printed"
+@article{Birk35,
+ author = "Birkhoff, Garrett",
+ title = {{On the Structure of Abstract Algebra}},
+ journal = "Proc. of the Cambridge Philosophical Society",
+ volume = "31",
+ year = "1935",
+ paper = "Birk35.pdf"
}
\end{chunk}
\index{Cheng, Howard}
\index{Labahn, George}
+\index{de Moura, Leonardo}
\begin{chunk}{axiom.bib}
@inproceedings{Chen06,
 author = "Cheng, Howard and Labahn, George",
 title = {{On Computing Polynomial GCDs in Alternate Bases}},
 booktitle = "ISSAC '06",
 publisher = "ACM",
 year = "2006",
 pages = "4754",
 abstract =
 "In this paper, we examine the problem of computing the greatest
 common divisor (GCD) of univariate polynomials represented in
 different bases. When the polynomials are represented in Newton
 basis or a basis of orthogonal polynomials, we show that the
 wellknown Sylvester matrix can be generalized. We give
 fractionfree and modular algorithms to directly compute the GCD
 in the alternate basis. These algorithms are suitable for
 computation in domains where growth of coefficients in
 intermediate computations are a central concern. In the cases of
 Newton basis and bases using certain orthogonal polynomials, we
 also show that the standard subresultant algorithm can be applied
 easily. If the degrees of the input polynomials is at most $n$ and
 the degree of the GCD is at least $n/2$, our algorithms outperform
 the corresponding algorithms using the standard power basis.",
 paper = "Chen06.pdf",
 keywords = "printed"
+@misc{Mour16,
+ author = "de Moura, Leonardo",
+ title = {{The Lean Theorem Prover}},
+ link = "\url{https://www.youtube.com/watch?v=69ytTKfSSgc}",
+ conference = "PLSE '16",
+ comment = "video",
+ year = "2016",
+ keywords = "DONE"
}
\end{chunk}
\index{Chen, Changbo}
\index{Maza, Marc Moreno}
+\index{Awodey, Steve}
\begin{chunk}{axiom.bib}
@article{Chen15,
 author = "Chen, Changbo and Maza, Marc Moreno",
 title = {{Simplification of Cylindrical Algebraic Formulas}},
 journal = "LNCS",
 volume = "9301",
 pages = "119134",
 comment = "Int. Workshop on Computer Algebra in Scientific Computing",
 paper = "Chen15.pdf"
+@misc{Awod12,
+ author = "Awodey, Steve",
+ title = {{Category Theory Foundations. Lectures 14}},
+ year = "2012",
+ comment = "Oregon Programming Language Summer School 2013",
+ link =
+ "\url{http://www.youtube.com/watch?v=ZKmodCApZwk&list=PL8Ky8IYL8Oh7awp0sqa82o7Ggt4AGhyf}"
}
\end{chunk}
\index{Chen, Changbo}
\index{Covanov, Svyatoslav}
\index{Mansouri, Farnam}
\index{Maza, Marc Moreno}
\index{Xie, Ning}
\index{Xie, Yuzhen}
+\index{Weirich, Stephanie}
+\index{Choudhury, Pritam}
+\index{Voizard, Antoine}
+\index{Eisenberg, Richard A.}
\begin{chunk}{axiom.bib}
@article{Chen14,
 author = "Chen, Changbo and Covanov, Svyatoslav and Mansouri, Farnam
 and Maza, Marc Moreno and Xie, Ning and Xie, Yuzhen",
 title = {{Basic Polynomial Algebra Subprograms}},
 journal = "Communications in Computer Algebra",
 volume = "48",
 number = "3/4",
 pages = "197201",
 year = "2014",
 paper = "Chen14.pdf"
+@misc{Weir19,
+ author = "Weirich, Stephanie and Choudhury, Pritam and Voizard,
+ Antoine and Eisenberg, Richard A.",
+ title = {{A Role for Dependent Types in Haskell (Extended Version)}},
+ link = "\url{https://arxiv.org/pdf/1905.13706.pdf}",
+ year = "2019",
+ abstract =
+ "Modern Haskell supports zerocost coercions, a mechanism where
+ types that share the same runtime representation may be freely
+ converted between. To make sure such conversions are safe and
+ desirable, this feature relies on a mechanism of roles to prohibit
+ invalid coercions. In this work, we show how to integrate roles
+ with dependent type systems and prove, using the Coq proof
+ assistant, that the resulting system is sound. We have designed
+ this work as a foundation for the addition of dependent types to
+ the Glasgow Haskell Compiler, but we also expect that it will be
+ of use to designers of other dependentlytyped languages who might
+ want to adopt Haskell's safe coercion feature.",
+ paper = "Weir19.pdf"
}
\end{chunk}
\index{Gleich, David}
+\index{Tennent, R.D.}
\begin{chunk}{axiom.bib}
@misc{Glei05,
 author = "Gleich, David",
 title = {{Finite Calculus: A Tutorial for Solving Nasty Sums}},
 link = "\url{}",
 year = "2005",
+@article{Tenn76,
+ author = "Tennent, R.D.",
+ title = {{The Denotational Semantics of Programming Languages}},
+ journal = "Communications of the ACM",
+ volume = "19",
+ number = "8",
+ pages = "437--453",
+ year = "1976",
abstract =
 "In this tutorial, I will first explain the need for finite
 calculus using an example sum I think is difficult to solve. Next,
 I will show where this sum actually occurs and why it is
 important. Following that, I will present all the mathematics
 behind finite calculus and a series of theorems to make it helpful
 before concluding with a set of examples to show that it really is
 useful.",
 paper = "Glei05.pdf",
+ "This paper is a tutorial introduction to the theory of
+ programming language semantics developed by D. Scott and
+ C. Strachey. The application of the theory to formal language
+ specification is demonstrated and other applications are
+ surveyed. The first language considered, LOOP, is very elementary
+ and its definition merely introduces the notion and methodology of
+ the approach. Then the semantic concepts of environments, stores,
+ and continuations are introduced to model classes of programming
+ language features and the underlying mathematical theory of
+ computation due to Scott is motivated and outlined. Finally, the
+ paper presents a formal definition of the language GEDANKEN.",
+ paper = "Tenn76.pdf",
keywords = "printed"
}
\end{chunk}
\index{Majewski, Bohdan}
\index{Havas, George}
+\index{Elliott, Conal}
\begin{chunk}{axiom.bib}
@article{Maje94,
 author = "Majewski, Bohdan and Havas, George",
 title = {{The Complexity of Greatest Common Divisor Computations}},
 journal = "LNCS",
 volume = "877",
 pages = "184--193",
 year = "1994",
 abstract =
 "We study the complexity of expressing the greatest common divisor
 of $n$ positive numbers as a linear combination of the
 numbers. We prove the NP-completeness of finding an optimal set of
 multipliers with respect to either the $L_0$ metric or the
 $L_\infty$ norm. We present and analyze a new method for
 expressing the gcd of $n$ numbers as their linear combination and
 give an upper bound on the size of the largest multiplier
 produced by this method, which is optimal.",
 paper = "Maje94.pdf",
+@inproceedings{Elli17,
+ author = "Elliott, Conal",
+ title = {{Compiling to Categories}},
+ booktitle = "Proc. ACM Program. Lang. Vol 1",
+ publisher = "ACM",
+ year = "2017",
+ link = "\url{http://conal.net/papers/compiling-to-categories/compiling-to-categories.pdf}",
+ abstract =
+ "It is well-known that the simply typed lambda-calculus is modeled
+ by any cartesian closed category (CCC). This correspondence
+ suggests giving typed functional programs a variety of
+ interpretations, each corresponding to a different category. A
+ convenient way to realize this idea is as a collection of
+ meaningpreserving transformations added to an existing compiler,
+ such as GHC for Haskell. This paper describes automatic
+ differentiation, incremental computation, and interval
+ analysis. Each such interpretation is a category easily defined in
+ Haskell (outside of the compiler). The general technique appears
+ to provide a compelling alternative to deeply embedded
+ domainspecific languages.",
+ paper = "Elli17.pdf",
keywords = "printed"
}
\end{chunk}
\index{Shallit, Jeffrey}
\index{Sorenson, Jonathan}
+\index{Harrison, John}
\begin{chunk}{axiom.bib}
@article{Shal94,
 author = "Shallit, Jeffrey and Sorenson, Jonathan",
 title = {{Analysis of a LeftShift Binary GCD Algorithm}},
 journal = "Journal of Symbolic Computation",
 volume = "17",
 number = "6",
 pages = "473--486",
 year = "1994",
 abstract =
 "We introduce a new left-shift binary algorithm, LSBGCD, for
 computing the greatest common divisor of two integers, and we
 provide an analysis of the worstcase behavior of the
 algorithm. The analysis depends on a theorem of Ramharter about
 the extremal behavior of certain continuants.",
 paper = "Shal94.pdf",
 keywords = "printed"
+@misc{Harr13,
+ author = "Harrison, John",
+ title = {{A Survey of Automated Theorem Proving}},
+ year = "2013",
+ link = "\url{https://www.lektorium.tv/lecture/14805}"
}
\end{chunk}
\index{Kozen, Dexter}
\index{Landau, Susan}
\index{Zippel, Richard}
+\index{Ganesalingam, M.}
+\index{Gowers, W.T.}
\begin{chunk}{axiom.bib}
@article{Koze94,
 author = "Kozen, Dexter and Landau, Susan and Zippel, Richard",
 title = {{Decomposition of Algebraic Functions}},
 journal = "LNCS",
 volume = "877",
 pages = "80--92",
 year = "1994",
+@article{Gane17,
+ author = "Ganesalingam, M. and Gowers, W.T.",
+ title = {{A Fully Automatic Theorem Prover with HumanStyle Output}},
+ journal = "J. Automated Reasoning",
+ volume = "58",
+ pages = "253--291",
+ year = "2017",
abstract =
 "Functional decomposition  whether a function $f(x)$ can be
 written as a composition of functions $g(h(x))$ in a nontrivial
 way  is an important primitive in symbolic computation
 systems. The problem of univariate polynomial decomposition was
 shown to have an efficient solution by Kozen and Landau.
 Dickerson and von zur Gathen gave algorithms for certain
 multivariate cases. Zippel showed how to decompose rational
 functions. In this paper, we address the issue of decomposition of
 algebraic functions. We show that the problem is related to
 univariate resultants in algebraic function fields, and in fact
 can be reformulated as a problem of resultant decomposition. We
 characterize all decompositions of a given algebraic function up
 to isomorphism, and give an exponential time algorithm for finding
 a nontrivial one if it exists. The algorithm involves genus
 calculations and constructing transcendental generators of fields
 of genus zero.",
 paper = "Koze94.pdf"
}

\end{chunk}

\index{Lipton, Richard J.}
\begin{chunk}{axiom.bib}
@article{Lipt94,
 author = "Lipton, Richard J.",
 title = {{StraightLine Complexity and Integer Factorization}},
 journal = "LNCS",
 volume = "877",
 pages = "71--79",
 year = "1994",
 abstract =
 "We show that if polynomials with many rational roots have
 polynomial length straightline complexity, then integer
 factorization is 'easy'",
 paper = "Lipt94.pdf"
+ "This paper describes a program that solves elementary
+ mathematical problems, mostly in metric space theory, and presents
+ solutions that are hard to distinguish from solutions that might
+ be written by a human mathematician.",
+ paper = "Gane17.pdf"
}
\end{chunk}
\index{Chen, Changbo}
\index{Maza, Marc Moreno}
+\index{Ganzinger, Harald}
\begin{chunk}{axiom.bib}
@article{Chen16,
 author = "Chen, Changbo and Maza, Marc Moreno",
 title = {{Quantifier Elimination by Cylindrical Algebraic
 Decomposition based on Regular Chains}},
 journal = "Journal of Symbolic Computation",
 volume = "75",
 pages = "74--93",
 year = "2016",
 abstract =
 "A quantifier elimination algorithm by cylindrical algebraic
 decomposition based on regular chains is presented. The main idea
 is to refine a complex cylindrical tree until the signs of
 polynomials appearing in the tree are sufficient to distinguish
 the true and false cells. We report an implementation of our
 algorithm in the RegularChains library in MAPLE and illustrate its
 effectiveness by examples.",
 paper = "Chen16.pdf"
+@article{Ganz80,
+ author = "Ganzinger, Harald",
+ title = {{Transforming Denotational Semantics into Practical
+ Attribute Grammars}},
+ journal = "LNCS",
+ volume = "54",
+ pages = "1--69",
+ year = "1980",
+ paper = "Ganz80.pdf"
}
\end{chunk}
\index{Kotelnikov, Evgenii}
+\index{Jones, Neil D.}
+\index{Schmidt, David A.}
\begin{chunk}{axiom.bib}
@phdthesis{Kote18,
 author = "Kotelnikov, Evgenii",
 title = {{Automated Theorem Proving with Extensions of FirstOrder Logic}},
 school = "Chalmers",
 year = "2018",
+@article{Jone80,
+ author = "Jones, Neil D. and Schmidt, David A.",
+ title = {{Compiler Generation from Denotational Semantics}},
+ journal = "LNCS",
+ volume = "54",
+ pages = "70--93",
+ year = "1980",
abstract =
 "Automated theorem provers are computer programs that check
 whether a logical conjecture follows from a set of logical
 statements. The conjecture and the statements are expressed in the
 language of some formal logic, such as firstorder logic. Theorem
 provers for firstorder logic have been used for automation in
 proof assistants, verification of programs, static analysis of
 networks, and other purposes. However, the efficient usage of
 these provers remains challenging. One of the challenges is the
 complexity of translating domain problems to firstorder
 logic. Not only can such translation be cumbersome due to semantic
 differences between the domain and the logic, but it might
 inadvertently result in problems that provers cannot easily handle.

 The work presented in the thesis addresses this challenge by
 developing an extension of firstorder logic named FOOL. FOOL
 contains syntactical features of programming languages and more
 expressive logics, is friendly for translation of problems from
 various domains, and can be efficiently supported by existing
 theorem provers. We describe the syntax and semantics of FOOL and
 present a simple translation from FOOL to plain firstorder
 logic. We describe an efficient clausal normal form transformation
 algorithm for FOOL and based on it implement a support for FOOL in
 the Vampire theorem prover. We illustrate the efficient use of
 FOOL for program verification by describing a concise encoding of
 next state relations of imperative programs in FOOL. We show a
 usage of features of FOOL in problems of static analysis of
 networks. We demonstrate the efficiency of automated theorem
 proving in FOOL with an extensive set of experiments. In these
 experiments we compare the performance of Vampire on a large
 collection of problems from various sources translated to FOOL and
 ordinary firstorder logic. Finally, we fix the syntax for FOOL in
 TPTP, the standard language of firstorder theorem provers.",
 paper = "Kote18.pdf",
 keywords = "printed"
+ "A methodology is described for generating provably correct
+ compilers from denotational definitions of programming
+ languages. An application is given to produce compilers into STM
+ code (an STM or state transition machine is a flowchartlike
+ program, lowlevel enough to be translated into efficient code on
+ conventional computers). First, a compiler $\phi:LAMC\rightarrow
+ STM$ from a lambda calculus dialect is defined. Any denotational
+ definition $\Delta$ of language $L$ defines a map
+ $\overset{\Delta}{\rightarrow}:L\rightarrow LAMC$, so
+ $\overset{\Delta}{\rightarrow}\circ \phi$ compiles $L$ into STM
+ code. Correctness follows from the correctness of $\phi$.
+
+ The algebraic framework of Morris, ADJ, etc. is used. The set of
+ STMs is given an algebraic structure so any
+ $\overset{\Delta}{\rightarrow} \circ \phi$ may be specified by giving
+ a derived operator on STM for each syntax rule of $L$.
+
+ This approach yields quite redundant object programs, so the paper
+ ends by describing two flow analytic optimization methods. The
+ first analyzes an alreadyproduced STM to obtain information about
+ its runtime behaviour which is used to optimize the STM. The
+ second analyzes the generated compiling scheme to determine
+ runtime properties of object programs in general which a compiler
+ can use to produce less redundant STMs.",
+ paper = "Jone80.pdf"
}
\end{chunk}
\index{Dewar, Michael}
+\index{Raskovsky, Martin}
+\index{Collier, Phil}
\begin{chunk}{axiom.bib}
@inproceedings{Dewa92,
 author = "Dewar, Michael",
 title = {{Using Computer Algebra to Select Numerical Algorithms}},
 booktitle = "ISSAC '92",
 publisher = "ACM",
 year = "1992",
 pages = "18",
 isbn = "0897914899",
+@article{Rask80,
+ author = "Raskovsky, Martin and Collier, Phil",
+ title = {{From Standard to Implementation Denotational Semantics}},
+ journal = "LNCS",
+ volume = "54",
+ pages = "94--139",
+ year = "1980",
abstract =
 "Many reallife problems require a combination of both symbolic
 and numerical methods for their solution. This has led to the
 development of integrated, interactive symbolic / numeric packages
 which use a computer algebra system for the former and a standard
 subroutine library for the latter. These systems may also be viewed
 as simplified frontends to the numerical library. To use these
 packages, however, a user must be able to select which of the many
 available routines is the most appropriate for his or her problem,
 which contrasts with the 'blackbox' style interfaces available in
 computer algebra systems. This paper describes how a computer
 algebra system can be used to make this decision, thus providing a
 muchsimplified and orthogonal interface.",
 paper = "Dewa92.pdf"
+ "We are developing a compiler compiler. It takes as input the
+ formal definition of a programming language in Denotational
+ Semantics and produces as output a fairly efficient compiler
+ written in a system programming language which in turn will
+ produce code for a real machine. This work mainly deals with the
+ code generation parts.",
+ paper = "Rask80.pdf"
}
\end{chunk}
\index{Dupee, Brian J.}
\index{Davenport, James H.}
+\index{Gaudel, M.C.}
\begin{chunk}{axiom.bib}
@article{Dupe96,
 author = "Dupee, Brian J. and Davenport, James H.",
 title = {{An Intelligent Interface to Numerical Routines}},
+@article{Gaud80,
+ author = "Gaudel, M.C.",
+ title = {{Specification of Compilers as Abstract Data Type
+ Representations}},
journal = "LNCS",
 number = "1128",
 pages = "252--262",
 year = "1996",
+ volume = "54",
+ pages = "140--164",
+ year = "1980",
abstract =
 "Links from Computer Algebra Systems to Numerical Libraries have
 been increasingly made available. However, they remain, like the
 numerical routines which comprise the libraries, difficult to use
 by a novice and there is little help in choosing the appropriate
 routine for any given problem, should there be a choice.

 Computer Algebra Systems use generic names for each problem
 area. For example, 'integrate' (or 'int') is used for integration
 of a function, whatever method the code may use. Numeric
 interfaces still use different names for each method together with
 a variety of extra parameters, some of which may be
 optional. Ideally, we should extend the generic name structure to
 cover numerical routines. This would then, necessarily, require
 algorithms for making an assessment of the efficacy of different
 methods where such a choice exists.

 This paper considers the link to the NAG Fortran Library from
 version 2.0 of Axiom and shows how we can build on this to extend
 and simplify the interface using an expert system for choosing and
 using the numerical routines.",
 paper = "Dupe96.pdf",
 keywords = "printed"
+ "This paper presents a method for specifying and proving
+ compilers. This method is based on the algebraic data types
+ ideas. The main points are:
+ \begin{itemize}
+ \item to each language is associated an algebraic abstract data type
+ \item the semantic value of a program is given as a term of this
+ data type
+ \item the translation of the semantic values of source programs
+ into semantic values of target programs is specified and proved as
+ the representation of an algebraic data type by another one.
+ \end{itemize}
+ A compiler generator, PERLUETTE, which accepts such specifications
+ as input is described. The proof technique is discussed.",
+ paper = "Gaud80.pdf"
}
\end{chunk}
\index{Davenport, James H.}
\index{Dewar, Michael C.}
\index{Richardson, Michael G.}
+\index{Thatcher, James W.}
+\index{Wagner, Eric G.}
+\index{Wright, Jesse B.}
\begin{chunk}{axiom.bib}
@inbook{Dave92f,
 author = "Davenport, James H. and Dewar, Michael C. and
 Richardson, Michael G.",
 title = {{Symbolic and Numeric Computation: the Example of IRENA}},
 booktitle = "Symbolic and Numerical Computation for Artificial Intelligence",
 pages = "347--362",
 year = "1992",
 publisher = "Academic Press",

 abstract =
 "Historically symbolic and numeric computation have pursued
 different lines of evolution, have been written in different
 languages and generally seen to be competitive rather than
 complementary techniques. Even when both were used to solve a
 problem ad hoc methods were used to transfer the data between
 them.

 We first discuss the reasons for this dichotomy, and then present
 IRENA, a system being developed by the authors to present an
 integrated environment with all the facilities of Reduce combined
 with the functionality of the NAG FORTRAN library.

 Not only does IRENA allow the Reduce user to make calls to the NAG
 Library interactively, it also converts a natural input
 representation to the required unnatural FORTRAN one and
 viceversa on output, which results in a much more intuitive
 interface. Many parameters have default values and so need not be
 supplied by the user.",
 paper = "Dave92f.pdf"
}

\end{chunk}

\index{Dewar, Mike}
\index{Carlisle, David}
\begin{chunk}{axiom.bib}
@inproceedings{Dewa01,
 author = "Dewar, Mike and Carlisle, David",
 title = {{Mathematical Software: The Next Generation?}},
 booktitle = "Int. Workshop on Mathematical Knowledge Management",
 link = "\url{https://www.emis.de/proceedings/MKM2001/printed/dewar.pdf}",
 publisher = "RISC",
 year = "2001",
 paper = "Dewa01.pdf",
 keywords = "axiomref"
+@article{That80,
+ author = "Thatcher, James W. and Wagner, Eric G. and Wright, Jesse B.",
+ title = {{More on Advice on Structuring Compilers and Proving Them
+ Correct}},
+ journal = "LNCS",
+ volume = "54",
+ pages = "165--188",
+ year = "1980",
+ paper = "That80.pdf"
}
\end{chunk}
\index{Bodnar, Gabor}
\index{Kaltenbacher, Barbara}
\index{Pau, Petru}
\index{Schicho, Josef}
+\index{Madsen, Ole Lehrmann}
\begin{chunk}{axiom.bib}
@article{Bodn01,
 author = "Bodnar, Gabor and Kaltenbacher, Barbara and Pau, Petru and
 Schicho, Josef",
 title = {{Exact Real Computation in Computer Algebra}},
+@article{Mads80,
+ author = "Madsen, Ole Lehrmann",
+ title = {{On Defining Semantics by means of Extended Attribute Grammars}},
journal = "LNCS",
 volume = "2630",
 pages = "279--292",
 year = "2001",
 abstract =
 "Exact real computation allows many of the advantages of numerical
 computation (e.g. high performance) to be accessed also in
 symbolic computation, providing validated results. In this paper
 we present our approach to build a transparent and easy to use
 connection between the two worlds, using this paradigm. The main
 discussed topics are representation of exact real objects,
 operations on exact real matrices, polynomial greatest common
 divisor and root computation. Some of these problems are
 illposed; we use regularization methods to solve them.",
 paper = "Bodn01.pdf"
+ volume = "54",
+ pages = "259--299",
+ year = "1980",
+ paper = "Mads80.pdf"
}
\end{chunk}
\index{Barthe, G.}
\index{Elbers, H.}
+\index{Jones, Neil D.}
+\index{Madsen, Michael}
\begin{chunk}{axiom.bib}
@misc{Bart96,
 author = "Barthe, G. and Elbers, H.",
 title = {{Towards Lean Proof Checking}},
 year = "1996",
+@article{Jone80a,
+ author = "Jones, Neil D. and Madsen, Michael",
+ title = {{AttributeInfluenced LR Parsing}},
+ journal = "LNCS",
+ volume = "54",
+ pages = "393--407",
+ year = "1980",
abstract =
 "Logical formal systems are inefficient at computations. In order
 to increase their efficiency, we aim to extend these systems with
 computational power. In this paper, we suggest a general, powerful
 syntax, called oracle types, to extend type theories with
 computational power; the resulting systems, which combine the
 logical abilities of logical formal systems and the computational
 power of term rewriting systems, provide a suitable environment
 for theorem proving. As a practical application, we present an
 extension of the theorem prover Lego with oracle types and
 illustrate the use of this new system in performing algebraic
 computations. Our implementation of oracle types is very flexible
 and allows rewriting to be performed either inside Lego or by
 Reduce, an efficient symbolic computation system. In our view, the
 main novelty of our approach is to combine a sound theoretical
 foundation with an efficient implementation. Besides, our work
 provides the first attempt to combine symbolic computation systems
 with theorem provers such as Coq and Lego, which are based on
 intensional type theories.",
 paper = "Bart96.pdf",
 keywords = "printed"
+ "Methods are described which make it possible, when given an
+ arbitrary attribute grammar (or AG),
+ \begin{enumerate}
+ \item to analyze the AG to determine which of its attributes may
+ be computed during LR parsing,
+ \item to augment the parser with instructions and data structures
+ to compute many attributes during parsing,
+ \item to use attribute values to assist the parsing process
+ (e.g. to use symbol table information to decide whether P(X) is an
+ array element or a function call).
+ \end{enumerate}",
+ paper = "Jone80a.pdf"
}
\end{chunk}
\index{Bostan, Alin}
\index{Schost, Eric}
+\index{Bernstein, Daniel J.}
+\index{Yang, BoYin}
\begin{chunk}{axiom.bib}
@article{Bost13,
 author = "Bostan, Alin and Schost, Eric",
 title = {{A Simple and Fast Algorithm for Computing Exponentials
 of Power Series}},
 journal = "Information Processing Letters",
 volume = "13",
 pages = "754--756",
 year = "2013",
 abstract =
 "As was initially shown by Brent, exponentials of truncated power
 series can be computed using a constant number of polynomial
 multiplications. This note gives a relatively simple algorithm
 with a low constant factor",
 paper = "Bost13.pdf"
+@misc{Bern19,
+ author = "Bernstein, Daniel J. and Yang, BoYin",
+ title = {{Fast ConstantTime GCD and Modular Inversion}},
+ year = "2019",
+ link = "\url{https://gcd.cr.yp.to/safegcd-20190413.pdf}",
+ abstract =
+ "This paper introduces streamlined constanttime variants of
+ Euclid's algorithm, both for polynomial inputs and for integer
+ inputs. As concrete applications, this paper saves time in (1)
+ modular inversion for Curve25519, which was previously believed to
+ be handled much more efficiently by Fermat's method, and (2) key
+ generation for the ntruhrss701 and sntrup4591761 latticebased
+ cryptosystems.",
+ paper = "Bern19.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Piskac, Ruzica}
+\index{Necula, George Ciprian}
\begin{chunk}{axiom.bib}
@inproceedings{Pisk15,
 author = "Piskac, Ruzica",
 title = {{From Decision Procedures to Synthesis Procedures}},
 booktitle = "Symp. on Symbolic and Numeric Algorithms for
 Scientific Computing",
+@phdthesis{Necu98,
+ author = "Necula, George Ciprian",
+ title = {{Compiling with Proofs}},
+ school = "Carnegie Mellon University",
+ year = "1998",
+ link = "\url{https://www.cs.cmu.edu/~rwh/theses/necula.pdf}",
+ abstract =
+ "One of the major challenges of building software systems is to
+ ensure that the various components fit together in a welldefined
+ manner. This problem is exacerbated by the recent advent of
+ software components whose origin is unknown or inherently
+ untrusted, such as mobile code or user extensions for operating
+ system kernels or database servers. Such extensions are useful for
+ implementing an efficient interaction model between a client and a
+ server because several data exchanges between them can be saved at
+ the cost of a single code exchange.
+
+ In this dissertation, I propose to tackle such system integrity
+ and security problems with techniques from mathematical logic and
+ programming language semantics. I propose a framework, called
+ {\sl proofcarrying code}, in which the extension provider sends
+ along with the extension code a representation of a formal proof
+ that the code meets certain safety and correctness
+ requirements. Then, the code receiver can ensure the safety of
+ executing the extension by validating the attached proof. The
+ major advantages of proofcarrying code are that it requires a
+ simple trusted infrastructure and that it does not impose runtime
+ penalties for the purpose of ensuring safety.
+
+ In addition to the concept of proofcarrying code, this
+ dissertation contributes the idea of certifying compilation. A
+ {\sl certifying compiler} emits, in addition to optimized target
+ code, function specifications and loop invariants that enable a
+ theoremproving agent to prove nontrivial properties of the
+ target code, such as type safety. Such a certifying compiler,
+ along with a proofgenerating theorem prover, is not only a
+ convenient producer of proofcarrying code but also a powerful
+ softwareengineering tool. The certifier also acts as an effective
+ referee for the correctness of each compilation, thus simplifying
+ considerably compiler testing and maintenance.
+
+ A complete system for proofcarrying code must also contain a
+ {\sl proofgenerating theorem prover} for the purpose of producing
+ the attached proofs of safety. This dissertation shows how
+ standard decision procedures can be adapted so that they can
+ produce detailed proofs of the proved predicates and also how
+ these proofs can be encoded compactly and checked
+ efficiently. Just like for the certifying compiler, a
+ proofgenerating theorem prover has significant software
+ engineering advantages over a traditional prover. In this case, a
+ simple proof checker can ensure the soundness of each successful
+ proving task and indirectly assist in testing and maintenance of
+ the theorem prover.",
+ paper = "Necu98.pdf"
+}
+
+\end{chunk}
+
+\index{Cramer, Marcos}
+\index{Koepke, Peter}
+\index{Schroder, Bernhard}
+\begin{chunk}{axiom.bib}
+@article{Cram11,
+ author = "Cramer, Marcos and Koepke, Peter and Schroder, Bernhard",
+ title = {{Parsing and Disambiguation of Symbolic Mathematics in the
+ Naproche System}},
+ journal = "LNAI",
+ number = "6824",
+ pages = "180195",
+ year = "2011",
+ publisher = "Springer",
+ abstract =
+ "The Naproche system is a system for linguistically analysing and
+ proofchecking mathematical texts written in a controlled natural
+ language. The aim is to have an input language that is as close as
+ possible to the language that mathematicians actually use when
+ writing textbooks or papers.
+
+ Mathematical texts consist of a combination of natural language
+ and symbolic mathematics, with symbolic mathematics obeying its
+ own syntactic rules. We discuss the difficulties that a program
+ for parsing and disambiguating symbolic mathematics must face and
+ present how these difficulties have been tackled in the Naproche
+ system. One of these difficulties is the fact that information
+ provided in the preceding context  including information
+ provided in natural language  can influence the way a symbolic
+ expression has to be disambiguated.",
+ paper = "Cram11.pdf"
+}
+
+\end{chunk}
+
+\index{Altenkirch, Thorsten}
+\index{McBride, Conor}
+\index{Swierstra, Wouter}
+\begin{chunk}{axiom.bib}
+@inproceedings{Alte07,
+ author = "Altenkirch, Thorsten and McBride, Conor and Swierstra, Wouter",
+ title = {{Observational Equality, Now!}},
+ booktitle = "ACM Workshop Programming Languages meets Program
+ Verification",
publisher = "ACM",
 year = "2015",
 abstract =
 "Software synthesis is a technique for automatically generating
 code from a given specification. The goal of software synthesis is
 to make software development easier while increasing both the
 productivity of the programmer and the correctness of the produced
 code. In this paper we present an approach to synthesis that
 relies on the use of automated reasoning and decision
 procedures. First we describe how to generalize decision
 procedures into predictable and complete synthesis
 procedures. Here completeness means that the procedure is
 guaranteed to find code that satisfies the given specification. We
 illustrate the process of turning a decision procedure into a
 synthesis procedure using linear integer arithmetic as an example.

 However, writing a complete specification can be a tedious task,
 sometimes even harder than writing the code itself. To overcome
 this problem, ideally the user could provide a few inputoutput
 examples, and then the code should be automatically derived. We
 outline how to broaden usability and applications of current
 software synthesis techniques. We conclude with an outlook on
 possible future research directions and applications of synthesis
 procedures.",
 paper = "Pisk15.pdf"
}

\end{chunk}

\index{Havas, George}
\index{Majewski, Bohdan}
\index{Matthews, K.R.}
\begin{chunk}{axiom.bib}
@techreport{Hava95,
 author = "Havas, George and Majewski, Bohdan and Matthews, K.R.",
 title = {{Extended GCD Algorithms}},
 type = "technical report",
 institution = "University of Queensland",
 number = "TR0302",
 year = "1995",
 abstract =
 "Extended gcd calculation has a long history and plays an
 important role in computational number theory and linear
 algebra. Recent results have shown that finding optimal
 multipliers in extended gcd calculations is difficult. We study
 algorithms for finding good multipliers and present new algorithms
 with improved performance. We present a wellperforming algorithm
 which is based on lattice basis reduction methods and may be
 formally analyzed. We also give a relatively fast algorithm with
 moderate performance.",
 paper = "Hava95.pdf",
 keywords = "printed"
+ pages = "5768",
+ year = "2007"
}
\end{chunk}
\index{Ritt, J.F.}
+\index{Asperti, Andrea}
+\index{Ricciotti, Wilmer}
+\index{Coen, Claudio Sacerdoti}
+\index{Tassi, Enrico}
\begin{chunk}{axiom.bib}
 @article{Ritt25,
 author = "Ritt, J.F.",
 title = {{Elementary Functions and their Inverses}},
 journal = "Transactions of the American Mathematical Society",
 volume = "27",
 pages = "68--90",
 year = "1925",
 paper = "Ritt25.pdf"
+@article{Aspe09a,
+ author = "Asperti, Andrea and Ricciotti, Wilmer and Coen, Claudio
+ Sacerdoti and Tassi, Enrico",
+ title = {{Hints in Unification}},
+ journal = "LNCS",
+ volume = "5674",
+ pages = "84--98",
+ year = "2009",
+ isbn = "9783642033582",
+ abstract =
+ "Several mechanisms such as Canonical Structures, Type Classes, or
+ Pullbacks have been recently introduced with the aim to improve the
+ power and flexibility of the type inference algorithm for interactive
+ theorem provers. We claim that all these mechanisms are particular
+ instances of a simpler and more general technique, just consisting in
+ providing suitable hints to the unification procedure underlying type
+ inference. This allows a simple, modular and not intrusive
+ implementation of all the above mentioned techniques, opening at the
+ same time innovative and unexpected perspectives on its possible
+ applications.",
+ paper = "Aspe09a.pdf"
}
\end{chunk}
\index{Risch, Robert H.}
+\index{Avigad, Jeremy}
\begin{chunk}{axiom.bib}
@article{Risc76,
 author = "Risch, Robert H.",
 title = {{Implicitly Elementary Integrals}},
 journal = "Proc. Amer. Math.",
 volume = "57",
+@article{Avig07,
+ author = "Avigad, Jeremy",
+ title = {{A Formally Verified Proof of the Prime Number Theorem}},
+ journal = "ACM Trans. Comput. Logic",
+ volume = "9",
number = "1",
 pages = "1--7",
 year = "1976",
 paper = "Risc76.pdf",
 keywords = "printed"
+ pages = "2",
+ year = "2007"
}
\end{chunk}
\index{Risch, Robert H.}
+\index{Barendregt, Hendrik Pieter}
\begin{chunk}{axiom.bib}
@article{Risc79,
 author = "Risch, Robert H.",
 title = {{Algebraic Properties of the Elementary Functions of Analysis}},
 journal = "American Journal of Mathematics",
 volume = "101",
 number = "4",
 pages = "743759",
 abstract =
 "The elementary functions of a complex variable $z$ are those
 functions built up from the rational functions of $z$ by
 exponentiation, taking logarithms, and algebraic operations. The
 purpose of this paper is first, to prove a 'structure theorem'
 which shows that if an algebraic relation holds among a set of
 elementary functions, then they must satisfy an algebraic relation
 of a special kind. Then we make four applications of this theorem,
 obtaining both new and old results which are described here
 briefly (and imprecisely).
 \begin{enumerate}
 \item An algorithm is given for telling when two elementary
 expressions define the same function.
 \item A characterization is derived of those ordinary differential
 equations having elementary solutions
 \item The four basic functions of elementary calculus -- exp, log,
 tan, tan$^{-1}$ -- are shown to be 'irredundant'
 \item A characterization is given of elementary functions
 possessing elementary inverses.",
 paper = "Risc79.pdf",
+@article{Bare91,
+ author = "Barendregt, Hendrik Pieter",
+ title = {{An Introduction to Generalized Type Systems}},
+ journal = "Journal of Functional Programming",
+ volume = "1",
+ number = "2",
+ year = "1991",
+ pages = "125--154",
+ abstract =
+ "Programming languages often come with type systems. Some of these are
+ simple, others are sophisticated. As a stylistic representation of
+ types in programming languages several versions of typed lambda
+ calculus are studied. During the last 20 years many of these systems
+ have appeared, so there is some need of classification. Working
+ towards a taxonomy, Barendregt (1991) gives a finestructure of the
+ theory of constructions (Coquand and Huet 1988) in the form of a
+ canonical cube of eight type systems ordered by inclusion. Berardi
+ (1988) and Terlouw (1988) have independently generalized the method of
+ constructing systems in the λcube. Moreover, Berardi (1988, 1990)
+ showed that the generalized type systems are flexible enough to
+ describe many logical systems. In that way the wellknown
+ propositionsastypes interpretation obtains a nice canonical form.",
+ paper = "Bare91.pdf",
keywords = "printed"
}
\end{chunk}
\index{Moses, Joel}
+\index{Bertot, Yves}
+\index{Cast\'eran, Pierre}
\begin{chunk}{axiom.bib}
@article{Mose72,
 author = "Moses, Joel",
 title = {{Toward a General Theory of Special Functions}},
 journal = "Communications of the ACM",
 volume = "15",
 number = "7",
 pages = "550554",
 year = "1972",
 abstract =
 "A list of a number of natural developments for the field of
 algebraic manipulation is given. Then the prospects for a general
 theory of functions defined by ordinary differential equations are
 discussed. The claim is made that recent developments in
 mathematics indicate that it should be possible to algorithmically
 generate many properties of solutions to differential
 equations. Such a theory is preferable to a less general effort to
 make algebraic manipulation systems knowledgeable about the usual
 special functions (e.g. exponential, hypergeometric).",
 paper = "Mose72.pdf",
 keywords = "printed"
+@book{Bert04,
+ author = {Bertot, Yves and Cast\'eran, Pierre},
+ title = {{Interactive Theorem Proving and Program Development}},
+ publisher = "Springer",
+ year = "2004",
+ isbn = "3-540-20854-2",
+ abstract = "
+ Coq is an interactive proof assistant for the development of
+ mathematical theories and formally certified software. It is based on
+ a theory called the calculus of inductive constructions, a variant of
+ type theory.
+
+ This book provides a pragmatic introduction to the development of
+ proofs and certified programs using Coq. With its large collection of
+ examples and exercises it is an invaluable tool for researchers,
+ students, and engineers interested in formal methods and the
+ development of zero-fault software."
}
\end{chunk}
\index{Muller, Jean-Michel}
+\index{Bertot, Yves}
+\index{Gonthier, Georges}
+\index{Biha, Sidi Ould}
+\index{Pasca, Ioana}
\begin{chunk}{axiom.bib}
@book{Mull16,
 author = "Muller, Jean-Michel",
 title = {{Elementary Functions: Algorithms and Implementation}},
 isbn = "9781489979810",
 publisher = "Birkhauser",
 year = "2016",
 paper = "Mull16.pdf"
+@inproceedings{Bert08,
+ author = "Bertot, Yves and Gonthier, Georges and Biha, Sidi Ould and
+ Pasca, Ioana",
+ title = {{Canonical Big Operators}},
+ booktitle = "Theorem Proving in Higher Order Logics",
+ publisher = "Springer",
+ pages = "86--101",
+ year = "2008"
}
\end{chunk}
\index{Fitt, A.D.}
\index{Hoare, G.T.Q}
+\index{Blanqui, Frederic}
+\index{Jouannaud, Jean-Pierre}
+\index{Okada, Mitsuhiro}
\begin{chunk}{axiom.bib}
@article{Fitt93,
 author = "Fitt, A.D. and Hoare, G.T.Q",
 title = {{The ClosedForm Integration of Arbitrary Functions}},
 journal = "The Mathematical Gazette",
 volume = "77",
 number = "479",
 pages = "227236",
 year = "1993",
 paper = "Fitt93.pdf",
+@inproceedings{Blan99,
+ author = "Blanqui, Frederic and Jouannaud, Jean-Pierre and Okada, Mitsuhiro",
+ title = {{The Calculus of Algebraic Constructions}},
+ booktitle = "Rewriting Techniques and Applications RTA99",
+ year = "1999",
+ publisher = "LNCS 1631",
+ link = "\url{https://hal.inria.fr/inria00105545v1/document}",
+ abstract =
+ "This paper is concerned with the foundations of the Calculus of
+ Algebraic Constructions (CAC), an extension of the Calculus of
+ Constructions by inductive data types. CAC generalizes inductive
+ types equipped with higher-order primitive recursion, by providing
+ definitions of functions by pattern-matching which capture recursor
+ definitions for arbitrary non-dependent and non-polymorphic inductive
+ types satisfying a strict positivity condition. CAC also
+ generalizes the first-order framework of abstract data types by
+ providing dependent types and higher-order rewrite rules.",
+ paper = "Blan99.pdf",
keywords = "printed"
}
\end{chunk}
\index{Schorre, D.V.}
+\index{Church, Alonzo}
\begin{chunk}{axiom.bib}
@inproceedings{Scho64,
 author = "Schorre, D.V.",
 title = {{META II: A SyntaxOriented Compiler Writing Language}},
 booktitle = "19th National Conference of the ACM",
 publisher = "ACM",
 year = "1964",
 abstract =
 "META II is a compiler writing language which consists of syntax
 equations resembling Backus normal form and into which
 instructions to output assembly language commands are
 inserted. Compilers have been written in this language for VALGOL
 I and VALGOL II. The former is a simple algebraic language
 designed for the purpose of illustrating META II. The latter
 contains a fairly large subset of ALGOL 60.

 The method of writing compilers which is given in detail in the
 paper may be explained briefly as follows. Each syntax equation is
 translated into a recursive subroutine which tests the input
 string for a particular phrase structure, and deletes it if
 found. Backup is avoided by the extensive use of factoring in the
 syntax equations. For each source language, an interpreter is
 written and programs are compiled into that interpretive language.

 META II is not intended as a standard language which everyone will
 use to write compilers. Rather, it is an example of a simple
 working language which can give one a good start in designing a
 compilerwriting compiler suited to his own needs. Indeed, the
 META II compiler is written in its own language, thus lending
 itself to modification.",
 paper = "Scho64.pdf",
+@article{Chur40,
+ author = "Church, Alonzo",
+ title = {{A Formulation of the Simple Theory of Types}},
+ journal = "J. of Symbolic Logic",
+ volume = "5",
+ number = "2",
+ year = "1940",
+ pages = "56--68",
+ abstract =
+ "The purpose of the present paper is to give a formulation of the
+ simple theory of types which incorporates certain features of the
+ calculus of $\lambda$-conversion. A complete incorporation of the
+ calculus of $\lambda$-conversion into the theory of types is
+ impossible if we require that $\lambda x$ and juxtaposition shall
+ retain their respective meanings as an abstraction operator and as
+ denoting the application of function to argument. But the present
+ partial incorporation has certain advantages from the point of view of
+ type theory and is offered as being of interest on this basis
+ (whatever may be thought of the finally satisfactory character of the
+ theory of types as a foundation for logic and mathematics).",
+ paper = "Chur40.pdf",
keywords = "printed"
}
\end{chunk}
\index{Jenks, Richard D.}
+\index{Ciolli, Gianni}
+\index{Gentili, Graziano}
+\index{Maggesi, Marco}
\begin{chunk}{axiom.bib}
@techreport{Jenk70,
 author = "Jenks, Richard D.",
 title = {{META/LISP: An interactive translator writing system}},
 type = "research report",
 number = "RC2968",
 year = "1970",
 institution = "IBM Research",
 abstract =
 "META/LISP is a general purpose translator writing system for IBM
 System/360 currently running on TSS, CP/CMS, and OS/360. The input
 to the system is a source program which simultaneously describes
 1) the syntax of some input data to be translated and
 2) algorithms which operate on the input data and a pushdown stack
 to accomplish the desired translation; the output of the system is
 a compiled program for translating that input data. In particular
 when the input data are statements of a higherlevel language to
 be translated into assembly language, META/LISP serves as a
 compilercompiler. META/LISP uses the topdown syntaxdirected
 approach which makes the system extremely attractive for the
 design and implementation of experimental languages; using
 META/LISP such compilers are easy to write, easy to check out, and
  most importantly  easy to modify interactively. The appendices
 which follow a rather complete description of the system including
 a selfdescription of the META/LISP compiler.",
 paper = "Jenk70.pdf",
 keywords = "axiomref, printed"
}

\end{chunk}

\index{Havas, George}
\index{Majewski, Bohdan S.}
\index{Matthews, Keith R.}
\begin{chunk}{axiom.bib}
@article{Hava98,
 author = "Havas, George and Majewski, Bohdan S. and Matthews, Keith R.",
 title = {{Extended GCD and Hermite Normal Form Algorithms via
 Lattice Basis Reduction}},
 journal = "Experimental Mathematics",
 volume = "7",
 number = "2",
 pages = "125136",
 year = "1998",
 abstract =
 "Extended gcd calculation has a long history and plays an
 important role in computational number theory and linear
 algebra. Recent results have shown that finding optimal
 multipliers in extended gcd calculations is difficult. We present
 an algorithm which uses lattice basis reduction to produce small
 integer multipliers $x_1,\ldots,x_m$ for the equation
 $s=gcd(s_1,\ldots,s_m)=x_1s_1+\ldots+x_ms_m$ where
 $s_1,\ldots,s_m$ are given integers. The method generalises to
 produce small unimodular transformation matrices for computing the
 hermite normal form of an integer matrix.",
 paper = "Hava98.pdf"
+@article{Ciol11,
+ author = "Ciolli, Gianni and Gentili, Graziano and Maggesi, Marco",
+ title = {{A Certified Proof of the Cartan Fixed Point Theorem}},
+ journal = "J. Autom. Reasoning",
+ volume = "47",
+ number = "3",
+ pages = "319--336",
+ year = "2011"
}
\end{chunk}
\index{Havas, George}
\index{Majewski, Bohdan}
+\index{Constable, R.L.}
+\index{Allen, S.F.}
+\index{Bromley, H.M.}
+\index{Cremer, J.F.}
+\index{Harper, R.W.}
+\index{Howe, D.J.}
+\index{Knoblock, T.B.}
+\index{Mendler, N.P.}
+\index{Panangaden, P.}
+\index{Sasaki, J.T.}
+\index{Smith, S.F.}
\begin{chunk}{axiom.bib}
@techreport{Hava97,
 author = "Havas, George and Majewski, Bohdan",
 title = {{Extended GCD Algorithms}},
 type = "technical report",
 institution = "University of Queensland",
 number = "TR0325",
 year = "1997",
 abstract =
 "Given an integer vector of $n$ positive numbers
 $a=\vert a_i \vert^n_{i=1}$ the extended gcd problem asks for an
 integer vector $x$ of length $n$ such that
 \[xa^T=\sum_{i=1}^n x_ia_i = gcd(a_1,a_2,\ldots,a_n)\]

 For many applications it is vital that some measure of $x$,
 $\norm{x}$ is small. We have proved, however, that if we choose
 either the max norm or the zero matric the question of finding
 $x$ such that $\norm{x}$ is smaller than some positive constant
 $K$ is NPcomplete. We conjecture that the questions remains
 NPcomplete for other norms.

 In the light of these results we have proposed two approximation
 algorithms. Their respective complexities are
 $O(n^2 log(max_i\{a_i\}))$ and $O(n^4 log(max_i\{a_i\}))$.
 Theoretical analysis of the algorithms leads
 to unsatisfactory bounds on the quality of the solution. Thus here
 we undertake a practical study of the methods, where their
 performance is matched against optimal solutions.",
 paper = "Hava97.pdf"
}

\end{chunk}

\index{Cheng, Eugenia}
\begin{chunk}{axiom.bib}
@misc{Chen04,
 author = "Cheng, Eugenia",
 title = {{How to write proofs: A quick guide}},
 link = "\url{http://cheng.staff.shef.ac.uk/proofguide/proofguide.pdf}",
 year = "2004",
 paper = "Chen04.pdf",
 keywords = "printed,DONE"
+@book{Cons85,
+ author = "Constable, R.L. and Allen, S.F. and Bromley, H.M. and Cremer, J.F.
+ and Harper, R.W. and Howe, D.J. and Knoblock, T.B. and
+ Mendler, N.P. and Panangaden, P. and Sasaki, J.T. and Smith, S.F.",
+ title = {{Implementing Mathematics with The Nuprl Proof Development System}},
+ publisher = "Prentice-Hall",
+ year = "1985"
}
\end{chunk}
\index{Warren, David H.D.}
\index{Pereira, Luis M.}
+\index{Coquand, Thierry}
+\index{Huet, Gerard}
\begin{chunk}{axiom.bib}
@misc{Warr77,
 author = "Warren, David H.D. and Pereira, Luis M.",
 title = {{Prolog  The Language and its Implementation Compared
 with Lisp}},
 year = "1977",
 link =
 "\url{http://www.public.imtbstsp.eu/~gibson/Teaching/TeachingReadingMaterial/WarrenPereiraPereira77.pdf}",
 abstract =
 "Prolog is a simple but powerful programming language founded on
 symbolic logic. The basic computational mechanism is a pattern
 matching process (``unification'') operating on general record
 structures (``terms of logic''). We briefly review the language
 and compare it especially with pure Lisp. The remainder of the
 paper discusses techniques for implementing Prolog efficiently; in
 particular we describe how to compile the patterns involved in the
 matching process. These techniques are as incorporated in our
 DECsystem10 Prolog compiler (written in Prolog). The code it
 generates is comparable in speed with that produced by existing
 DEC10 Lisp compilers. We argue that pattern matching is a better
 method for expressing operations on structured data than
 conventional selectors and constructors  both for the user and
 for the implementor.",
 paper = "Warr77.pdf"
+@incollection{Coqu88,
+ author = "Coquand, Thierry and Huet, Gerard",
+ title = {{The Calculus of Constructions}},
+ booktitle = "Information and Computation, Volume 76",
+ year = "1988",
+ publisher = "Academic Press",
+ paper = "Coqu88.pdf",
+ keywords = "printed"
}
\end{chunk}
+\end{chunk}
\index{Day, Martin V.}
+\index{Coquand, Thierry}
+\index{Paulin, Christine}
\begin{chunk}{axiom.bib}
@book{Dayx16,
 author = "Day, Martin V.",
 title = {{An Introduction to Proofs and the Mathematical Vernacular}},
 year = "2016",
 publisher = "Virginia Tech",
 link = "\url{www.math.vt.edu/people/day/ProofsBook/IPaMV.pdf}",
 paper = "Dayx16.pdf"
+@inproceedings{Coqu90,
+ author = "Coquand, Thierry and Paulin, Christine",
+ title = {{Inductively Defined Types}},
+ booktitle = "Int. Conf. on Computer Logic",
+ publisher = "Springer",
+ pages = "50--66",
+ year = "1990"
}
\end{chunk}
\index{Fourer, Robert}
\index{Gay, David M.}
\index{Kernighan, Brian W.}
+\index{Cramer, Marcos}
+\index{Koepke, Peter}
+\index{Schroder, Bernhard}
\begin{chunk}{axiom.bib}
@misc{Four03,
 author = "Fourer, Robert and Gay, David M. and Kernighan, Brian W.",
 title = {{AMPL Reference}},
 link = "\url{https://ampl.com/BOOK/CHAPTERS/24refman.pdf}",
 year = "2003",
 paper = "Four03.pdf"
+@article{Cram11,
+ author = "Cramer, Marcos and Koepke, Peter and Schroder, Bernhard",
+ title = {{Parsing and Disambiguation of Symbolic Mathematics in the
+ Naproche System}},
+ journal = "LNAI",
+ number = "6824",
+ pages = "180--195",
+ year = "2011",
+ publisher = "Springer",
+ abstract =
+ "The Naproche system is a system for linguistically analysing and
+ proofchecking mathematical texts written in a controlled natural
+ language. The aim is to have an input language that is as close as
+ possible to the language that mathematicians actually use when
+ writing textbooks or papers.
+
+ Mathematical texts consist of a combination of natural language
+ and symbolic mathematics, with symbolic mathematics obeying its
+ own syntactic rules. We discuss the difficulties that a program
+ for parsing and disambiguating symbolic mathematics must face and
+ present how these difficulties have been tackled in the Naproche
+ system. One of these difficulties is the fact that information
+ provided in the preceding context  including information
+ provided in natural language  can influence the way a symbolic
+ expression has to be disambiguated.",
+ paper = "Cram11.pdf"
}
\end{chunk}
\index{Hammack, Richard}
+\index{Hales, Thomas C.}
+\index{Harrison, John}
+\index{McLaughlin, Sean}
+\index{Nipkow, Tobias}
+\index{Obua, Steven}
+\index{Zumkeller, Roland}
\begin{chunk}{axiom.bib}
@book{Hamm18,
 author = "Hammack, Richard",
 title = {{Book of Proof}},
 publisher = "Hammack, Richard",
 year = "2018",
 paper = "Hamm18.pdf"
+@article{Hale10,
+ author = "Hales, Thomas C. and Harrison, John and McLaughlin, Sean
+ and Nipkow, Tobias and Obua, Steven and Zumkeller,
+ Roland",
+ title = {{A Revision of the Proof of the Kepler Conjecture}},
+ journal = "Discrete and Computational Geometry",
+ volume = "44",
+ number = "1",
+ pages = "1--34",
+ year = "2010"
}
\end{chunk}
\index{Armstrong, J.L.}
\index{Birding, S.R.}
\index{Williams, M.C.}
+\index{Harrison, John}
\begin{chunk}{axiom.bib}
@inbook{Arms92,
 author = "Armstrong, J.L. and Birding, S.R. and Williams, M.C.",
 title = {{Use of Prolog for Developing a New Programming Language}},
 booktitle = "The Practical Application of Prolog",
 year = "1992",
 publisher = "Institute of Electrical Engineers, London",
 abstract =
 "This paper describes how Prolog was used for the development of a
 new concurrent realtime symbolic programming language called
 Erlang.

 Erlang was developed by first building a prototype in Prolog 
 the prototype was used by a user group to test their reactions to
 the language. As time passed many features were added (and
 removed) from the interpreter and eventually the language reached
 a level of maturity where it was decided to try it out on a
 significant problem.

 About 3 years and some 20,000 lines of Erlang later, performance
 became an issue  we wrote Prolog cross compilers from Erlang to
 various concurrent logic programming languages followed by a
 direct implementation of Erlang itself. The direct implementation
 of Erlang was loosely based on the WAM and made by writing a
 Prolog compiler from Erlang to a new abstractmachine and an
 emulator for the abstract machine in 'C'. The instruction set for
 the abstract machine was first prototyped in Prolog  finally the
 compiler was rewritten in Erlang, thus totally removing any
 dependency on Prolog.

 This paper describes some of the key events which lay between the
 simple prototype and the current version of the language.",
 paper = "Arms92.pdf",
 keywords = "printed"
+@inproceedings{Harr96a,
+ author = "Harrison, John",
+ title = {{HOL Light: A Tutorial Introduction}},
+ booktitle = "First Int. Conf. on Formal Methods in ComputerAided Design",
+ publisher = "Springer",
+ pages = "265--269",
+ year = "1996"
}
\end{chunk}
\index{Pirog, Maciej}
\index{Gibbons, Jeremy}
+\index{Harrison, John}
\begin{chunk}{axiom.bib}
@misc{Piroxx,
 author = "Pirog, Maciej and Gibbons, Jeremy",
 title = {{Extended Abstract: A Functional Derivation of the Warren
 Abstract Machine}},
 link = "\url{http://www.cs.ox.ac.uk/jeremy.gibbons/publications/wam.pdf}",
 year = "unknown",
 abstract =
 "Based on Danvy et al.'s functional correspondence, we give a
 further example of gradual refinement of an interpreter into a
 known, lowlevel abstract machine underlying realworld compilers,
 by deriving an abstract model of the Warren Abstract Machine from
 a simple resolutionbased Prolog interpreter. We show that other
 wellknown functional programming techniques (namely, explicit
 laziness and semipersistent data structures) can help to develop
 abstract machines without detailed examination of the semantics
 realised by the interpreter.",
 paper = "Piroxx.pdf",
 keywords = "printed"
+@article{Harr09a,
+ author = "Harrison, John",
+ title = {{A Formalized Proof of Dirichlet's Theorem on Primes in
+ Arithmetic Progression}},
+ journal = "J. Formaliz. Reason.",
+ volume = "2",
+ number = "1",
+ pages = "63--83",
+ year = "2009"
}
\end{chunk}
\index{Russinoff, David M.}
+\index{Harrison, John}
\begin{chunk}{axiom.bib}
@article{Russ92,
 author = "Russinoff, David M.",
 title = {{A Verified Prolog Compiler for the Warren Abstract Machine}},
 journal = "Journal of Logic Programming",
 volume = "13",
 number = "4",
 pages = "367412",
 year = "1992",
 abstract =
 "We extend the theory of Prolog to provide a framework for the
 study of Prolog compilation technology. For this purpose, we first
 demonstrate the semantic equivalence of two Prolog interpreters: a
 conventional SLDrefutation procedure and one that employs
 Warren's ``last call'' optimization. Next, we formally define the
 Warren Abstract Machine (WAM) and its instruction set and present
 a Prolog compiler for the WAM. Finally, we prove that the WAM
 execution of a compiled Prolog program produces the same result as
 the interpretation of its source.",
 paper = "Russ92.pdf",
 keywords = "printed"
+@article{Harr09b,
+ author = "Harrison, John",
+ title = {{Formalizing an Analytic Proof of the Prime Number Theorem}},
+ journal = "J. Automated Reasoning",
+ volume = "43",
+ pages = "243--261",
+ year = "2009"
}
\end{chunk}
\index{Bohrer, Brandon}
\index{Crary, Karl}
+\index{Holzl, Johannes}
+\index{Heller, Armin}
\begin{chunk}{axiom.bib}
@misc{Bohr16,
 author = "Bohrer, Brandon and Crary, Karl",
 title = {{A ProofProducing Verified Prolog Compiler}},
 year = "2016",
 link = "\url{www.cs.cmu.edu/~bbohrer/pub/twamiclp2016long.pdf}",
 abstract =
 "We have designed and implemented a verified compiler for a
 dialect of Prolog. Our compiler is verified using proofproducing
 compilation: every compiled program is accompanied with a formal
 proof that it is equivalent to a particular source program. Our
 formal proofs take the form of type information for our new
 verifying abstract machine which we call the TWAM, whose type
 system natively understands logic programs specified in the
 logical framework LF. We present a soundness metatheorem for the
 TWAM showing that welltyped TWAM programs are sound proofsearch
 procedures. In doing so, we reduce our trusted computing base from
 the entire compiler to the TWAM typechecker.",
 paper = "Bohr16.pdf",
 keywords = "printed"
}
+@inproceedings{Holz11,
+ author = "Holzl, Johannes and Heller, Armin",
+ title = {{Three Chapters of Measure Theory in Isabelle / HOL}},
+ booktitle = "Interactive Theorem Proving",
+ publisher = "Springer",
+ pages = "135--151",
+ year = "2011"
+}
\end{chunk}
\index{Tarau, Paul}
+\index{Huet, Gerard}
+\index{Saibi, Amokrane}
\begin{chunk}{axiom.bib}
@misc{Tara16,
 author = "Tarau, Paul",
 title = {{A Hitchhiker's Guide to Reinventing a Prolog Machine}},
 year = "2016",
 abstract =
 "We take a fresh, ``cleanroom'' look at implementing Prolog by
 deriving its translation to an executable representation and its
 execution algorithm from a simple Horn Clause metainterpreter.
 The resulting design has some interesting properties:
 \begin{itemize}
 \item the heap representation of terms and the abstract machine
 instruction encodings are the same.
 \item no dedicated code area is used as the code is placed
 directly on the heap.
 \item unification and indexing operations are orthogonal
 \item filtering of matching clauses happens without building new
 structures on the heap
 \item variables in function and predicate symbol positions are
 handled with no performance penalty
 \item a simple Englishlike syntax is used as an intermediate
 representation for clauses and goals
 \item the same Englishlike syntax can be used by programmers
 directly as an alternative to classic Prolog syntax
 \item solutions of (multiple) logic engines are exposed as answer
 streams that can be combined through typical functional
 programming patterns
 \item performance of a basic interpreter implementing our design is
 within a factor of 2 of a highly optimized WAM-based system
 \end{itemize}
+@inproceedings{Huet00,
+ author = "Huet, Gerard and Saibi, Amokrane",
+ title = {{Constructive Category Theory}},
+ booktitle = "Proof, Language, and Interaction: Essays in Honour of
+ Robin Milner",
+ publisher = "MIT Press",
+ pages = "235--275",
+ year = "2000"
+}
 To help placing our design on the fairly rich map of Prolog
 systems, we discuss similarities to existing Prolog abstract
 machines, with emphasis on separating necessary commonalities from
 arbitrary implementation choices.",
 paper = "Tara16.pdf"
+\end{chunk}
+
+\index{Kornilowicz, Artur}
+\begin{chunk}{axiom.bib}
+@article{Korn07,
+ author = "Kornilowicz, Artur",
+ title = {{A Proof of the Jordan Curve Theorem via the Brouwer Fixed
+ Point Theorem}},
+ journal = "Mechanized Mathematics and Its Applications",
+ volume = "6",
+ number = "1",
+ pages = "33--40",
+ year = "2007"
}
\end{chunk}
\index{Felleisen, Matthias}
+\index{Martin-L\"of, P.}
\begin{chunk}{axiom.bib}
@techreport{Fell85,
 author = "Felleisen, Matthias",
 title = {{Transliterating Prolog into Scheme}},
 type = "technical report",
 number = "182",
 institution = "University of Indiana",
 year = "1985",
 paper = "Fell85.pdf",
 keywords = "printed"
+@inproceedings{Mart73,
+ author = "Martin-L\"of, P.",
+ title = {{An Intuitionistic Theory of Types: Predicative Part}},
+ booktitle = "Logic Colloqium '73",
+ publisher = "NorthHolland",
+ year = "1973"
}
\end{chunk}
\index{Wang, Ke}
+\index{Mhamdi, Tarek}
+\index{Hasan, Osman}
+\index{Tahar, Sofiene}
\begin{chunk}{axiom.bib}
@misc{Wang19,
 author = "Wang, Ke",
 title = {{Learning Scalable and Precise Representation of Program
 Semantics}},
 year = "2019",
 abstract =
 "Neural program embedding has shown potential in aiding the
 analysis of largescale, complicated software. Newly proposed deep
 neural architectures pride themselves on learning program
 semantics rather than superficial syntactic features. However, by
 considering the source code only, the vast majority of neural
 networks do not capture a deep, precise representation of program
 semantics. In this paper, we present DYPRO, a novel deep neural
 network that learns from program execution traces. Compared to the
 prior dynamic models, not only is DYPRO capable of generalizing
 across multiple executions for learning a program's dynamic
 semantics in its entirety, but DYPRO is also more efficient when
 dealing with programs yielding long execution traces. For
 evaluation, we task DYPRO with semantic classification
 (i.e. categorizing programs based on their semantics) and compared
 it against two prominent static models: Gated Graph Neural Network
 and TreeLSTM. We find that DYPRO achieves the highest prediction
 accuracy among all models. To further reveal the capacity of all
 aforementioned deep neural architectures, we examine if the models
 can learn to detect deeper semantic properties of a program. In
 particular given a task of recognizing loop invariants, we show
 DYPRO beats all static models by a wide margin.",
 paper = "Wang19.pdf",
 keywords = "printed"
+@inproceedings{Mham11,
+ author = "Mhamdi, Tarek and Hasan, Osman and Tahar, Sofiene",
+ title = {{Formalization of Entropy Measure in HOL}},
+ booktitle = "Interactive Theorem Proving",
+ publisher = "Springer",
+ pages = "233--248",
+ year = "2011"
}
\end{chunk}
\index{Muller, Dennis}
\index{Kohlhase, Michael}
\index{Rabe, Florian}
+\index{Nathanson, Melvyn B.}
\begin{chunk}{axiom.bib}
@article{Mull18,
 author = "Muller, Dennis and Kohlhase, Michael and Rabe, Florian",
 title = {{Automatically Finding Theory Morphisms for Knowledge
 Management}},
 journal = "LNCS",
 volume = "11006",
 year = "2018",
 abstract =
 "We present a method for finding morphisms between formal
 theories, both within as well as across libraries based on
 different logical foundations. As they induce new theorems in the
 target theory for any of the source theory, theory morphisms are
 high-value elements of a modular formal library. Usually, theory
 morphisms are manually encoded, but this practice requires authors
 who are familiar with source and target theories at the same time,
 which limits the scalability of the manual approach.

 To remedy this problem, we have developed a morphism finder
 algorithm that automates theory morphism discovery. In this paper we
 present an implementation in the MMT system and show specific use
 cases. We focus on an application of theory discovery, where a user
 can check whether a (part of a) formal theory already exists in
 some library, potentially avoiding duplication of work or
 suggesting an opportunity for refactoring.",
 paper = "Mull18.pdf"
}

\end{chunk}

\index{Cohl, Howard S.}
\index{GreinerPetter, Andre}
\index{Schubotz, Moritz}
\begin{chunk}{axiom.bib}
@article{Cohl18,
 author = "Cohl, Howard S. and GreinerPetter, Andre and Schubotz, Moritz",
 title = {{Automated Symbolic and Numerical Testing of DLMF Formulae
 Using Computer Algebra Systems}},
 journal = "LNCS",
 volume = "11006",
 year = "2018",
 abstract =
 "We have developed an automated procedure for symbolic and
 numerical testing of formulae extracted from the National
 Institute of Standards and Technology (NIST) Digital Library of
 Mathematical Functions (DLMF). For the NIST Digital Repository of
 Mathematical Formulae, we have developed conversion tools from
 semantic Latex to Computer Algebra System (CAS) MAPLE which relies
 on Youssef's partofmath tagger. We convert a test data subset of
 4,078 semantics Latex DLMF formulae extracted from the DLMF to the
 native CAS representation and then apply an automated scheme for
 symbolic and numerical testing and verification. Our framework is
 implemented using Java and MAPLE. We describe in detail the
 conversion process which is required so that the CAS is able to
 correctly interpret the mathematical representation of the
 formulae. We describe the improvement of the effectiveness of our
 automated scheme through incremental enhancements (making more
 precise) of the mathematical semantics markup of the formulae.",
 paper = "Cohl18.pdf"
}

\end{chunk}

\index{Carette, Jacques}
\index{Farmer, William M.}
\index{Sharoda, Yasmine}
\begin{chunk}{axiom.bib}
@article{Care18,
 author = "Carette, Jacques and Farmer, William M. and Sharoda, Yasmine",
 title = {{Biform Theories: Project Description}},
 journal = "LNCS",
 volume = "11006",
 year = "2018",
 abstract =
 "A biform theory is a combination of an axiomatic theory and an
 algorithmic theory that supports the integration of reasoning and
 computation. These are ideal for specifying and reasoning about
 algorithms that manipulate mathematical expressions. However,
 formalizing biform theories is challenging as it requires the
 means to express statements about the interplay of what these
 algorithms do and what their actions mean mathematically. This
 paper describes a project to develop a methodology for expressing,
 manipulating, managing, and generating mathematical knowledge as a
 network of biform theories. It is a subproject of MathScheme, a
 longterm project at McMaster University to produce a framework
 for integrating formal deduction and symbolic computation.",
 paper = "Care18.pdf",
 keywords = "printed"
+@article{Nath08,
+ author = "Nathanson, Melvyn B.",
+ title = {{Desperately Seeking Mathematical Proof}},
+ journal = "Notices of the American Math. Society",
+ volume = "55",
+ number = "7",
+ pages = "773",
+ year = "2008"
}
\end{chunk}
\index{Carette, Jacques}
\index{Farmer, William M.}
+\index{Nipkow, Tobias}
+\index{Paulson, Lawrence C.}
+\index{Wenzel, Markus}
\begin{chunk}{axiom.bib}
@article{Care17,
 author = "Carette, Jacques and Farmer, William M.",
 title = {{Formalizing Mathematical Knowledge as a Biform Theory
 Graph: A Case Study}},
 journal = "LNCS",
 volume = "10383",
 year = "2017",
 abstract =
 "A biform theory is a combination of an axiomatic theory and an
 algorithmic theory that supports the integration of reasoning and
 computation. These are ideal for formalizing algorithms that
 manipulate mathematical expressions. A theory graph is a network
 of theories connected by meaningpreserving theory morphisms that
 map the formulae of one theory to the formulas of another
 theory. Theory graphs are in turn well suited for formalizing
 mathematical knowledge at the most convenient level of abstraction
 using the most convenient vocabulary. We are interested in the
 problem of whether a body of mathematical knowledge can be
 effectively formalized as a theory graph of biform theories. As a
 test case, we look at the graph of theories encoding natural
 number arithmetic. We used two different formalisms to do this,
 which we describe and compare. The first is realized in
 CTT$_{uqe}$, a version of Church's type theory with quotation and
 evaluation, and the second is realized in Agda, a dependently
 typed programming language.",
 paper = "Care17.pdf"
}

\end{chunk}

\index{Chojecki, Przemyslaw}
\begin{chunk}{axiom.bib}
@article{Choj17,
 author = "Chojecki, Przemyslaw",
 title = {{DeepAlgebra -- An Outline of a Program}},
 journal = "LNCS",
 volume = "10383",
 year = "2017",
 abstract =
 "We outline a program in the area of formalization of mathematics
 to automate theorem proving in algebra and algebraic geometry. We
 propose a construction of a dictionary between automated theorem
 provers and (La)Tex exploiting syntactic parsers. We describe its
 application to a repository of humanwritten facts and definitions
 in algebraic geometry (The Stacks Project). We use deep learning
 techniques.",
 paper = "Choj17.pdf"
+@book{Nipk02a,
+ author = "Nipkow, Tobias and Paulson, Lawrence C. and Wenzel, Markus",
+ title = {{Isabelle / HOL. A Proof Assistant for HigherOrder Logic}},
+ publisher = "Springer",
+ year = "2002"
}
\end{chunk}
\index{Farmer, William M.}
+\index{O'Connor, Russell}
\begin{chunk}{axiom.bib}
@article{Farm17,
 author = "Farmer, William M.",
 title = {{Theory Morphisms in Church's Type Theory with Quotation
 and Evaluation}},
 journal = "LNCS",
 volume = "10383",
 year = "2017",
 abstract =
 "CTT$_{qe}$ is a version of Church's type theory with global
 quotation and evaluation operators that is engineered to reason
 about the interplay of syntax and semantics and to formalize
 syntaxlevel mathematical algorithms. CTT$_{uqe}$ is a variant of
 CTT$_{qe}$ that admits undefined expressions, partial functions,
 and multiple base types of individuals. It is better suited than
 CTT$_{qe}$ as a logic for building networks of theories connected
 by theory morphisms. This paper presents the syntax and semantics
 of CTT$_{uqe}$, defines a notion of a theory morphism from one
 CTT$_{uqe}$ theory to another, and gives two simple examples
 involving monoids that illustrate the use of theory morphisms in
 CTT$_{qe}$.",
 paper = "Farm17.pdf"
}

\end{chunk}

\index{Muller, Dennis}
\index{Gauthier, Thibault}
\index{Kaliszyk, Cezary}
\index{Kohlhase, Michael}
\index{Rabe, Florian}
\begin{chunk}{axiom.bib}
@article{Mull17,
 author = "Muller, Dennis and Gauthier, Thibault and Kaliszyk, Cezary
 and Kohlhase, Michael and Rabe, Florian",
 title = {{Classification of Alignments Between Concepts of Formal
 Mathematical Systems}},
 journal = "LNCS",
 volume = "10383",
 year = "2017",
 abstract =
 "Mathematical knowledge is publicly available in dozens of
 different formats and languages, ranging from informal
 (e.g. Wikipedia) to formal corpora (e.g. Mizar). Despite an
 enormous amount of overlap between these corpora, only few
 machine-actionable connections exist. We speak of alignment if the
 same concept occurs in different libraries, possibly with slightly
 different names, notations, or formal definitions. Leveraging
 these alignments creates a huge potential for knowledge sharing
 and transfer, e.g. integrating theorem provers or reusing
 services across systems. Notably, even imperfect alignments,
 i.e. concepts that are very similar rather than identical, can
 often play very important roles. Specifically, in machine learning
 techniques for theorem proving and in automation techniques that
 use these, they allow learning-reasoning based automation for
 theorem provers to take inspiration from proofs from different
 formal proof libraries or semiformal libraries even if the latter
 is based on a different mathematical foundation. We present a
 classification of alignments and design a simple format for
 describing alignments, as well as an infrastructure for sharing
 them. We propose these as a centralized standard for the
 community. Finally, we present an initial collection of
 approximately 12000 alignments from the different kinds of
 mathematical corpora, including proof assistant libraries and
 semiformal corpora as a public resource.",
 paper = "Mull17.pdf"
}

\end{chunk}

\index{Farmer, William M.}
\begin{chunk}{axiom.bib}
@article{Farm16,
 author = "Farmer, William M.",
 title = {{Incorporating Quotation and Evaluation into Church's Type
 Theory: Syntax and Semantics}},
 journal = "LNCS",
 volume = "9791",
 year = "2016",
 abstract =
 "CTT$_{qe}$ is a version of Church's type theory that includes
 quotation and evaluation operators that are similar to quote and
 eval in the Lisp programming language. With quotation and
 evaluation it is possible to reason in CTT$_{qe}$ about the
 interplay of the syntax and semantics of expressions and, as a
 result, to formalize syntaxbased mathematical algorithms. We
 present the syntax and semantics of CTT$_{qe}$ and give several
 examples that illustrate the usefulness of having quotation and
 evaluation in CTT$_{qe}$. We do not give a proof system for
 CTT$_{qe}$ but we do sketch what a proof system could look like.",
 paper = "Farm16.pdf"
}

\end{chunk}

+\begin{chunk}{axiom.bib}
+@inproceedings{Ocon05,
+ author = "O'Connor, Russell",
+ title = {{Essential Incompleteness of Arithmetic Verified by Coq}},
+ booktitle = "Theorem Proving in Higher Order Logics",
+ publisher = "Springer",
+ pages = "245--260",
+ year = "2005"
+}
\end{chunk}
\index{Blanchette, Jasmin Christian}
\index{Haslbeck, Maximilian}
\index{Matichuk, Daniel}
\index{Nipkow, Tobias}
+\index{Dowek, Gilles}
\begin{chunk}{axiom.bib}
@article{Blan15,
 author = "Blanchette, Jasmin Christian and Haslbeck, Maximilian and
 Matichuk, Daniel and Nipkow, Tobias",
 title = {{Mining the Archive of Formal Proofs}},
 journal = "LNCS",
 volume = "9150",
 year = "2015",
 abstract =
 "The Archive of Formal Proofs is a vast collection of
 computerchecked proofs developed using the proof assistant
 Isabelle. We perform an indepth analysis of the archive, looking
 at various properties of the proof developments, including size,
 dependencies, and proof style. This gives some insights into the
 nature of formal proofs",
 paper = "Blan15.pdf"
}

\end{chunk}

+\begin{chunk}{axiom.bib}
+@inbook{Dowe01,
+ author = "Dowek, Gilles",
+ title = {{Handbook of Automated Reasoning, Vol II}},
+ publisher = "Elsevier Science",
+ year = "2001",
+ chapter = "16",
+ pages = "1009--1062"
+}
\end{chunk}
\index{Horozal, Fulya}
\index{Rabe, Florian}
+\index{Feit, Walter}
+\index{Thompson, John G.}
\begin{chunk}{axiom.bib}
@article{Horo15,
 author = "Horozal, Fulya and Rabe, Florian",
 title = {{Formal Logic Definitions for Interchange Languages}},
 journal = "LNCS",
 volume = "9150",
 year = "2015",
 abstract =
 "System integration often requires standardized interchange
 languages, via which systems can exchange mathematical
 knowledge. Major examples are the MathMLbased markup languages
 and TPTP. However, these languages standardize only the syntax of
 the exchanged knowledge, which is insufficient when the involved
 logics are complex or numerous. Logical frameworks, on the other
 hand, allow representing the logics themselves (and are thus aware
 of the semantics), but they abstract from the concrete syntax.

 Maybe surprisingly, until recently, stateoftheart logical
 frameworks were not quite able to adequately represent logics
 commonly used in formal systems. Using a recent extension of the
 logical framework LF, we show how to give concise formal
 definitions of the logics used in TPTP. We can also formally
 define translations and combinations between the various TPTP
 logics. This allows us to build semantics-aware tool support such
 as typechecking TPTP content.

 While our presentation focuses on the current TPTP logics, our
 approach can be easily extended to other logics and interchange
 languages. In particular, our logic representations can be used
 with both TPTP and MathML. Thus, a single definition of the
 semantics can be used with either interchange syntax.",
 paper = "Horo15.pdf"
}

\end{chunk}

\index{Rabe, Florian}
\begin{chunk}{axiom.bib}
@article{Rabe15,
 author = "Rabe, Florian",
 title = {{Generic Literals}},
 journal = "LNCS",
 volume = "9150",
 year = "2015",
 abstract =
 "MMT is a formal framework that combines the flexibility of
 knowledge representation languages like OPENMATH with the formal
 rigor of logical frameworks like LF. It systematically abstracts
 from theoretical and practical aspects of individual formal
 languages and tries to develop as many solutions as possible
 generically.

 "In this work, we allow MMT theories to declare user-defined
 literals, which makes literals as userextensible as operators,
 axioms, and notations. This is particularly important for
 framework languages, which must be able to represent any choice of
 literals. Theoretically, our literals are introduced by importing
 a model that defines the denotations of some types and function
 symbols. Practically, MMT is coupled with a programming language,
 in which these models are defined.

 Our results are implemented in the MMT system. In particular,
 literals and computation on them are integrated with the parser
 and type checker.",
 paper = "Rabe15.pdf"
}

\end{chunk}

\index{Carette, Jacques}
\index{Farmer, William M.}
\index{Kohlhase, Michael}
\begin{chunk}{axiom.bib}
@article{Care14,
 author = "Carette, Jacques and Farmer, William M. and Kohlhase, Michael",
 title = {{Realms: A Structure for Consolidating Knowledge about
 Mathematical Theories}},
 journal = "LNCS",
 volume = "8543",
 year = "2014",
 abstract =
 "Since there are different ways of axiomatizing and developing a
 mathematical theory, knowledge about such a theory may reside in
 many places and in many forms within a library of formalized
 mathematics. We introduce the notion of a realm as a structure for
 consolidating knowledge about a mathematical theory. A realm
 contains several axiomatizations of a theory that are separately
 developed. Views interconnect these developments and establish
 that the axiomatizations are equivalent in the sense of being
 mutually interpretable. A realm also contains an external interface
 that is convenient for users of the library who want to apply the
 concepts and facts of the theory without delving into the details
 of how the concepts and facts were developed. We illustrate the
 utility of realms through a series of examples. We also give an
 outline of the mechanisms that are needed to create and maintain
 realms.",
 paper = "Care14.pdf"
}

\end{chunk}

\index{Bradford, Russell}
\index{Davenport, James H.}
\index{England, Matthew}
\index{Wilson, David}
\begin{chunk}{axiom.bib}
@article{Brad13a,
 author = "Bradford, Russell and Davenport, James H. and England, Matthew
 and Wilson, David",
 title = {{Optimising Problem Formulation for Cylindrical Algebraic
 Decomposition}},
 journal = "LNCS",
 volume = "7961",
 year = "2013",
 abstract =
 "Cylindrical Algebraic Decomposition (CAD) is an important tool
 for the study of real algebraic geometry with many applications
 both within mathematics and elsewhere. It is known to have doubly
 exponential complexity in the number of variables in the worst
 case, but the actual computation time can vary greatly. It is
 possible to offer different formulations for a given problem
 leading to great differences in tractability. In this paper we
 suggest a new measure for CAD complexity which takes into account
 the real geometry of the problem. This leads to new heuristics for
 choosing: the variable ordering for a CAD problem, a designated
 equational constraint, and formulations for truthtable invariant
 CADs (TTICASs). We then consider the possibility of using Groebner
 bases to precondition TTICAD and when such formulations constitute
 the creation of a new problem.",
 paper = "Brad13a.pdf"
}

\end{chunk}

\index{Farmer, William M.}
\begin{chunk}{axiom.bib}
@article{Farm13a,
 author = "Farmer, William M.",
 title = {{The Formalization of SyntaxBased Mathematical Algorithms
 Using Quotation and Evaluation}},
 journal = "LNCS",
 volume = "7961",
 year = "2013",
 abstract =
 "Algorithms like those for differentiating functional expressions
 manipulate the syntactic structure of mathematical expressions in
 a mathematically meaningful way. A formalization of such an
 algorithm should include a specification of its computational
 behavior, a specification of its mathematical meaning, and a
 mechanism for applying the algorithm to actual
 expressions. Achieving these goals requires the ability to
 integrate reasoning about the syntax of the expressions with
 reasoning about what the expressions mean. A syntax framework is a
 mathematical structure that is an abstract model for a syntax
 reasoning system. It contains a mapping of expressions to
 syntactic values that represent the syntactic structures of the
 expressions; a language for reasoning about syntactic values; a
 quotation mechanism to refer to the syntactic value of an
 expression; and an evaluation mechanism to refer to the value of
 the expression represented by a syntactic value. We present and
 compare two approaches, based on instances of a syntax framework,
 to formalize a syntaxbased mathematical algorithm in a formal
 theory $T$. In the first approach the syntactic values for the
 expressions manipulated by the algorithm are members of an
 inductive type in $T$, but quotation and evaluation are functions
 defined in the metatheory of $T$. In the second approach every
 expression in $T$ is represented by a syntactic value, and
 quotation and evaluation are operators in $T$ itself.",
 paper = "Farm13a.pdf"
}

\end{chunk}

\index{Heras, Jonathan}
\index{Komendantskaya, Ekaterina}
\begin{chunk}{axiom.bib}
@article{Hera13,
 author = "Heras, Jonathan and Komendantskaya, Ekaterina",
 title = {{ML4PG in Computer Algebra Verification}},
 journal = "LNCS",
 volume = "7961",
 year = "2013",
 abstract =
 "ML4PG is a machine-learning extension that provides statistical
 proof hints during the process of Coq/SSReflect proof
 development. In this paper, we use ML4PG to find proof patterns in
 the CoqEAL library  a library that was devised to verify the
 correctness of Computer Algebra algorithms. In particular, we use
 ML4PG to help us in the formalisation of an efficient algorithm to
 compute the inverse of triangular matrices.",
 paper = "Hera13.pdf"
}

\end{chunk}

+\begin{chunk}{axiom.bib}
+@article{Feit63,
+ author = "Feit, Walter and Thompson, John G.",
+ title = {{Solvability of Groups of Odd Order}},
+ journal = "Pacific Journal of Mathematics",
+ volume = "13",
+ pages = "775--1029",
+ year = "1963"
+}
\end{chunk}
\index{Kohlhase, Michael}
\index{Mance, Felix}
\index{Rabe, Florian}
+\index{Ganesalingam, Mohan}
\begin{chunk}{axiom.bib}
@article{Kohl13,
 author = "Kohlhase, Michael and Mance, Felix and Rabe, Florian",
 title = {{A Universal Machine for Biform Theory Graphs}},
 journal = "LNCS",
 volume = "7961",
 year = "2013",
 abstract =
 "Broadly speaking, there are two kinds of semantics-aware
 assistant systems for mathematics: proof assistants express the
 semantics in logic and emphasize deduction, and computer algebra
 systems express the semantics in programming languages and
 emphasize computation. Combining the complementary strengths of
 both approaches while mending their complementary weaknesses has
 been an important goal of the mechanized mathematics community for
 some time.

 We pick up on the idea of biform theories and interpret it in the
 MMT/OMDOC framework which introduced the foundationsastheories
 approach, and can thus represent both logics and programming
 languages as theories. This yields a formal, modular framework of
 biform theory graphs which mixes specifications and implementations
 sharing the module system and typing information.

 We present automated knowledge management work flows that
 interface to existing specification/programming tools and enable
 an OPENMATH Machine, that operationalizes biform theories,
 evaluating expressions by exhaustively applying the
 implementations of the respective operators. We evaluate the new
 biform framework by adding implementations to the OPENMATH
 standard content dictionaries.",
 paper = "Kohl13.pdf"
}

\end{chunk}

+\begin{chunk}{axiom.bib}
+@phdthesis{Gane09,
+ author = "Ganesalingam, Mohan",
+ title = {{The Language of Mathematics}},
+ school = "University of Cambridge",
+ year = "2009"
+}
+
+\end{chunk}
+
+\index{Gonthier, Georges}
+\begin{chunk}{axiom.bib}
+@article{Gont08,
+ author = "Gonthier, Georges",
+ title = {{Formal Proof  The Four Color Theorem}},
+ journal = "Notices Amer. Math. Soc.",
+ volume = "55",
+ number = "11",
+ pages = "13821393",
+ year = "2008"
+}
+
+\end{chunk}
+
+\index{Gonthier, Georges}
+\begin{chunk}{axiom.bib}
+@inproceedings{Gont11,
+ author = "Gonthier, Georges",
+ title = {{Advances in the Formalization of the Odd Order Theorem}},
+ booktitle = "Interactive Theorem Proving",
+ publisher = "Springer",
+ pages = "2",
+ year = "2011"
}
\end{chunk}
+\index{Gonthier, Georges}
+\begin{chunk}{axiom.bib}
+@inproceedings{Gont11a,
+ author = "Gonthier, Georges",
+ title = {{PointFree, SetFree Concrete Linear Algebra}},
+ booktitle = "Interactive Theorem Proving",
+ publisher = "Springer",
+ pages = "103118",
+ year = "2011"
+}
+
+\end{chunk}
+
+\index{Gonthier, Georges}
\index{Mahboubi, Assia}
\begin{chunk}{axiom.bib}
@article{Mahb13,
 author = "Mahboubi, Assia",
 title = {{The Rooster and the Butterflies}},
 journal = "LNCS",
 volume = "7961",
 year = "2013",
 abstract =
 "This paper describes a machine-checked proof of the Jordan-H{\"o}lder
 theorem for finite groups. The purpose of this description is to
 discuss the representation of the elementary concepts of finite
 group theory inside type theory. The design choices underlying
 these representations were crucial to the successful formalization
 of a complete proof of the Odd Order Theorem in the Coq system.",
 paper = "Mahb13.pdf"
}

\end{chunk}

+\begin{chunk}{axiom.bib}
+@article{Gont10,
+ author = "Gonthier, Georges and Mahboubi, Assia",
+ title = {{An Introduction to Small Scale Reflection in Coq}},
+ journal = "J. Formaliz. Reason.",
+ volume = "3",
+ number = "2",
+ pages = "95--152",
+ year = "2010"
+}
+
+\end{chunk}
+
+\index{Gonthier, Georges}
+\index{Mahboubi, Assia}
+\index{Rideau, Laurence}
+\index{Tassi, Enrico}
+\index{Thery, Laurent}
+\begin{chunk}{axiom.bib}
+@inproceedings{Gont07,
+ author = "Gonthier, Georges and Mahboubi, Assia and Rideau, Laurence
+ and Tassi, Enrico and Thery, Laurent",
+ title = {{A Modular Formalisation of Finite Group Theory}},
+ booktitle = "Theorem Proving in Higher Order Logics",
+ publisher = "Springer",
+ pages = "86--101",
+ year = "2007"
}
\end{chunk}
\index{Tankink, Carst}
\index{Kaliszyk, Cezary}
\index{Urban, Josef}
\index{Geuvers, Herman}
+\index{Gonthier, Georges}
+\index{Ziliani, Beta}
+\index{Nanevski, Aleksandar}
+\index{Dreyer, Derek}
\begin{chunk}{axiom.bib}
@article{Tank13,
 author = "Tankink, Carst and Kaliszyk, Cezary and Urban, Josef and
 Geuvers, Herman",
 title = {{Formal Mathematics on Display: A Wiki for Flyspeck}},
 journal = "LNCS",
 volume = "7961",
 year = "2013",
 abstract =
 "The AGORA system is a prototype ``Wiki for Formal Mathematics'',
 with an aim to support developing and documenting large
 formalizations of mathematics in a proof assistant. The functions
 implemented in AGORA include inbrowser editing, strong AI/ATP
 proof advice, verification, and HTML rendering. The HTML rendering
 contains hyperlinks and provides ondemand explanation of the
 proof state for each proof step. In the present paper we show the
 prototype Flyspeck Wiki as an instance of AGORA for HOL Light
 formalizations. The wiki can be used for formalizations of
 mathematics and for writing informal wiki pages about
 mathematics. Such informal pages may contain islands of formal
 text, which is used here for providing an initial crosslinking
 between Hales's informal Flyspeck book, and the formal Flyspeck
 development.

 The AGORA platform intends to address distributed wikistyle
 collaboration on large formalization projects, in particular both
 the aspect of immediate editing, verification and rendering of
 formal code, and the aspect of gradual and mutual refactoring and
 correspondence of the initial informal text and its
 formalization. Here, we highlight these features with the Flyspeck
 Wiki.",
 paper = "Tank13.pdf"
}

\end{chunk}

+\begin{chunk}{axiom.bib}
+@inproceedings{Gont11b,
+ author = "Gonthier, Georges and Ziliani, Beta and Nanevski, Aleksandar
+ and Dreyer, Derek",
+ title = {{How to make Ad Hoc Proof Automation less Ad Hoc}},
+ booktitle = "Int. Conf. on Functional Programming",
+ publisher = "ACM",
+ pages = "163175",
+ year = "2011"
}
\end{chunk}
\index{Asperti, Andrea}
\index{Ricciotti, Wilmer}
+\index{Grabowski, Adam}
+\index{Kornilowicz, Artur}
+\index{Naumowicz, Adam}
\begin{chunk}{axiom.bib}
@article{Aspe12a,
 author = "Asperti, Andrea and Ricciotti, Wilmer",
 title = {{A Web Interface for Matita}},
 journal = "LNCS",
 volume = "7362",
 year = "2012",
 paper = "Aspe12a.pdf"
}

\end{chunk}

+\begin{chunk}{axiom.bib}
+@article{Grab10,
+ author = "Grabowski, Adam and Kornilowicz, Artur and Naumowicz, Adam",
+ title = {{Mizar in a Nutshell}},
+ journal = "J. Formaliz. Reason.",
+ volume = "3",
+ number = "2",
+ pages = "153245",
+ year = "2010"
}
\end{chunk}
\index{Alama, Jesse}
\index{Mamane, Lionel}
\index{Urban, Josef}
+\index{Hales, Thomas C.}
\begin{chunk}{axiom.bib}
@article{Alam12,
 author = "Alama, Jesse and Mamane, Lionel and Urban, Josef",
 title = {{Dependencies in Formal Mathematics: Applications and
 Extraction for Coq and Mizar}},
 journal = "LNCS",
 volume = "7362",
 year = "2012",
 abstract =
 "Two methods for extracting detailed formal dependencies from the
 Coq and Mizar system are presented and compared. The methods are
 used for dependency extraction from two large mathematical
 repositories: the Coq Repository at Nijmegen and the Mizar
 Mathematical Library. Several applications of the detailed
 dependency analysis are described and proposed. Motivated by the
 different applications, we discuss the various kinds of
 dependencies that we are interested in, and the suitability of
 various dependency extraction methods.",
 paper = "Alam12.pdf"
}

\end{chunk}

+\begin{chunk}{axiom.bib}
+@article{Hale07,
+ author = "Hales, Thomas C.",
+ title = {{The Jordan Curve Theorem, Formally and Informally}},
+ journal = "Amer. Math. Monthly",
+ volume = "114",
+ number = "10",
+ pages = "882894",
+ year = "2007"
}
\end{chunk}
\index{Hetzl, Stefan}
+\index{Saibi, Amokrane}
\begin{chunk}{axiom.bib}
@article{Hetz12,
 author = "Hetzl, Stefan",
 title = {{Project Presentation: Algorithmic Structuring and
 Compression of Proofs (ASCOP)}},
 journal = "LNCS",
 volume = "7362",
 year = "2012",
 abstract =
 "Computer-generated proofs are typically analytic, i.e. they
 essentially consist only of formulas which are present in the
 theorem that is shown. In contrast, mathematical proofs written by
 humans almost never are: they are highly structured due to the use
 of lemmas.

 The ASCOP project aims at developing algorithms and software which
 structure and abbreviate analytic proofs by computing useful
 lemmas. These algorithms will be based on recent groundbreaking
 results establishing a new connection between proof theory and
 formal language theory. This connection allows the application of
 efficient algorithms based on formal grammars to structure and
 compress proofs.",
 paper = "Hetz12.pdf"
}

\end{chunk}

+\begin{chunk}{axiom.bib}
+@inproceedings{Saib97,
+ author = "Saibi, Amokrane",
+ title = {{Typing Algorithm in Type Theory with Inheritance}},
+ booktitle = "Symp. on Principles of Programming Languages",
+ publisher = "ACM",
+ pages = "292--301",
+ year = "1997"
+}
+\end{chunk}
+
+\index{Shankar, Natarajan}
+\index{Owre, Sam}
+\begin{chunk}{axiom.bib}
+@inproceedings{Shan00,
+ author = "Shankar, Natarajan and Owre, Sam",
+ title = {{Principles and Pragmatics of Subtyping in PVS}},
+ booktitle = "Recent Trends in Algebraic Development Techniques",
+ publisher = "Springer",
+ pages = "37--52",
+ year = "2000"
}
\end{chunk}
+\index{Strub, PierreYves}
\begin{chunk}{axiom.bib}
@misc{Gapt19,
 author = "Unknown",
 title = {{GAPT: General Architecture for Proof Theory}},
 year = "2019",
 link = "\url{https://www.logic.at/gapt/downloads/gaptusermanual.pdf}",
 paper = "Gapt19.pdf"
}

\end{chunk}

+\begin{chunk}{axiom.bib}
+@inproceedings{Stru10,
+ author = "Strub, PierreYves",
+ title = {{Coq Modulo Theory}},
+ booktitle = "19th Annual Conf. on Computer Science Logic",
+ publisher = "Springer",
+ pages = "549643",
+ year = "2010"
}
\end{chunk}
\index{Horozal, Fulya}
\index{Kohlhase, Michael}
\index{Rabe, Florian}
+\index{Troelstra, A.S.}
+\index{van Dalen, Dirk}
\begin{chunk}{axiom.bib}
@article{Horo12,
 author = "Horozal, Fulya and Kohlhase, Michael and Rabe, Florian",
 title = {{Extending MKM Formats at the Statement Level}},
 journal = "LNCS",
 volume = "7362",
 year = "2012",
 abstract =
 "Successful representation and markup languages find a good
 balance between giving the user freedom of expression, enforcing
 the fundamental semantic invariants of the modeling framework, and
 allowing machine support for the underlying semantic
 structures. MKM formats maintain strong invariants while trying to
 be foundationally unconstrained, which makes the induced design
 problem particularly challenging.

 In this situation, it is standard practice to define a minimal
 core language together with a scripting/macro facility for
 syntactic extensions that map into the core language. In practice,
 such extension facilities are either fully unconstrained (making
 invariants and machine support difficult) or limited to the object
 level (keeping the statement and theory levels fixed).

 In this paper we develop a general methodology for extending MKM
 representation formats at the statement level. We show the utility
 (and indeed necessity) of statementlevel extensions by
 redesigning the OMDoc format into a minimal, regular core language
 (strict OMDoc) and an extension (pragmatic OMDoc) that maps into
 strict OMDoc.",
 paper = "Horo12.pdf"
}

\end{chunk}

\index{Iancu, Mihnea}
\index{Rabe, Florian}
\begin{chunk}{axiom.bib}
@article{Ianc12,
 author = "Iancu, Mihnea and Rabe, Florian",
 title = {{Management of Change in Declarative Languages}},
 journal = "LNCS",
 volume = "7362",
 year = "2012",
 abstract =
 "Due to the high degree of interconnectedness of formal
 mathematical statements and theories, human authors often have
 difficulties anticipating and tracking the effects of a change in
 large bodies of symbolic mathematical knowledge. Therefore, the
 automation of change management is desirable. But while computers
 can in principle detect and propagate changes automatically, this
 process must take the semantics of the underlying mathematical
 formalism into account. Therefore, concrete management of change
 solutions are difficult to realize.

 The MMT language was designed as a generic declarative language
 that captures universal structural features while avoiding a
 commitment to a particular formalism. Therefore, it provides a
 promising framework for the systematic study of changes in
 declarative languages. We leverage this framework by providing a
 generic change management solution at the MMT level, which can be
 instantiated for arbitrary specific languages.",
 paper = "Ianc12.pdf"
}

\end{chunk}

\index{Wilson, David J.}
\index{Bradford, Russell J.}
\index{Davenport, James H.}
\begin{chunk}{axiom.bib}
@article{Wils12,
 author = "Wilson, David J. and Bradford, Russell J. and
 Davenport, James H.",
 title = {{Speeding Up Cylindrical Algebraic Decomposition by
 Groebner Bases}},
 journal = "LNCS",
 volume = "7362",
 year = "2012",
 abstract =
 "Groebner Bases and Cylindrical Algebraic Decomposition are
 generally thought of as two, rather different, methods of looking
 at systems of equations and, in the case of Cylindrical Algebraic
 Decomposition, inequalities. However, even for a mixed system of
 equalities and inequalities, it is possible to apply Groebner
 bases to the (conjoined) equalities before invoking CAD. We see
 that this is, quite often but not always, a beneficial
 preconditioning of the CAD problem.

 It is also possible to precondition the (conjoined) inequalities
 with respect to the equalities, and this can also be useful in
 many cases.",
 paper = "Wils12.pdf"
}

\end{chunk}

+\begin{chunk}{axiom.bib}
+@book{Troe88,
+ author = "Troelstra, A.S. and van Dalen, Dirk",
+ title = {{Constructivism in Mathematics, Vol 2}},
+ publisher = "NorthHolland",
+ year = "1988"
}
\end{chunk}
\index{Horozal, Fulya}
\index{Iacob, Alin}
\index{Jucovschi, Constantin}
\index{Kohlhase, Michael}
\index{Rabe, Florian}
+\index{Tait, William W.}
\begin{chunk}{axiom.bib}
@article{Horo11,
 author = "Horozal, Fulya and Iacob, Alin and Jucovschi, Constantin
 and Kohlhase, Michael and Rabe, Florian",
 title = {{Combining Source, Content, Presentation, Narration, and
 Relational Presentation}},
 journal = "LNCS",
 volume = "6824",
 year = "2011",
 abstract =
 "In this paper, we try to bridge the gap between different
 dimensions / incarnations of mathematical knowledge: MKM
 representation formats (content), their humanoriented languages
 (source, presentation), their narrative linearizations
 (narration), and relational presentations used in the semantic
 web. The central idea is to transport solutions from software
 engineering to MKM regarding the parallel interlinked maintenance
 of the different incarnations. We show how the integration of
 these incarnations can be utilized to enrich the authoring and
 viewing processes, and we evaluate our infrastructure on the LATIN
 Logic Atlas, a modular library of logic formalizations, and a set
 of computer science lecture notes written in STEX  a modular,
 semantic variant of LATEX.",
 paper = "Horo11.pdf"
}

\end{chunk}

+\begin{chunk}{axiom.bib}
+@article{Tait86,
+ author = "Tait, William W.",
+ title = {{Truth and Proof: The Platonism of Mathematics}},
+ link = "\url{logic.harvard.edu/EFI_Tait_PlatonisminMathematics.pdf}",
+ journal = "Synthese",
+ volume = "69",
+ pages = "341370",
+ year = "1986",
+ paper = "Tait86.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Krebbers, Robbert}
\index{Spitters, Bas}
+\index{Werner, Benjamin}
\begin{chunk}{axiom.bib}
@article{Kreb11,
 author = "Krebbers, Robbert and Spitters, Bas",
 title = {{Computer Certified Efficient Exact Reals in Coq}},
 journal = "LNCS",
 volume = "6824",
 year = "2011",
 abstract =
 "Floating point operations are fast, but require continuous effort
 on the part of the user in order to ensure that the results are
 correct. This burden can be shifted away from the user by
 providing a library of exact analysis in which the computer
 handles the error estimates. We provide an implementation of the
 exact real numbers in the Coq proof assistant. This improves on
 the earlier Coqimplementation by O'Connor in two ways: we use
 dyadic rationals built from the machine integers and we optimize
 computation of power series by using approximate
 division. Moreover, we use type classes for clean mathematical
 interfaces. This appears to be the first time that type classes
 are used in heavy computation. We obtain over a 100 times speed up
 of the basic operations and indications for improving the Coq system.",
 paper = "Kreb11.pdf"
}

\end{chunk}

\index{Rabe, Florian}
\index{Kohlhase, Michael}
\index{Coen, Claudio Sacerdoti}
+\begin{chunk}{axiom.bib}
+@inbook{Wern97,
+ author = "Werner, Benjamin",
+ title = {{Sets in Types, Types in Sets}},
+ booktitle = "Theoretical Aspects of Computer Software",
+ publisher = "Springer",
+ chapter = "unknown",
+ pages = "530--546",
+ year = "1997"
+}
+
+\end{chunk}
+
\begin{chunk}{axiom.bib}
@article{Rabe11,
 author = "Rabe, Florian and Kohlhase, Michael and Coen, Claudio Sacerdoti",
 title = {{A Foundational View on Integration Problems}},
 journal = "LNCS",
 volume = "6824",
 year = "2011",
 abstract =
 "The integration of reasoning and computation services across
 system and language boundaries is a challenging problem of
 computer science. In this paper, we use integration for the
 scenario where we have two systems that we integrate by moving
 problems and solutions between them. While this scenario is often
 approached from an engineering perspective, we take a foundational
 view. Based on the generic declarative language MMT, we develop a
 theoretical framework for system integration using theories and
 partial theory morphisms. Because MMT permits representations of
 the metalogical foundations themselves, this includes integration
 across logics. We discuss safe and unsafe integration schemes and
 devise a general form of safe integration.",
 paper = "Rabe11.pdf"
}

\end{chunk}

+\index{Parigot, Michel}
+\begin{chunk}{axiom.bib}
+@article{Pari92,
+ author = "Parigot, Michel",
+ title = {{$\lambda\mu$-Calculus: An Algorithmic Interpretation of
+ Classical Natural Deduction}},
+ journal = "LNCS",
+ volume = "624",
+ pages = "190--201",
+ year = "1992",
+ paper = "Pari92.pdf"
+}
\end{chunk}
\index{Asperti, Andrea}
\index{Coen, Claudio Sacerdoti}
+\index{Parigot, Michel}
\begin{chunk}{axiom.bib}
@article{Aspe10a,
 author = "Asperti, Andrea and Coen, Claudio Sacerdoti",
 title = {{Some Considerations on the Usability of Interactive Provers}},
 journal = "LNCS",
 volume = "6167",
 year = "2010",
 abstract =
 "In spite of the remarkable achievements recently obtained in the
 field of mechanization of formal reasoning, the overall usability
 of interactive provers does not seem to be sensibly improved since
 the advent of the ``second generation'' of systems, in the mid of
 the eighties. We try to analyze the reasons of such a slow
 progress, pointing out the main problems and suggesting some
 possible research directions.",
 paper = "Aspe10a.pdf",
 keywords = "DONE"
+@article{Pari92a,
+ author = "Parigot, Michel",
+ title = {{Recursive Programming with Proofs}},
+ journal = "Theoretical Computer Science",
+ volume = "94",
+ pages = "335--356",
+ year = "1992",
+ paper = "Pari92a.pdf"
}
\end{chunk}
\index{Calmet, Jacques}
\index{Campbell, John A.}
+\index{Murthy, Chetan R.}
\begin{chunk}{axiom.bib}
@article{Calm10,
 author = "Calmet, Jacques and Campbell, John A.",
 title = {{A Revisited Perspective on Symbolic Mathematical Computing
 and Artificial Intelligence}},
 journal = "LNCS",
 volume = "6167",
 year = "2010",
 abstract =
 "We provide a perspective on the current state and possible future
 of links between symbolic mathematical computing and artificial
 intelligence, on the occasion of the 10th biennial conference
 (AISMC, later AISC) devoted to those connections. It follows a
 similar perspective expressed for the first such conference in 1992
 and then revised and expanded 5 years later. Issues related to the
 computational management of mathematical knowledge are
 highlighted.",
 paper = "Calm10.pdf"
+@techreport{Murt91,
+ author = "Murthy, Chetan R.",
+ title = {{Classical Proofs as Programs: How, What and Why}},
+ type = "technical report",
+ institution = "Cornell University",
+ number = "TR91-1215",
+ year = "1991",
+ abstract =
+ "We recapitulate Friedman's conservative extension result of
+ (suitable) classical over constructive systems for $\Pi_2^0$
+ sentences, viewing it in two lights: as a translation of programs
+ from an almost-functional language (with $C$) back to its
+ functional core, and as a translation of a constructive logic for
+ a functional language to a classical logic for an
+ almost-functional language. We investigate the computational
+ properties of the translation and of classical proofs and
+ characterize the classical proofs which give constructions in
+ concrete, computational terms, rather than logical terms. We
+ characterize different versions of Friedman's translation as
+ translating slightly different almost-functional languages to a
+ functional language, thus giving a general method for arriving at
+ a sound reduction semantics for an almost-functional language with
+ a mixture of eager and lazy constructors and destructors, as well
+ as integers, pairs, unions, etc. Finally, we describe how to use
+ classical reasoning in a disciplined manner in giving classical
+ (yet constructivizable) proofs of sentences of greater complexity
+ than $\Pi_2^0$. This direction offers the possibility of
+ applying classical reasoning to more general programming problems.",
+ paper = "Murt91.pdf"
+}
+
+\end{chunk}
+
+\index{Turing, A. M.}
+\begin{chunk}{axiom.bib}
+@misc{Turi48,
+ author = "Turing, A. M.",
+ title = {{Intelligent Machinery}},
+ year = "1948",
+ link = "\url{https://weightagnostic.github.io/papers/turning1948.pdf}",
+ abstract =
+ "The possible ways in which machinery might be made to show
+ intelligent behaviour are discussed. The analogy with the human
+ brain is used as a guiding principle. It is pointed out that the
+ potentialities of the human intelligence can only be realized if
+ suitable education is provided. The investigation mainly centres
+ round an analogous teaching process applied to machines. The idea
+ of an unorganized machine is defined, and it is suggested that the
+ infant human cortex is of this nature. Simple examples of such
+ machines are given, and their education by means of rewards and
+ punishments is discussed. In one case the education process is
+ carried through until the organization is similar to that of an
+ ACE.",
+ paper = "Turi48.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Carette, Jacques}
\index{Sexton, Alan P.}
\index{Sorge, Volker}
\index{Watt, Stephen M.}
+\index{Bailleux, Olivier}
\begin{chunk}{axiom.bib}
@article{Care10a,
 author = "Carette, Jacques and Sexton, Alan P. and Sorge, Volker and
 Watt, Stephen M.",
 title = {{Symbolic Domain Decomposition}},
 journal = "LNCS",
 volume = "6167",
 year = "2010",
+@misc{Bail19,
+ author = "Bailleux, Olivier",
+ title = {{Subsumption-driven Clause Learning with DPLL+Restarts}},
+ year = "2019",
+ link = "\url{https://arxiv.org/pdf/1906.07508.pdf}",
abstract =
 "Decomposing the domain of a function into parts has many uses in
 mathematics. A domain may naturally be a union of pieces, a
 function may be defined by cases, or different boundary conditions
 may hold on different regions. For any particular problem the
 domain can be given explicitly, but when dealing with a family of
 problems given in terms of symbolic parameters, matters become
 more difficult. This article shows how hybrid sets, that is
 multisets allowing negative multiplicity, may be used to express
 symbolic domain decompositions in an efficient, elegant and
 uniform way, simplifying both computation and reasoning. We apply
 this theory to the arithmetic of piecewise functions and symbolic
 matrices and show how certain operations may be reduced from
 exponential to linear complexity.",
 paper = "Care10a.pdf"
+ "We propose to use a DPLL+restart to solve SAT instances by
+ successive simplifications based on the production of clauses that
+ subsume the initial clauses. We show that this approach allows the
+ refutation of pebbling formulae in polynomial time and linear
+ space, as effectively as with a CDCL solver.",
+ paper = "Bail19.pdf"
}
\end{chunk}
\index{Cohen, Cyril}
\index{Mahboubi, Assia}
+\index{Clark, Kevin}
\begin{chunk}{axiom.bib}
@article{Cohe10,
 author = "Cohen, Cyril and Mahboubi, Assia",
 title = {{A Formal Quantifier Elimination for Algebraically Closed Fields}},
 journal = "LNCS",
 volume = "6167",
 year = "2010",
+@misc{Clar11,
+ author = "Clark, Kevin",
+ title = {{An Algorithm that Decides PRIMES in Polynomial Time}},
+ year = "2011",
+ link =
+ "\url{https://sites.math.washington.edu/~morrow/336_11/papers/kevin.pdf}",
+ paper = "Clar11.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{van Tonder, Rijnard}
+\index{Le Goues, Claire}
+\begin{chunk}{axiom.bib}
+@inproceedings{Tond19,
+ author = "van Tonder, Rijnard and Le Goues, Claire",
+ title = {{Lightweight Multi-Language Syntax Transformation with
+ Parser Parser Combinators}},
+ booktitle = "PLDI",
+ publisher = "ACM",
+ isbn = "978-1-4503-6712-7",
+ year = "2019",
+ paper = "Tond19.pdf"
+}
+
+\end{chunk}
+
+\index{Qiu, Xiaokang}
+\index{Garg, Pranav}
+\index{Stefanescu, Andrei}
+\index{Madhusudan, P.}
+\begin{chunk}{axiom.bib}
+@misc{Qiux13,
+ author = "Qiu, Xiaokang and Garg, Pranav and Stefanescu, Andrei and
+ Madhusudan, P.",
+ title = {{Natural Proofs for Structure, Data, and Separation}},
+ year = "2013",
+ link = "\url{http://madhu.cs.illinois.edu/dryad_full_version.pdf}",
+ abstract =
+ "We propose {\sl natural proofs} for reasoning with programs that
+ manipulate data-structures against complex specifications ---
+ specifications that describe the structure of the heap, the data
+ stored within it, and separation and framing of
+ substructures. Natural proofs are a subclass of proofs that are
+ amenable to completely automated reasoning, that provide sound but
+ incomplete procedures, and that capture common reasoning tactics
+ in program verification. We develop a dialect of separation logic
+ over heaps, called DRYAD, with recursive definitions that avoids
+ explicit quantification. We develop ways to reason with heaplets
+ using classical logic over the theory of sets, and develop natural
+ proofs for reasoning using proof tactics involving disciplined
+ unfoldings and formula abstractions. Natural proofs are encoded
+ into decidable theories of first-order logic so as to be
+ discharged using SMT solvers.
+
+ We also implement the technique and show that a large class of
+ more than 100 correct programs that manipulate data-structures are
+ amenable to full functional correctness using the proposed natural
+ proof method. These programs are drawn from a variety of sources
+ including standard data-structures, the Schorr-Waite algorithm for
+ garbage collection, a large number of low-level C routines from
+ the Glib library, the OpenBSD library and the Linux kernel, and
+ routines from a secure verified OS-browser project. Our work is
+ the first that we know of that can handle such a wide range of
+ full functional verification properties of heaps automatically,
+ given pre/post and loop invariant annotations. We believe that
+ this work paves the way for the deductive verification technology
+ to be used by programmers who do not (and need not) understand the
+ internals of the underlying logic solvers, significantly
+ increasing their applicability in building reliable systems.",
+ paper = "Qiux13.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Propp, James}
+\begin{chunk}{axiom.bib}
+@misc{Prop13,
+ author = "Propp, James",
+ title = {{Real Analysis in Reverse}},
+ year = "2013",
+ link = "\url{https://arxiv.org/pdf/1204.4483.pdf}",
abstract =
 "We prove formally that the first order theory of algebraically
 closed fields enjoys quantifier elimination, and hence is
 decidable. This proof is organized in two modular parts. We first
 reify the first order theory of rings and prove that quantifier
 elimination leads to decidability. Then we implement an algorithm
 which constructs a quantifier free formula from any first order
 formula in the theory of ring. If the underlying ring is in fact
 an algebraically closed field, we prove that the two formulas have
 the same semantic. The algorithm producing the quantifier free
 formula is programmed in continuation passing style, which leads
 to both a concise program and an elegant proof of semantics
 correctness.",
 paper = "Cohe10.pdf"
+ "Many of the theorems of real analysis, against the background of
+ the ordered field axioms, are equivalent to Dedekind completeness,
+ and hence can serve as completeness axioms for the reals. In the
+ course of demonstrating this, the article offers a tour of some
+ less-familiar ordered fields, provides some of the relevant
+ history, and considers pedagogical implications.",
+ paper = "Prop13.pdf"
}
\end{chunk}
\index{Dominguez, Cesar}
\index{Rubio, Julio}
+\index{Coquand, Thierry}
\begin{chunk}{axiom.bib}
@article{Domi10,
 author = "Dominguez, Cesar and Rubio, Julio",
 title = {{Computing in Coq with Infinite Algebraic Data Structures}},
 journal = "LNCS",
 volume = "6167",
 year = "2010",
+@techreport{Coqu86,
+ author = "Coquand, Thierry",
+ title = {{An Analysis of Girard's Paradox}},
+ year = "1986",
+ institution = "INRIA Centre de Rocquencourt",
+ number = "531",
abstract =
 "Computational content encoded into constructive type theory
 proofs can be used to make computing experiments over concrete
 data structures. In this paper, we explore this possibility when
 working in Coq with chain complexes of infinite type (that is to
 say, generated by infinite sets) as a part of the formalization of
 a hierarchy of homological algebra structures.",
 paper = "Domi10.pdf"
+ "We study the consistency of a few formal systems specially some
+ extensions of Church's calculus and the construction system. We
+ show that Church's calculus is not compatible with the notion of
+ secondorder type. We apply this result for showing that the
+ calculus of construction with four levels is inconsistent. We
+ suggest finally some consistent extensions of these two calculi.",
+ paper = "Coqu86.pdf"
}
\end{chunk}
\index{Kerber, Manfred}
+\index{Grossman, Dan}
\begin{chunk}{axiom.bib}
@article{Kerb10,
 author = "Kerber, Manfred",
 title = {{Proofs, Proofs, Proofs, and Proofs}},
 journal = "LNCS",
 volume = "6167",
 year = "2010",
+@inproceedings{Gros02,
+ author = "Grossman, Dan",
+ title = {{Existential Types for Imperative Languages}},
+ booktitle = "Euro. Symp. on Prog. Langs. and Systems",
+ publisher = "SpringerVerlag",
+ pages = "21--35",
+ year = "2002",
+ isbn = "3-540-43363-5",
+ abstract =
+ "We integrate existential types into a strongly typed C-like
+ language. In particular, we show how a bad combination of
+ existential types, mutation, and aliasing can cause a subtle
+ violation of type safety. We explore two independent ways to
+ strengthen the type system to restore safety. One restricts the
+ mutation of existential packages. The other restricts the types of
+ aliases of existential packages. We use our framework to explain why
+ other languages with existential types are safe.",
+ paper = "Gros02.pdf"
+}
+
+\end{chunk}
+
+\index{Jung, Ralf}
+\index{Jourdan, JacquesHenri}
+\index{Krebbers, Robbert}
+\index{Dreyer, Derek}
+\begin{chunk}{axiom.bib}
+@inproceedings{Jung18,
+ author = "Jung, Ralf and Jourdan, JacquesHenri and
+ Krebbers, Robbert and Dreyer, Derek",
+ title = {{RustBelt: Securing the Foundations of the Rust Programming
+ Language}},
+ booktitle = "POPL '18",
+ publisher = "ACM",
+ year = "2018",
abstract =
 "In logic there is a clear concept of what constitutes a proof and
 what not. A proof is essentially defined as a finite sequence of
 formulae which are either axioms or derived by proof rules from
 formulae earlier in the sequence. Sociologically, however, it is
 more difficult to say what should constitute a proof and what
 not. In this paper we will look at different forms of proofs and
 try to clarify the concept of proof in the wider meaning of the
 term. This has implications on how proofs should be represented
 formally.",
 paper = "Kerb10.pdf",
+ "Rust is a new systems programming language that promises to
+ overcome the seemingly fundamental tradeoff between high-level
+ safety guarantees and low-level control over resource
+ management. Unfortunately, none of Rust's safety claims have been
+ formally proven, and there is good reason to question whether they
+ actually hold. Specifically, Rust employs a strong,
+ ownership-based type system, but then extends the expressive
+ power of this core type system through libraries that internally
+ use unsafe features. In this paper, we give the first formal (and
+ machine-checked) safety proof for a language representing a
+ realistic subset of Rust. Our proof is extensible in the sense
+ that, for each new Rust library that uses unsafe features, we can
+ say what verification condition it must satisfy in order for it to
+ be deemed a safe extension to the language. We have carried out
+ this verification for some of the most important libraries that
+ are used throughout the Rust ecosystem.",
+ paper = "Jung18.pdf"
+}
+
+\end{chunk}
+
+\index{Grossman, Dan}
+\begin{chunk}{axiom.bib}
+@article{Gros06,
+ author = "Grossman, Dan",
+ title = {{Quantified Types for Imperative Languages}},
+ journal = "Trans. on Prog. Lang. and Systems",
+ volume = "28",
+ number = "3",
+ year = "2006",
+ pages = "429--475",
+ abstract =
+ "We describe universal types, existential types, and type
+ constructors in Cyclone, a strongly-typed C-like language. We show
+ how the language naturally supports first-class polymorphism and
+ polymorphic recursion while requiring an acceptable amount of
+ explicit type information. More importantly, we consider the
+ soundness of type variables in the presence of C-style mutation
+ and the address-of operator. For polymorphic references, we
+ describe a solution more natural for the C level than the ML-style
+ ``value restriction''. For existential types, we discover and
+ subsequently avoid a subtle unsoundness issue resulting from the
+ address-of operator. We develop a formal abstract machine and
+ type-safety proof that captures the essence of type variables at
+ the C level.",
+ paper = "Gros06.pdf"
+}
+
+\end{chunk}
+
+\index{Sarkar, Dipanwita}
+\index{Waddell, Oscar}
+\index{Dybvig, R. Kent}
+\begin{chunk}{axiom.bib}
+@inproceedings{Sark04,
+ author = "Sarkar, Dipanwita and Waddell, Oscar and Dybvig, R. Kent",
+ title = {{A Nanopass Infrastructure for Compiler Education}},
+ booktitle = "9th ACM SIGPLAN",
+ publisher = "ACM",
+ pages = "201--212",
+ year = "2004",
+ isbn = "1-58113-905-5",
+ abstract =
+ "A compiler structured as a small number of monolithic passes is
+ difficult to understand and difficult to maintain. The steep
+ learning curve is daunting, and even experienced developers find
+ that modifying existing passes is difficult and often introduces
+ subtle and tenacious bugs. These problems are especially
+ frustrating when the developer is a student in a compiler
+ class. An attractive alternative is to structure a compiler as a
+ collection of many fine-grained passes, each of which performs a
+ single task. This structure aligns the implementation of a
+ compiler with its logical organization, simplifying development,
+ testing, and debugging. This paper describes the methodology and
+ tools comprising a framework for constructing such compilers.",
+ paper = "Sark04.pdf",
keywords = "printed"
}
\end{chunk}
\index{Nederpelt, R.}
\index{Kamareddine, F.}
+\index{Sanchez-Stern, Alex}
+\index{Alhessi, Yousef}
+\index{Saul, Lawrence}
+\index{Lerner, Sorin}
\begin{chunk}{axiom.bib}
@inproceedings{Nede01,
 author = "Nederpelt, R. and Kamareddine, F.",
 title = {{An Abstract Syntax for a Formal Language of Mathematics}},
 booktitle = "4th int. Tbilisi Symp. on Language, Logic, and Computation",
 publisher = "unknown",
 year = "2001",
+@misc{Sanc19,
+ author = "Sanchez-Stern, Alex and Alhessi, Yousef and Saul, Lawrence
+ and Lerner, Sorin",
+ title = {{Generating Correctness Proofs with Neural Networks}},
+ year = "2019",
+ link = "\url{https://arxiv.org/pdf/1907.07794.pdf}",
abstract =
 "This paper provides an abstract syntax for a formal language of
 mathematics. We call our language Weak Type Theory (abbreviated
 WTT). WTT will be as faithful as possible to the mathematician's
 language yet will be formal and will not allow ambiguities. WTT
 can be used as an intermediary between the natural language of
 the mathematician and the formal language of the logician. As far
 as we know, this is the first extensive formalization of an
 abstract syntax of a formal language of mathematics.",
 paper = "Nede01.pdf"
+ "Foundational verification allows programmers to build software
+ which has been empirically shown to have high levels of assurance
+ in a variety of important domains. However, the cost of producing
+ foundationally verified software remains prohibitively high for
+ most projects, as it requires significant manual effort by highly
+ trained experts. In this paper we present Proverbot9001, a proof
+ search system using machine learning techniques to produce proofs
+ of software correctness in interactive theorem provers. We
+ demonstrate Proverbot9001 on the proof obligations from a large
+ practical proof project, the CompCert verified C compiler, and
+ show that it can effectively automate what was previously manual
+ proofs, automatically solving 15.77\% of proofs in our test
+ dataset. This corresponds to an over 3X improvement over the prior
+ state of the art machine learning technique for generating proofs
+ in Coq.",
+ paper = "Sanc19.pdf"
}
\end{chunk}
\index{Barbeau, Edward J.}
+\index{Christiansen, David Thrane}
\begin{chunk}{axiom.bib}
@book{Barb00,
 author = "Barbeau, Edward J.",
 title = {{Mathematical Fallacies, Flaws, and Flimflam}},
 publisher = "American Mathematical Society",
 year = "2000",
 paper = "Barb00.pdf"
+@misc{Chri12,
+ author = "Christiansen, David Thrane",
+ title = {{Converting Regular Expressions to Discrete Finite Automata}},
+ year = "2012",
+ link = "\url{http://davidchristiansen.dk/tutorials/regextonfa.pdf}",
+ paper = "Chri12.pdf"
}
\end{chunk}
\index{Baker, Josef B.}
\index{Sexton, Alan P.}
\index{Sorge, Volker}
+\index{Christiansen, David Thrane}
\begin{chunk}{axiom.bib}
@article{Bake09,
 author = "Baker, Josef B. and Sexton, Alan P. and Sorge, Volker",
 title = {{A Linear Grammar Approach to Mathematical Formula
 Recognition from PDF}},
 journal = "LNCS",
 volume = "5625",
 year = "2009",
 abstract =
 "Many approaches have been proposed over the years for the
 recognition of mathematical formulae from scanned documents. More
 recently a need has arisen to recognise formulae from PDF
 documents. Here we can avoid ambiguities introduced by traditional
 OCR approaches and instead extract perfect knowledge of the
 characters used in formulae directly from the document. This can
 be exploited by formula recognition techniques to achieve correct
 results and high performance.
+@misc{Chri13,
+ author = "Christiansen, David Thrane",
+ title = {{Bidirectional Typing Rules: A Tutorial}},
+ year = "2013",
+ link = "\url{http://davidchristiansen.dk/tutorials/bidirectional.pdf}",
+ paper = "Chri13.pdf"
+}
+
+\end{chunk}
 In this paper we revisit an old grammatical approach to formula
 recognition, that of Anderson from 1968, and assess its
 applicability with respect to data extracted from PDF
 documents. We identify some problems of the original method when
 applied to common mathematical expressions and show how they can
 be overcome. The simplicity of the original method leads to a very
 efficient recognition technique that not only is very simple to
 implement but also yields results of high accuracy for the
 recognition of mathematical formulae from PDF documents.",
 paper = "Bake09.pdf"
+\index{Christiansen, David Thrane}
+\begin{chunk}{axiom.bib}
+@misc{Chri14,
+ author = "Christiansen, David Thrane",
+ title = {{A Tutorial on Polymorphic Type Derivations}},
+ year = "2014",
+ link = "\url{http://davidchristiansen.dk/tutorials/typeruletutorial.pdf}",
+ paper = "Chri14.pdf"
}
\end{chunk}
\index{Biha, Sidi Ould}
+\index{Christiansen, David Thrane}
\begin{chunk}{axiom.bib}
@article{Biha09,
 author = "Biha, Sidi Ould",
 title = {{Finite Group Representation Theory with Coq}},
 journal = "LNCS",
 volume = "5625",
 year = "2009",
+@misc{Chri18,
+ author = "Christiansen, David Thrane",
+ title = {{A Little Taste of Dependent Types}},
+ year = "2018",
+ link = "\url{https://www.youtube.com/watch?v=VxINoKFmS4}",
abstract =
 "Representation theory is a branch of algebra that allows the
 study of groups through linear applications, i.e. matrices. Thus
 problems in abstract groups can be reduced to problems on
 matrices. Representation theory is the basis for character
 theory. In this paper we present a formalization of finite groups
 representation theory in the Coq system that includes a
 formalization of Maschke's theorem on reducible finite group algebra.",
 paper = "Biha09.pdf"
+ "Dependent types let us use the same programming language for
+ compiletime and runtime code, and are inching their way towards the
+ mainstream from research languages like Coq, Agda and Idris. Dependent
+ types are useful for programming, but they also unite programming and
+ mathematical proofs, allowing us to use the tools and techniques we
+ know from programming to do math.
+
+ The essential beauty of dependent types can sometimes be hard to find
+ under layers of powerful automatic tools. The Little Typer is an
+ upcoming book on dependent types in the tradition of the The Little
+ Schemer that features a tiny dependently typed language called Pie. We
+ will demonstrate a proof in Pie that is also a program."
}
\end{chunk}
\index{Calmet, Jacques}
+\index{Christiansen, David Thrane}
\begin{chunk}{axiom.bib}
@article{Calm09,
 author = "Calmet, Jacques",
 title = {{AbstractionBased Information Technology: A Framework for
 Open Mechanized Reasoning}},
 journal = "LNCS",
 volume = "5625",
 year = "2009",
 abstract =
 "OMRS (Open Mechanized Reasoning Systems) was designed for
 Automated Theorem Proving and then extended to Computer
 Algebra. These are the two domains at the heart of the Calculemus
 approach. An obvious question is to assess whether such an
 approach can be extended to new domains either within AI or
 outside of AI. There have been several attempts to turn the world
 into a computational system. This talk stays away from such
 general attempts and introduces a framework that is fully set
 within AI. It extends the basic concepts of OMRS to diverse fields
 ranging from information technology to sociology through law as
 illustrated by examples. The main motivation is to claim that
 whatever the selected approach, Artificial Intelligence is gaining
 enough strength and power to reach new frontiers and to turn
 challenges that are not a priori of a purely computational nature
 into AI domains.",
 paper = "Calm09.pdf"
}

\end{chunk}

\index{Calude, Cristian S.}
\index{Muller, Christine}
\begin{chunk}{axiom.bib}
@article{Calu09,
 author = "Calude, Cristian S. and Muller, Christine",
 title = {{Formal Proof: Reconciling Correctness and Understanding}},
 journal = "LNCS",
 volume = "5625",
 year = "2009",
 abstract =
 "Hilbert's concept of formal proof is an ideal of rigour for
 mathematics which has important applications in mathematical
 logic, but seems irrelevant for the practice of mathematics. The
 advent, in the last twenty years, of proof assistants was followed
 by an impressive record of deep mathematical theorems formally
 proved. Formal proof is practically achievable. With formal proof,
 correctness reaches a standard that no pen-and-paper proof can
 match, but an essential component of mathematics --- the insight
 and understanding --- seems to be in short supply. So, what makes a
 proof understandable? To answer this question we first suggest a
 list of symptoms of understanding. We then propose a vision of an
 environment in which users can write and check formal proofs as
 well as query them with reference to the symptoms of
 understanding. In this way, the environment reconciles the main
 features of proof: correctness and understanding.",
 paper = "Calu09.pdf"
}

\end{chunk}

\index{Davenport, James H.}
\index{Kohlhase, Michael}
\begin{chunk}{axiom.bib}
@article{Dave09,
 author = "Davenport, James H. and Kohlhase, Michael",
 title = {{Unifying Math Ontologies: A Tale of Two Standards}},
 journal = "LNCS",
 volume = "5625",
 year = "2009",
 abstract =
 "One of the fundamental and seemingly simple aims of mathematical
 knowledge management (MKM) is to develop and standardize formats
 that allow to ``represent the meaning of the objects of
 mathematics''. The open formats OpenMath and MathML address this,
 but differ subtly in syntax, rigor, and structural viewpoints
 (notably over calculus). To avoid fragmentation and smooth out
 interoperability obstacles, effort is under way to align them into
 a joint format OpenMath/MathML 3. We illustrate the issues that
 come up in such an alignment by looking at three main areas: bound
 variables and conditions, calculus (which relates to the previous)
 and ``lifted'' n-ary operators.",
 paper = "Dave09.pdf"
}

\end{chunk}

\index{Heras, Jonathan}
\index{Pascual, Vico}
\index{Rubio, Julio}
\begin{chunk}{axiom.bib}
@article{Hera09,
 author = "Heras, Jonathan and Pascual, Vico and Rubio, Julio",
 title = {{Using Open Mathematical Documents to Interface Computer
 Algebra and Proof Assistant Systems}},
 journal = "LNCS",
 volume = "5625",
 year = "2009",
+@misc{Chri18a,
+ author = "Christiansen, David Thrane",
+ title = {{Coding for Types: The Universe Pattern in Idris}},
+ year = "2018",
+ link = "\url{https://www.youtube.com/watch?v=AWeT_G04a0A}"
+}
+
+\end{chunk}
+
+\index{Christiansen, David Thrane}
+\begin{chunk}{axiom.bib}
+@misc{Chri19,
+ author = "Christiansen, David Thrane",
+ title = {{Bidirectional Type Checking}},
+ year = "2019",
+ link = "\url{http://www.youtube.com/watch?v=utyBNDj7s2w}"
+}
+
+\end{chunk}
+
+\index{Coquand, Thierry}
+\begin{chunk}{axiom.bib}
+@misc{Coqu96a,
+ author = "Coquand, Thierry",
+ title = {{An Algorithm for TypeChecking Dependent Types}},
+ year = "1996",
abstract =
 "Mathematical Knowledge can be encoded by means of Open
 Mathematical Documents (OMDoc) to interface both Computer Algebra
 and Proof Assistant systems. In this paper, we show how a unique
 OMDoc structure can be used to dynamically generate, both a
 Graphical User Interface for a Computer Algebra system and a
 script for a Proof Assistant. This generic approach has been made
 concrete through a first prototype interfacing the Kenzo Computer
 Algebra system with the ACL2 Theorem Prover, both based on the
 Common Lisp programming language. An OMDoc repository has been
 developed allowing the user to customize the application in an
 easy way.",
 paper = "Hera09.pdf"
+ "We present a simple typechecker for a language with dependent
+ types and let expressions, with a simple proof of correctness.",
+ paper = "Coqu96a.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Rich, Albert D.}
\index{Jeffrey, David J.}
+\index{Pierce, Benjamin C.}
+\index{Turner, David N.}
\begin{chunk}{axiom.bib}
@article{Rich09,
 author = "Rich, Albert D. and Jeffrey, David J.",
 title = {{A Knowledge Repository for Indefinite Integration
 Based on Transformation Rules}},
 journal = "LNCS",
 volume = "5625",
 year = "2009",
 abstract = "
 Taking the specific problem domain of indefinite integration, we
 describe the ongoing development of a repository of mathematical
 knowledge based on transformation rules. It is important that the
 repository be not confused with a lookup table. The database of
 transformation rules is at present encoded in Mathematica, but this is
 only one convenient form of the repository, and it could be readily
 translated into other formats. The principles upon which the set of
 rules is compiled is described. One important principle is
 minimality. The benefits of the approach are illustrated with
 examples, and with the results of comparisons with other approaches.",
 paper = "Rich09.pdf"
+@misc{Pier98,
+ author = "Pierce, Benjamin C. and Turner, David N.",
+ title = {{Local Type Inference}},
+ year = "1998",
+ link =
+ "\url{http://www.cis.upenn.edu/~bcpierce/papers/lti-toplas.pdf}",
+ abstract =
+ "We study two partial type inference methods for a language
+ combining subtyping and impredicative polymorphism. Both methods
+ are local in the sense that missing annotations are recovered
+ using only information from adjacent nodes in the syntax tree,
+ without longdistance constraints such as unification
+ variables. One method infers type arguments in polymorphic
+ applications using a local constraint solver. The other infers
+ annotations on bound variables in function abstractions by
+ propagating type constraints downward from enclosing application
+ nodes. We motivate our design choices by a statistical analysis of
+ the uses of type inference in a sizable body of existing ML code.",
+ paper = "Pier98.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Watt, Stephen M.}
\begin{chunk}{axiom.bib}
@article{Watt09,
 author = "Watt, Stephen M.",
 title = {{Algorithms for the Functional Decomposition of Laurent
 Polynomials}},
 journal = "LNCS",
 volume = "5625",
 year = "2009",
 abstract =
 "Recent work has detailed the conditions under which univariate
 Laurent polynomials have functional decompositions. This paper
 presents algorithms to compute such univariate Laurent polynomial
 decompositions efficiently and gives their multivariate
 generalizations.

 One application of functional decomposition of Laurent polynomials
 is the functional decomposition of so-called ``symbolic
 polynomials''. These are polynomial-like objects whose exponents
 are themselves integer-valued polynomials rather than
 integers. The algebraic independence of $X$, $X^n$, $X^{n^2/2}$,
 etc., and some elementary results on integer-valued polynomials
 allow problems with symbolic polynomials to be reduced to problems
 with multivariate Laurent polynomials. Hence we are interested in
 the functional decomposition of these objects.",
 paper = "Watt09.pdf"
}

\end{chunk}

\index{Davies, Rowan}
\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@misc{Davi00,
 author = "Davies, Rowan and Pfenning, Frank",
 title = {{Intersection Types and Computational Effects}},
 year = "2000",
 link = "\url{http://www.cs.cmu.edu/~fp/papers/icpf00.pdf}",
 abstract =
 "We show that standard formulations of intersection type systems
 are unsound in the presence of computational effects, and propose
 a solution similar to the value restriction for polymorphism
 adopted in the revised definition of Standard ML. It differs in
 that it is not tied to let-expressions and requires an additional
 weakening of the usual subtyping rules. We also present a
 bidirectional typechecking algorithm for the resulting language
 that does not require an excessive amount of type annotations and
 illustrate it through some examples. We further show that the type
 assignment system can be extended to incorporate parametric
 polymorphism. Taken together, we see our system and associated
 typechecking algorithm as a significant step towards the
 introduction of intersection types into realistic programming
 languages. The added expressive power would allow many more
 properties of programs to be stated by the programmer and
 statically verified by the compiler.",
 paper = "Davi00.pdf",
 keywords = "printed"
}

\end{chunk}
\index{Aspinall, David}
\index{Denney, Ewen}
\index{Luth, Christoph}
\index{Odersky, Martin}
\index{Zenger, Christoph}
\index{Zenger, Matthias}
\begin{chunk}{axiom.bib}
@article{Aspi08,
 author = "Aspinall, David and Denney, Ewen and Luth, Christoph",
 title = {{A Tactic Language for Hiproofs}},
 journal = "LNCS",
 volume = "5144",
 year = "2008",
 abstract =
 "We introduce and study a tactic language, Hitac, for constructing
 hierarchical proofs, known as hiproofs. The idea of hiproofs is to
 superimpose a labelled hierarchical nesting on an ordinary proof
 tree. The labels and nesting are used to describe the organisation
 of the proof, typically relating to its construction process. This
 can be useful for understanding and navigating the proof. Tactics
 in our language construct hiproof structure together with an
 underlying proof tree. We provide both a big-step and a small-step
 operational semantics for evaluating tactic expressions. The
 big-step semantics captures the intended meaning, whereas the
 small-step semantics hints at possible implementations and
 provides a unified notion of proof state. We prove that these
 notions are equivalent and construct valid proofs.",
 paper = "Aspi08.pdf"
}

\end{chunk}

\index{Backeljauw, Franky}
\index{Becuwe, Stefan}
\index{Cuyt, Annie}
\begin{chunk}{axiom.bib}
@article{Back08,
 author = "Backeljauw, Franky and Becuwe, Stefan and Cuyt, Annie",
 title = {{Validated Evaluation of Special Mathematical Functions}},
 journal = "LNCS",
 volume = "5144",
 year = "2008",
 abstract =
 "Because of the importance of special functions, several books and
 a large collection of papers have been devoted to the numerical
 computation of these functions, the most well-known being the
 Abramowitz and Stegun handbook. But up to this date, no
 environment offers routines for the provably correct evaluation of
 these special functions.

 We point out how series and limit-periodic continued fraction
 representation of the functions can be helpful in this
 respect. Our scalable precision technique is mainly based on the
 use of sharpened a priori truncation and round-off error upper
 bounds, in case of real arguments. The implementation is validated
 in the sense that it returns a sharp interval enclosure for the
 requested function evaluation, at the same cost as the evaluation.",
 paper = "Back08.pdf"
}

\end{chunk}

\begin{chunk}{axiom.bib}
@misc{Oder01,
 author = "Odersky, Martin and Zenger, Christoph and
 Zenger, Matthias",
 title = {{Colored Local Type Inference}},
 year = "2001",
 abstract =
 "We present a type system for a language based on $F_{\le}$, which
 allows certain type annotations to be elided in actual
 programs. Local type inference determines types by a combination
 of type propagation and local constraint solving, rather than by
 global constraint solving. We refine the previously existing local
 type inference system of Pierce and Turner by allowing partial
 type information to be propagated. This is expressed by coloring
 types to indicate propagation directions. Propagating partial type
 information allows us to omit type annotations for the visitor
 pattern, the analogue of pattern matching in languages without sum
 types.",
 paper = "Oder01.pdf",
 keywords = "printed"
}

\end{chunk}
\index{Norell, Ulf}
\begin{chunk}{axiom.bib}
@phdthesis{Nore07,
 author = "Norell, Ulf",
 title = {{Towards a Practical Programming Language Based on
 Dependent Type Theory}},
 school = "Chalmers University",
 year = "2007",
 link = "\url{http://www.cse.chalmers.se/~ulfn/papers/thesis.pdf}",
 abstract =
 "Dependent type theories have a long history of being used for
 theorem proving. One aspect of type theory which makes it very
 powerful as a proof language is that it mixes deduction with
 computation. This also makes type theory a good candidate for
 programming --- the strength of the type system allows properties
 of programs to be stated and established, and the computational
 properties provide semantics for the programs.

 This thesis is concerned with bridging the gap between the
 theoretical presentations of type theory and the requirements of
 practical programming languages. Although there are many
 challenging research problems left to solve before we have an
 industrial scale programming language based on type theory, this
 thesis takes us a good step along the way.

 In functional programming languages pattern matching provides a
 concise notation for defining functions. In dependent type theory,
 pattern matching becomes even more powerful, in that inspecting
 the value of a particular term can reveal information about the
 types and values of other terms. In this thesis we give a type
 checking algorithm for definitions by pattern matching in type
 theory, supporting overlapping patterns, and pattern matching on
 intermediate results using the {\sl with} rule.

 Traditional presentations of type theory suffer from rather
 verbose notation, cluttering programs and proofs with, for
 instance, explicit type information. One solution to this problem
 is to allow terms that can be inferred automatically to be
 omitted. This is usually implemented by inserting metavariables in
 place of the omitted terms and using unification to solve these
 metavariables during type checking. We present a type checking
 algorithm for a theory with metavariables and prove its soundness
 independent of whether the metavariables are solved or not.

 In any programming language it is important to be able to
 structure large programs into separate units or modules and limit
 the interaction between these modules. In this thesis we present a
 simple, but powerful module system for a dependently typed
 language. The main focus of the module system is to manage the
 name space of a program, and an important characteristic is a
 clear separation between the module system and the type checker,
 making it largely independent of the underlying language.

 As a side track, not directly related to the use of type theory
 for programming, we present a connection between type theory and
 a first-order logic theorem prover. This connection saves the user
 the burden of proving simple, but tedious first-order theorems by
 leaving them for the prover. We use a transparent translation to
 first-order logic which makes the proofs constructed by the
 theorem prover human readable. The soundness of the connection is
 established by a general metatheorem.

 Finally we put our work into practice in the implementation of a
 programming language, Agda, based on type theory. As an
 illustrating example, we show how to program a simple certified
 prover for equations in a commutative monoid, which can be used
 internally in Agda. Much more impressive examples have been done
 by others, showing that the ideas developed in this thesis are
 viable in practice.",
 paper = "Nore07.pdf"
}

\end{chunk}
\index{Bouche, Thierry}
\index{Dunfield, Joshua}
\index{Krishnaswami, Neelakantan R.}
\begin{chunk}{axiom.bib}
@article{Bouc08,
 author = "Bouche, Thierry",
 title = {{Digital Mathematics Libraries: The Good, the Bad, the Ugly}},
 journal = "LNCS",
 volume = "5144",
 year = "2008",
 abstract =
 "The mathematicians' Digital mathematics library (DML), which is
 not to be confused with libraries of mathematical objects
 represented in some digital format, is the generous idea that all
 mathematics ever published should end up in digital form so that
 it would be more easily referenced, accessible, usable. This
 concept was formulated at the very beginning of this century, and
 yielded a lot of international activity that culminated around
 years 2002--2005. While it is estimated that a substantial part of
 the existing math literature is already available in some digital
 format, nothing looking like one digital mathematics library has
 emerged, but a multiplicity of competing electronic offers, with
 unique standards, features, business models, access policies,
 etc. --- even though the contents themselves overlap somewhat,
 while leaving wide areas untouched. The millennium's appealing
 idea has become a new Tower of Babel.

 It is not obvious how much of the traditional library functions we
 should give up while going digital. The point of view shared by
 many mathematicians is that we should be able to find a reasonable
 archiving policy fitting all stakeholders, allowing to translate
 the essential features of the past library system --- which is the
 central infrastructure of all math departments worldwide --- in the
 digital paradigm, while enhancing overall performances thanks to
 dedicated information technology.

 The vision of this library is rather straightforward: a third
 party to the academic publishing system, preserving, indexing, and
 keeping current its digital collections through a distributed
 network of partners curating the physical holdings, and a
 centralized access facility making use of innovative mining and
 interlinking techniques for easy navigation and discovery.

 However, the fragmentation level is so high that the hope of a
 unique portal providing seamless access to everything relevant to
 mathematical research seems now completely out of reach.
 Nevertheless, we have lessons to learn from each one of the
 already numerous projects running. One of them is that there are
 too many items to deal with, and too many different initial choices
 over metadata sets and formats: it won't be possible to find a
 non-trivial greatest common divisor coping with everything already
 available, and manual upgrading is highly improbable.

 This is where future management techniques for loosely formalised
 mathematical knowledge could provide a new impetus by at last
 enabling a minimum set of features across projects borders through
 automated procedures. We can imagine e.g. math-aware OCR on
 scanned pages, concurrently with interpreters of electronic
 sources of born digital texts, both producing searchable full
 texts in a compatible semi-structured format. The challenge is
 ultimately to take advantage of the high formalisation of
 mathematical texts rather than merely ignoring it!

 With these considerations in mind, the talk will focus on
 achievements, limitations, and failures of existing digital
 mathematics libraries, taking the NUMDAM and CEDRAM programs as
 principal examples, hence the speaker himself is the target.",
 paper = "Bouc08.pdf"
}

\end{chunk}

\index{Dominguez, Cesar}
\begin{chunk}{axiom.bib}
@article{Domi08,
 author = "Dominguez, Cesar",
 title = {{Formalizing in Coq Hidden Algebras to Specify Symbolic
 Computation Systems}},
 journal = "LNCS",
 volume = "5144",
 year = "2008",
 abstract =
 "This work is an attempt to formalize, using the Coq proof
 assistant, the algebraic specification of the data structures
 appearing in two symbolic computation systems for algebraic
 topology called EAT and Kenzo. The specification of these
 structures have been obtained through an operation, called imp
 operation, between different specification frameworks as standard
 algebraic specifications and hidden specifications. Reusing
 previous Coq implementations of universal algebra and category
 theory we have proposed a Coq formalization of the imp operation,
 extending the representation to the particular hidden algebras
 which take part in this operation.",
 paper = "Domi08.pdf"
}

\end{chunk}

\begin{chunk}{axiom.bib}
@misc{Dunf18,
 author = "Dunfield, Joshua and Krishnaswami, Neelakantan R.",
 title = {{Sound and Complete Bidirectional Typechecking for
 Higher-Rank Polymorphism with Existentials and
 Indexed Types}},
 year = "2018",
 link = "\url{https://arxiv.org/pdf/1601.05106.pdf}",
 abstract =
 "Bidirectional typechecking, in which terms either synthesize a
 type or are checked against a known type, has become popular for
 its applicability to a variety of type systems, its error
 reporting, and its ease of implementation. Following principles
 from proof theory, bidirectional typing can be applied to many
 type constructs. The principles underlying a bidirectional
 approach to indexed types ({\sl generalized algebraic datatypes})
 are less clear. Building on proof-theoretic treatments of
 equality, we give a declarative specification of typing based on
 {\sl focalization}. This approach permits declarative rules for
 coverage of pattern matching, as well as support for first-class
 existential types using a focalized subtyping judgment. We use
 refinement types to avoid explicitly passing equality proofs in
 our term syntax, making our calculus similar to languages such as
 Haskell and OCaml. We also extend the declarative specification
 with explicit rules for deducing when a type is principal,
 permitting us to give a complete declarative specification for a
 rich type system with significant type inference. We also give a
 set of algorithmic typing rules, and prove that it is sound and
 complete with respect to the declarative system. The proof
 requires a number of technical innovations, including proving
 soundness and completeness in a mutually recursive fashion.",
 paper = "Dunf18.pdf"
}

\end{chunk}

\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@misc{Pfen04a,
 author = "Pfenning, Frank",
 title = {{Lecture Notes on Bidirectional Type Checking}},
 year = "2004",
 paper = "Pfen04a.pdf",
 keywords = "printed"
}

\end{chunk}
\index{Kohlhase, Michael}
\index{Muller, Christine}
\index{Rabe, Florian}
\index{Fieker, Claus}
\index{Hart, William}
\index{Hofmann, Tommy}
\index{Johansson, Fredrik}
\begin{chunk}{axiom.bib}
@article{Kohl08a,
 author = "Kohlhase, Michael and Muller, Christine and Rabe, Florian",
 title = {{Notations for Living Mathematical Documents}},
 journal = "LNCS",
 volume = "5144",
 year = "2008",
 abstract =
 "Notations are central for understanding mathematical
 discourse. Readers would like to read notations that transport the
 meaning well and prefer notations that are familiar to
 them. Therefore, authors optimize the choice of notations with
 respect to these two criteria, while at the same time trying to
 remain consistent over the document and their own prior
 publications. In print media where notations are fixed at
 publication time, this is an overconstrained problem. In living
 documents notations can be adapted at reading time, taking reader
 preferences into account.

 We present a representational infrastructure for notations in
 living mathematical documents. Mathematical notations can be
 defined declaratively. Author and reader can extensionally define
 the set of available notation definitions at arbitrary document
 levels, and they can guide the notation selection function via
 intensional annotations.

 We give an abstract specification of notation definitions and the
 flexible rendering algorithms and show their coverage on
 paradigmatic examples. We show how to use this framework to render
 OPENMATH and ContentMathML to PresentationMathML, but the
 approach extends to arbitrary content and presentation formats. We
 discuss prototypical implementations of all aspects of the
 rendering pipeline.",
 paper = "Kohl08a.pdf"
}

\end{chunk}

\index{Stratford, Jonathan}
\index{Davenport, James H.}
\begin{chunk}{axiom.bib}
@article{Stra08,
 author = "Stratford, Jonathan and Davenport, James H.",
 title = {{Unit Knowledge Management}},
 journal = "LNCS",
 volume = "5144",
 year = "2008",
 abstract =
 "In 9, various observations on the handling of (physical) units in
 OpenMath were made. In this paper, we update those observations,
 and make some comments based on a working unit converter that,
 because of its OpenMath-based design, is modular, extensible, and
 reflective. We also note that some of the issues in an effective
 converter, such as the rules governing abbreviations, being more
 linguistic than mathematical, do not lend themselves to easy
 expression in OpenMath.",
 paper = "Stra08.pdf"
}

\end{chunk}

\begin{chunk}{axiom.bib}
@inproceedings{Fiek17,
 author = "Fieker, Claus and Hart, William and Hofmann, Tommy and
 Johansson, Fredrik",
 title = {{Nemo/Hecke: Computer Algebra and Number Theory Packages
 for the Julia Programming Language}},
 booktitle = "ISSAC'17",
 publisher = "ACM",
 year = "2017",
 pages = "157--164",
 abstract =
 "We introduce two new packages, Nemo and Hecke, written in the
 Julia programming language for computer algebra and number
 theory. We demonstrate that high performance generic algorithms
 can be implemented in Julia, without the need to resort to a
 low-level C implementation. For specialised algorithms, we use
 Julia's efficient native C interface to wrap existing C/C++
 libraries such as Flint, Arb, Antic and Singular. We give examples
 of how to use Hecke and Nemo and discuss some algorithms that we
 have implemented to provide high performance basic arithmetic.",
 paper = "Fiek17.pdf",
 keywords = "printed"
}

\end{chunk}
\index{Andres, Mirian}
\index{Lamban, Laureano}
\index{Rubio, Julio}
\begin{chunk}{axiom.bib}
@article{Andr07,
 author = "Andres, Mirian and Lamban, Laureano and Rubio, Julio",
 title = {{Executing in Common Lisp, Proving in ACL2}},
 journal = "LNCS",
 volume = "4573",
 year = "2007",
 abstract =
 "In this paper, an approach to integrate an already-written Common
 Lisp program for algebraic manipulation with ACL2 proofs of
 properties of that program is presented. We report on a particular
 property called ``cancellation theorem'', which has been proved in
 ACL2, and could be applied to several problems in the field of
 Computational Algebraic Topology.",
 paper = "Andr07.pdf"
}

\end{chunk}

\index{Yallop, Jeremy}
\index{White, Leo}
\begin{chunk}{axiom.bib}
@inproceedings{Yall19,
 author = "Yallop, Jeremy and White, Leo",
 title = {{Lambda: The Ultimate Sublanguage (Experience Report)}},
 booktitle = "Inter. Conf. on Functional Programming",
 publisher = "ACM",
 year = "2019",
 abstract =
 "We describe our experience teaching an advanced typed functional
 programming course based around the use of System $F_\omega$ as a
 programming language.",
 paper = "Yall19.pdf"
}

\end{chunk}
\index{Davenport, James H.}
\begin{chunk}{axiom.bib}
@article{Dave07a,
 author = "Davenport, James H.",
 title = {{What Might ``Understand a Function'' Mean?}},
 journal = "LNCS",
 volume = "4573",
 year = "2007",
 abstract =
 "Many functions in classical mathematics are largely defined in
 terms of their derivatives, so Bessel's function is ``the''
 solution of Bessel's equation, etc. For definiteness, we need to
 add other properties, such as initial values, branch cuts,
 etc. What actually makes up ``the definition'' of a function in
 computer algebra? The answer turns out to be a combination of
 arithmetic and analytic properties.",
 paper = "Dave07a.pdf"
}

\end{chunk}

\index{Zhao, Jinxu}
\index{Oliveira, Bruno C.D.S}
\index{Schrijvers, Tom}
\begin{chunk}{axiom.bib}
@inproceedings{Zhao19,
 author = "Zhao, Jinxu and Oliveira, Bruno C.D.S and
 Schrijvers, Tom",
 title = {{A Mechanical Formalization of Higher-Ranked Polymorphic
 Type Inference}},
 booktitle = "Inter. Conf. on Functional Programming",
 publisher = "ACM",
 year = "2019",
 abstract =
 "Modern functional programming languages, such as Haskell or
 OCaml, use sophisticated forms of type inference. While an
 important topic in the Programming Languages research, there is
 little work on the mechanization of the metatheory of type
 inference in theorem provers. In particular we are unaware of any
 complete formalization of the type inference algorithms that are
 the backbone of modern functional languages.

 This paper presents the first full mechanical formalization of the
 metatheory for higher-ranked polymorphic type inference. The
 system that we formalize is the bidirectional type system by
 Dunfield and Krishnaswami (DK). The DK type system has two
 variants (a declarative and an algorithmic one) that have been
 manually proven sound, complete and decidable. We present a
 mechanical formalization in the Abella theorem prover of DK's
 declarative type system with a novel algorithmic system. We have a
 few reasons to use a new algorithm. Firstly, our new algorithm
 employs worklist judgments, which precisely capture the scope of
 variables and simplify the formalization of scoping in a theorem
 prover. Secondly, while DK's original formalization comes with
 very well-written manual proofs, there are several details missing
 and some incorrect proofs, which complicate the task of writing a
 mechanized proof. Despite the use of a different algorithm we
 prove the same results as DK, although with significantly
 different proofs and proof techniques. Since such type inference
 algorithms are quite subtle and have a complex metatheory,
 mechanical formalizations are an important advance in type
 inference research.",
 paper = "Zhao19.pdf",
 keywords = "printed"
}

\end{chunk}
\index{Asperti, Andrea}
\index{Geuvers, Herman}
\index{Loeb, Iris}
\index{Mamane, Lionel Elie}
\index{Coen, Claudio Sacerdoti}
\begin{chunk}{axiom.bib}
@article{Aspe06,
 author = "Asperti, Andrea and Geuvers, Herman and Loeb, Iris and
 Mamane, Lionel Elie and Coen, Claudio Sacerdoti",
 title = {{An Interactive Algebra Course with Formalised Proofs and
 Definitions}},
 journal = "LNCS",
 volume = "4108",
 year = "2006",
 abstract =
 "We describe a case-study of the application of web technology to
 create web-based didactic material out of a repository of formal
 mathematics, using the structure of an existing course. The paper
 discusses the difficulties related to associating notation to a
 formula, the embedding of formal notions into a document (the
 ``view''), and the rendering of proofs.",
 paper = "Aspe06.pdf"
}

\end{chunk}

\index{Swierstra, Wouter}
\index{Baanen, Tim}
\begin{chunk}{axiom.bib}
@inproceedings{Swie19,
 author = "Swierstra, Wouter and Baanen, Tim",
 title = {{A Predicate Transformer Semantics for Effects}},
 booktitle = "Inter. Conf. on Functional Programming",
 publisher = "ACM",
 year = "2019",
 abstract =
 "Reasoning about programs that use effects can be much harder than
 reasoning about their pure counterparts. This paper presents a
 predicate transformer semantics for a variety of effects,
 including exceptions, state, non-determinism, and general
 recursion. The predicate transformer semantics gives rise to a
 refinement relation that can be used to relate a program to its
 specification, or even calculate effectful programs that are
 correct by construction.",
 paper = "Swie19.pdf"
}

\end{chunk}
\index{Cairns, Paul}
\index{Gow, Jeremy}
\begin{chunk}{axiom.bib}
@article{Cair06,
 author = "Cairns, Paul and Gow, Jeremy",
 title = {{Literate Proving: Presenting and Documenting Formal Proofs}},
 journal = "LNCS",
 volume = "4108",
 year = "2006",
 abstract =
 "Literate proving is the analogue for literate programming in the
 mathematical realm. That is, the goal of literate proving is for
 humans to produce clear expositions of formal mathematics that
 could even be enjoyable for people to read whilst remaining
 faithful representations of the actual proofs. This paper
 describes maze, a generic literate proving system. Authors mark up
 formal proof files, such as Mizar files, with arbitrary XML and
 use maze to obtain the selected extracts and transform them for
 presentation, e.g. as Latex. To aid its use, maze has built in
 transformations that include pretty printing and proof sketching
 for inclusion in latex documents. These transformations challenge
 the concept of faithfulness in literate proving but it is argued
 that this should be a distinguishing feature of literate proving
 from literate programming.",
 paper = "Cair06.pdf"
}

\end{chunk}

\index{Eremondi, Joseph}
\index{Tanter, Eric}
\index{Garcia, Ronald}
\begin{chunk}{axiom.bib}
@inproceedings{Erem19,
 author = "Eremondi, Joseph and Tanter, Eric and Garcia, Ronald",
 title = {{Approximate Normalization for Gradual Dependent Types}},
 booktitle = "Inter. Conf. on Functional Programming",
 publisher = "ACM",
 year = "2019",
 abstract =
 "Dependent types help programmers write highly reliable
 code. However, this reliability comes at a cost: it can be
 challenging to write new prototypes in (or migrate old code to)
 dependently-typed programming languages. Gradual typing makes
 static type disciplines more flexible, so an appropriate notion of
 gradual dependent types could fruitfully lower this cost. However,
 dependent types raise unique challenges for gradual
 typing. Dependent typechecking involves the execution of program
 code, but gradually-typed code can signal runtime type errors or
 diverge. These runtime errors threaten the soundness guarantees
 that make dependent types so attractive, while divergence spoils
 the type-driven programming experience.

 This paper presents GDTL, a gradual dependently-typed language
 that emphasizes pragmatic dependently-typed programming. GDTL
 fully embeds both an untyped and dependently-typed language, and
 allows for smooth transitions between the two. In addition to
 gradual types we introduce gradual terms, which allow the user to
 be imprecise in type indices and to omit proof terms; runtime
 checks ensure type safety. To account for non-termination and
 failure, we distinguish between compile-time normalization and
 runtime execution: compile-time normalization is approximate but
 total, while runtime execution is exact, but may fail or
 diverge. We prove that GDTL has decidable typechecking and
 satisfies all the expected properties of gradual languages. In
 particular, GDTL satisfies the static and dynamic gradual
 guarantees: reducing type precision preserves typedness, and
 altering type precision does not change program behavior outside
 of dynamic type failures. To prove these properties, we were led
 to establish a novel normalization gradual guarantee that
 captures the monotonicity of approximate normalization with
 respect to imprecision.",
 paper = "Erem19.pdf"
}

\end{chunk}

\index{Raja, Amar}
\index{Rayner, Matthew}
\index{Sexton, Alan}
\index{Sorge, Volker}
\begin{chunk}{axiom.bib}
@article{Raja06,
 author = "Raja, Amar and Rayner, Matthew and Sexton, Alan and
 Sorge, Volker",
 title = {{Towards a Parser for Mathematical Formula Recognition}},
 journal = "LNCS",
 volume = "4108",
 year = "2006",
 abstract =
 "For the transfer of mathematical knowledge from paper to
 electronic form, the reliable automatic analysis and understanding
 of mathematical texts is crucial. A robust system for this task
 needs to combine low level character recognition with higher level
 structural analysis of mathematical formulas. We present progress
 towards this goal by extending a database-driven optical character
 recognition system for mathematics with two high level analysis
 features. One extends and enhances the traditional approach of
 projection profile cutting. The second aims at integrating the
 recognition process with graph grammar rewriting by giving support
 to the interactive construction and validation of grammar
 rules. Both approaches can be successfully employed to enhance the
 capabilities of our system to recognise and reconstruct compound
 mathematical expressions.",
 paper = "Raja06.pdf"
}

\end{chunk}

\index{Patterson, Daniel}
\index{Ahmed, Amal}
\begin{chunk}{axiom.bib}
@inproceedings{Patt19,
 author = "Patterson, Daniel and Ahmed, Amal",
 title = {{The Next 700 Compiler Correctness Theorems}},
 booktitle = "Inter. Conf. on Functional Programming",
 publisher = "ACM",
 year = "2019",
 abstract =
 "Compiler correctness is an old problem, with results stretching
 back beyond the last half-century. Founding the field, John
 McCarthy and James Painter set out to build a 'completely
 trustworthy compiler'. And yet, until quite recently, even despite
 truly impressive verification efforts, the theorems being proved
 were only about the compilation of whole programs, a theoretically
 quite appealing but practically unrealistic simplification. For a
 compiler correctness theorem to assure complete trust, the theorem
 must reflect the reality of how the compiler will be used.

 There has been much recent work on more realistic 'compositional'
 compiler correctness aimed at proving correct compilation of
 components while supporting linking with components compiled from
 different languages using different compilers. However, the
 variety of theorems, stated in remarkably different ways, raises
 questions about what researchers even mean by a 'compiler is
 correct'. In this pearl, we develop a new framework with which to
 understand compiler correctness theorems in the presence of
 linking, and apply it to understanding and comparing this
 diversity of results. In doing so, not only are we better able to
 assess their relative strengths and weaknesses, but gain insight
 into what we as a community should expect from compiler
 correctness theorems of the future.",
 paper = "Patt19.pdf"
}

\end{chunk}

\index{Adams, Andrew A.}
\index{Davenport, James H.}
\begin{chunk}{axiom.bib}
@article{Adam04,
 author = "Adams, Andrew A. and Davenport, James H.",
 title = {{Copyright Issues for MKM}},
 journal = "LNCS",
 volume = "3119",
 year = "2004",
 abstract =
 "We present an overview of the current situation and recent and
 expected future developments in areas of copyright law and
 economics relevant to Mathematical Knowledge Management.",
 paper = "Adam04.pdf"
}

\end{chunk}

\index{Paraskevopoulou, Zoe}
\index{Appel, Andrew W.}
\begin{chunk}{axiom.bib}
@inproceedings{Para19,
 author = "Paraskevopoulou, Zoe and Appel, Andrew W.",
 title = {{Closure Conversion is Safe for Space}},
 booktitle = "Inter. Conf. on Functional Programming",
 publisher = "ACM",
 year = "2019",
 abstract =
 "We formally prove that closure conversion with flat environments
 for CPS lambda calculus is correct (preserves semantics) and safe
 for time and space, meaning that produced code preserves the time
 and space required for execution of the source program.

 We give a cost model to pre- and post-closure-conversion code by
 formalizing profiling semantics that keep track of the time and
 space resources needed for the execution of a program, taking
 garbage collection into account. To show preservation of time and
 space we set up a general 'garbage-collection compatible' binary
 logical relation that establishes invariants on resource
 consumption of the related programs, along with functional
 correctness. Using this framework, we show semantics preservation
 and space and time safety for terminating source programs, and
 divergence preservation and space safety for diverging source
 programs.

 This is the first formal proof of space-safety of a
 closure-conversion transformation. The transformation and the
 proof are parts of the CertiCoq compiler pipeline from Coq
 (Gallina) through CompCert Clight to assembly language. Our
 results are mechanized in the Coq proof assistant.",
 paper = "Para19.pdf",
 keywords = "printed"
}

\end{chunk}
\index{Carlisle, David}
\index{Dewar, Mike}
+\index{Cong, Youyou}
+\index{Osvald, Leo}
+\index{Essertel, Gregory M.}
+\index{Rompf, Tiark}
\begin{chunk}{axiom.bib}
@article{Carl03,
 author = "Carlisle, David and Dewar, Mike",
 title = {{NAG Library Documentation}},
 journal = "LNCS",
 volume = "2594",
 year = "2003",
+@inproceedings{Cong19,
+ author = "Cong, Youyou and Osvald, Leo and Essertel, Gregory M.
+ and Rompf, Tiark",
+ title = {{Compiling with Continuations, or without? Whatever}},
+ booktitle = "Inter. Conf. on Functional Programming",
+ publisher = "ACM",
+ year = "2019",
abstract =
 "This paper describes the management and evolution of a large
 collection of 1200 documents detailing the functionality in NAG
 Library products.

 This provides a case study addressing many of the issues which
 concern the ``MKM'' project, involving conversion of legacy
 formats (SGML and Latex) to XML, and inferring semantic content
 from mainly presentational mathematical expressions.",
 paper = "Carl03.pdf"
+ "What makes a good compiler IR? In the context of functional
+ languages, there has been an extensive debate on the advantages
+ and disadvantages of continuation-passing style (CPS). The
+ consensus seems to be that some form of explicit continuations is
+ necessary to model jumps in a functional style, but that they
+ should have a 2nd-class status, separate from regular functions,
+ to ensure efficient code generation. Building on this observation,
+ a recent study from PLDI 2017 proposed a direct-style IR with
+ explicit join points, which essentially represent local
+ continuations, i.e. functions that do not return or escape. While
+ this IR can work well in practice, as evidenced by the
+ implementation of join points in the Glasgow Haskell Compiler
+ (GHC), there still seems to be room for improvement, especially
+ with regard to the way continuations are handled in the course of
+ optimization.
+
+ In this paper, we contribute to the CPS debate by developing a
+ novel IR with the following features. First, we integrate a
+ control operator that resembles Felleisen's C, eliminating certain
+ redundant rewrites observed in the previous study. Second, we
+ treat the non-returning and non-escaping aspects of continuations
+ separately, allowing efficient compilation of well-behaved
+ functions defined by the user. Third, we define a selective CPS
+ translation of our IR, which erases control operators while
+ preserving the meaning and typing of programs. These features
+ enable optimizations in both direct style and full CPS, as well as
+ in any intermediate style with selectively exposed continuations.
+ Thus, we change the spectrum of available options from 'CPS yes or
+ no' to 'as much or as little CPS as you want, when you want it'.",
+ paper = "Cong19.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Davenport, James H.}
+\index{Zavialov, Vladislav}
\begin{chunk}{axiom.bib}
@article{Dave03,
 author = "Davenport, James H.",
 title = {{MKM from Book to Computer: A Case Study}},
 journal = "LNCS",
 volume = "2594",
 year = "2003",
 abstract =
 "[2] is one of the great mathematical knowledge
 repositories. Nevertheless, it was written for a different era,
 and for human readership. In this paper, we describe the sorts of
 knowledge in one chapter (elementary transcendental functions) and
 the difficulties in making this sort of knowledge formal. This
 makes us ask questions about the nature of a Mathematical
 Knowledge Repository, and whether a database is enough, or whether
 more ``intelligence'' is required.",
 paper = "Dave03.pdf"
+@misc{Zavi18,
+ author = "Zavialov, Vladislav",
+ title = {{Why Dependent Haskell is the Future of Software Development}},
+ link = "\url{https://serokell.io/blog/why-dependent-haskell}",
+ year = "2018"
}
\end{chunk}
\index{Wiedijk, Freek}
+\index{Eisenberg, Richard A.}
\begin{chunk}{axiom.bib}
@article{Wied03b,
 author = "Wiedijk, Freek",
 title = {{Comparing Mathematical Provers}},
 journal = "LNCS",
 volume = "2594",
 year = "2003",
+@phdthesis{Eise16,
+ author = "Eisenberg, Richard A.",
+ title = {{Dependent Types in Haskell: Theory and Practice}},
+ school = "University of Pennsylvania",
+ year = "2016",
abstract =
 "We compare fifteen systems for the formalizatioin of mathematics
 with a computer. We present several tables that list various
 properties of these programs. The three main dimensions on which
 we compare these systems are: the size of their library, the
 strength of their logic and their level of automation.",
 paper = "Wied03b.pdf"
+ "Haskell, as implemented by the Glasgow Haskell Compiler (GHC),
+ has been adding new type-level programming features for some
+ time. Many of these features --- generalized algebraic datatypes
+ (GADTs), type families, kind polymorphism, and promoted datatypes
+ --- have brought Haskell to the doorstep of dependent types. Many
+ dependently typed programs can even currently be encoded, but
+ often the constructions are painful.
+
+ In this dissertation, I describe Dependent Haskell, which supports
+ full dependent types via a backward-compatible extension to
+ today's Haskell. An important contribution to this work is an
+ implementation, in GHC, of a portion of Dependent Haskell, with
+ the rest to follow. The features I have implemented are already
+ released, in GHC 8.0. This dissertation contains several practical
+ examples of Dependent Haskell code, a full description of the
+ differences between Dependent Haskell and today's Haskell, a novel
+ dependently typed lambda-calculus (called PICO) suitable for use
+ as an intermediate language for compiling Dependent Haskell, and a
+ type inference and elaboration algorithm, BAKE, that translates
+ Dependent Haskell to type-correct PICO. Full proofs of type safety
+ of PICO and the soundness of BAKE are included in the appendix.",
+ paper = "Eise16.pdf"
+}
+
+\end{chunk}
+
+\index{Pressler, Ron}
+\begin{chunk}{axiom.bib}
+@misc{Pres19,
+ author = "Pressler, Ron",
+ title = {{Correctness and Complexity}},
+ year = "2019",
+ link = "\url{https://pron.github.io/posts/correctness-and-complexity}"
}
\end{chunk}
\index{Deplagne, Eric}
\index{Kirchner, Claude}
+\index{Rado, Tibor}
\begin{chunk}{axiom.bib}
@article{Depl02,
 author = "Deplagne, Eric and Kirchner, Claude",
 title = {{Deduction versus Computation: The Case of Induction}},
 journal = "LNCS",
 volume = "2385",
 year = "2002",
 abstract =
 "The fundamental difference and the essential complementarity
 between computation and deduction are central in computer algebra,
 automated deduction, proof assistants and in frameworks making
 them cooperating. In this work we show that the fundamental proof
 method of induction can be udnerstood and implemented as either
 computation or deduction.

 Inductive proofs can be built either explicitly by making use of
 an induction principle or implicitly by using the socalled
 induction by rewriting and inductionless induction methods. When
 mechanizing proof construction, explicit induction is used in
 proof assistants and implicit induction is used in rewrite based
 automated theorem provers. The two approaches are clearly
 complementary but up to now there was no framework able to
 encompass and to understand uniformly the two methods. In this
 work, we propose such an approach based on the general notion of
 deduction modulo. We extend slightly the original version of the
 deduction modulo framework and we provide modularity properites
 for it. We show how this applies to a uniform understanding of the
 so called induction by rewriting method and how this relates
 directly to the general use of the induction principle.",
 paper = "Depl02.pdf"
}

\end{chunk}

\index{Meier, Andreas}
\index{Sorge, Volker}
\index{Colton, Simon}
\begin{chunk}{axiom.bib}
@article{Meie02,
 author = "Meier, Andreas and Sorge, Volker and Colton, Simon",
 title = {{Employing Theory Formationi to Guide Proof Planning}},
 journal = "LNCS",
 volume = "2385",
 year = "2002",
+@article{Rado61,
+ author = "Rado, Tibor",
+ title = {{On Non-Computable Functions}},
+ journal = "Bell System Technical Journal",
+ volume = "41",
+ number = "3",
+ year = "1962",
abstract =
 "The invention of suitable concepts to characterise mathematical
 structures is one of the most challenging tasks for both human
 mathematicians and automated theorem provers alike. We present an
 approach where automatic concept formation is used to guide
 nonisomorphism proofs in the residue class domain. The main idea
 behind the proof is to automatically identify discriminants for
 two given structures to show that they are not
 isomorphic. Suitable discriminants are generated by a theory
 formation system; the overall proof is constructe by a proof
 planner with the additional support of traditional automated
 theorem provers and a computer algebra system.",
 paper = "Meie02.pdf"
+ "The construction of non-computable functions used in this paper
+ is based on the principle that a finite, nonempty set of
+ non-negative integers has a largest element. Also, this principle
+ is used only for sets which are exceptionally well-defined by
+ current standards. No enumeration of computable functions is used,
+ and in this sense the diagonal process is not employed. Thus, it
+ appears that an apparently self-evident principle, of constant use
+ in every area of mathematics, yields non-constructive entities.",
+ paper = "Rado61.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Sturm, Thomas}
+\index{Yedidia, Adam}
+\index{Aaronson, Scott}
\begin{chunk}{axiom.bib}
@article{Stur02,
 author = "Sturm, Thomas",
 title = {{Integration of Quantifier Elimination with Constraint
 Logic Programming}},
 journal = "LNCS",
 volume = "2385",
 year = "2002",
+@article{Yedi16,
+ author = "Yedidia, Adam and Aaronson, Scott",
+ title = {{A Relatively Small Turing Machine Whose Behavior Is
+ Independent of Set Theory}},
+ journal = "Complex Systems",
+ volume = "25",
+ number = "4",
+ pages = "297--327",
+ link = "\url{http://www.complex-systems.com/pdf/2555.pdf}",
+ year = "2016",
abstract =
 "We examine the potential of an extension of constraint logic
 programming, where the admissible constraints are arbitrary
 firstorder formulas over some domain. Constraint solving is
 realized by effective quantifier elimination. The arithmetic is
 always exact. We descrbe the conceptual advantages of our approach
 and the capabilities of the current implementation
 CLP(RL). Supported domains are $\mathbb{R}$, $\mathbb{C}$, and
 $\mathbb{Q}_p$. For our discussion here we restrict to $\mathbb{R}$.
 paper = "Stur02.pdf"
+ "Since the definition of the Busy Beaver function by Rado in 1962,
+ an interesting open question has been what is the smallest value
+ of $n$ for which $BB(n)$ is independent of ZFC set theory. Is this
+ $n$ approximately 10, or closer to 1,000,000, or is it even
+ larger? In this paper, we show that it is at most 7,918 by
+ presenting an explicit description of a 7,918-state Turing machine
+ $Z$ with 1 tape and a 2-symbol alphabet that cannot be proved to
+ run forever in ZFC (even though it presumably does), assuming ZFC
+ is consistent. The machine is based on work of Harvey Friedman on
+ independent statements involving order-invariant graphs. In doing
+ so, we give the first known upper bound on the highest provable
+ Busy Beaver number in ZFC. We also present a 4,888-state Turing
+ machine $G$ that halts if and only if there is a counterexample to
+ Goldbach's conjecture, and a 5,372-state Turing machine $R$ that
+ halts if and only if the Riemann hypothesis is false. To create
+ $G$, $R$, and $Z$, we develop and use a higher-level language,
+ Laconic, which is much more convenient than direct state
+ manipulation.",
+ paper = "Yedi16.pdf"
}
\end{chunk}
\index{Rabe, Florian}
+\index{Hartmanis, J.}
+\index{Stearns, R.E.}
\begin{chunk}{axiom.bib}
@article{Rabe12,
 author = "Rabe, Florian",
 title = {{Q Query Language for Formal Mathematical Libraries}},
 journal = "LNCS",
 volume = "7362",
 year = "2012",
 abstract =
 "One of the most promising applications of mathematical knowledge
 management is search: Even if we restrict attention to the tiny
 fragment of mathematics that has been formalized, the amount
 exceeds the comprehension of an individual human.

 Based on the generic representation language MMT, we introduce the
 mathematical query langauge QMT: It combines simplicity,
 expressivity, and scalability while avoiding a commitment to a
 particular logical formalism. QMT can integrate various search
 paradigms such as unification, semantic web, or XQuery style
 queries, and QMT queries can span different mathematical
 libraries.

 We have implemented QMT as a part of the MMT API. This combination
 provides a scalable indexing and query engine that can be readily
 applied to any library of mathematical knowledge. While our focus
 here is on libraries that are available in a content markup
 language, QMT naturally extends to presentation and narration
 markup languages.",
 paper = "Rabe12.pdf"
+@article{Hart63,
+ author = "Hartmanis, J. and Stearns, R.E.",
+ title = {{On the Computational Complexity of Algorithms}},
+ journal = "Trans. American Mathematical Society",
+ volume = "117",
+ pages = "285--306",
+ year = "1965",
+ paper = "Hart63.pdf"
}
\end{chunk}
\index{Amin, Nada}
\index{Rompf, Tiark}
+\index{Brooks, Frederick P.}
\begin{chunk}{axiom.bib}
@inproceedings{Amin18,
 author = "Amin, Nada and Rompf, Tiark",
 title = {{Collapsing Towers of Interpreters}},
 booktitle = "Principles of Programming Languages",
 year = "2018",
 publisher = "ACM",
+@incollection{Broo86a,
+ author = "Brooks, Frederick P.",
+ title = {{No Silver Bullet --- Essence and Accident in Software
+ Engineering}},
+ booktitle = "The Mythical Man-Month, Anniversary Edition",
+ publisher = "Elsevier Science",
+ pages = "1069--1076",
+ year = "1986",
+ comment = "chapter",
abstract =
 Given a tower of interpreters, i.e., a sequence of multiple
 interpreters interpreting one another as input programs, we aim to
 collapse this tower into a compiler that removes all interpretive
 overhead and runs in a single pass. In the real world, a use case
 might be Python code executed by an x86 runtime, on a CPU emulated
 in a JavaScript VM, running on an ARM CPU. Collapsing such a tower
 can not only exponentially improve runtime performance, but also
 enable the use of base language tools for interpreted programs,
 e.g. for analysis and verification. In this paper, we lay the
 foundations in an idealized but realistic setting.

 We present a multilevel lambda calculus that features staging
 constructs and stage polymorphism: based on runtime parameters, an
 evaluator either executes source code (thereby acting as an
 interpreter) or generates code (thereby acting as a compiler). We
 identify stage polymorphism, a programming model from the domain
 of highperformance program generators, as the key mechanism to
 make such interpreters compose in a collapsible way.

 We present Pink, a metacircular Lisplike evaluator on top of
 this calculus, and demonstrate that we can collapse arbitrarily
 many levels of selfinterpretation, including levels with
 semantics modifications. We discuss several examples: compiling
 regular expressions through an interpreter to base code, building
 program transformers from modified interpreters, and others. We
 develop these ideas further to include reflection and reification,
 culminating in Purple, a reflective language inspired by Brown,
 Blond, and Black, which realizes a conceptually infinite tower,
 where every aspect of the semantics can change
 dynamically. Addressing an open challenge, we show how user
 programs can be compiled and recompiled under usermodified
 semantics.",
 paper = "Amin18.pdf",
 keywords = "printed"
+ "There is no single development, in either technology or
+ management technique, which by itself promises even one
+ order-of-magnitude improvement within a decade in productivity, in
+ reliability, in simplicity",
+ paper = "Broo86a.pdf",
+ keywords = "printed, DONE"
}
\end{chunk}
\index{Ritt, Joseph Fels}
+\index{Turing, A. M.}
\begin{chunk}{axiom.bib}
@book{Ritt48,
 author = {{Ritt, Joseph Fels}},
 title = {{Integration in Finite Terms}},
 publisher = "Columbia University Press, New York",
 year = "1948"
+@misc{Turi36,
+ author = "Turing, A. M.",
+ title = {{On Computable Numbers, with an Application to the
+ Entscheidungsproblem}},
+ year = "1936",
+ link =
+ "\url{https://www.cs.virginia.edu/~robins/Turing_Paper_1936.pdf}",
+ abstract =
+ "The ``computable'' numbers may be described briefly as the real
+ numbers whose expressions as a decimal are calculable by finite
+ means. Although the subject of this paper is ostensibly the
+ computable {\sl numbers}, it is almost equally easy to define and
+ investigate computable functions of an integral variable or a real
+ or computable variable, computable predicates, and so forth. The
+ fundamental problems involved are, however, the same in each case,
+ and I have chosen the computable numbers for explicit treatment as
+ involving the least cumbrous technique. I hope shortly to give an
+ account of the relations of the computable numbers, functions, and
+ so forth to one another. This will include a development of the
+ theory of functions of a real variable expressed in terms of
+ computable numbers. According to my definition, a number is
+ computable if its decimal can be written down by a machine.",
+ paper = "Turi36.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Conrad, Brian}
+\index{Turing, A. M.}
\begin{chunk}{axiom.bib}
@misc{Conr05,
 author = "Conrad, Brian",
 title = {{Impossibility Theorems for Elementary Integration}},
 year = "2005",
 link =
 "\url{http://www2.maths.ox.ac.uk/cmi/library/academy/LectureNotes05/Conrad.pdf}",
 abstract =
 "Liouville proved that certain integrals, most famously
 $\int{e^{x^2}}~dx$, cannot be expressed in elementary terms. We
 explain how to give precise meaning to the notion of integration
 ``in elementary terms'', and we formulate Liouville's theorem
 that characterizes the possible form of elementary
 antiderivatives. Using this theorem, we deduce a practical
 criterion for proving such impossibility results in special cases.

 This criterion is illustrated for the Gaussian integral
 $\int{e^{x^2}}~dx$ from probability theory, the logarithmic
 integral $\int{}~dt/log(t)$ from the study of primes, and
 elliptic integrals. Our exposition is aimed at students who are
 familiar with calculus and elementary abstract algebra (at the
 level of polynomial rings $F(t)$ over a field $F$).",
 paper = "Conr05.pdf",
+@misc{Turi47,
+ author = "Turing, A. M.",
+ title = {{Lecture to the London Mathematical Society on 20 February
+ 1947}},
+ year = "1947",
+ link = "\url{http://www.vordenker.de/downloads/turing-vorlesung.pdf}",
+ paper = "Turi47.pdf",
keywords = "printed"
}
\end{chunk}
\index{Kiss, Csongor}
\index{Eisenbach, Susan}
\index{Field, Tony}
\index{Jones, Simon Peyton}
+\index{Demri, S.}
+\index{Laroussinie, F.}
+\index{Schnoebelen, Ph.}
\begin{chunk}{axiom.bib}
@inproceedings{Kiss18,
 author = "Kiss, Csongor and Eisenbach, Susan and Field, Tony and
 Jones, Simon Peyton",
 title = {{Higherorder Typelevel Programming in Haskell}},
 booktitle = "Proc. ACM Programming Languages",
 year = "2018",
 publisher = "ACM"
 abstract =
 "Type family applications in Haskell must be fully saturated. This
 means that all typelevel functions have to be firstorder,
 leading to code that is both messy and long winded. In tis paper
 we detail an extension to GHC that removes this restriction. We
 augment Haskell's existing type arrow, $\rightarrow$, with an
 unmatchable arrow $\twoheadrightarrow$, that supports partial
 application of type families without compromising soundness. A
 soundness proof is provided. We show how the techniques described
 can lead to substantial codesize reduction (circa 80\%) in the
 typelevel logic of commonlyused typelevel libraries whilst
 simultaneously improving code quality and readability.",
 paper = "Kiss18.pdf"
}

\end{chunk}

\index{Ostebee, Arnold}
\index{Zorn, Paul}
\begin{chunk}{axiom.bib}
@article{Oste93,
 author = "Ostebee, Arnold and Zorn, Paul",
 title = {{Telegraphic Reviews}},
 journal = "The American Mathematical Monthly",
 volume = "100",
 number = "8",
 pages = "812817",
 year = "1993",
 paper = "Oste93.pdf",
 keywords = "axiomref"
+@article{Demr06,
+ author = "Demri, S. and Laroussinie, F. and Schnoebelen, Ph.",
+ title = {{A Parametric Analysis of the State-Explosion Problem in
+ Model Checking}},
+ journal = "Journal of Computer and System Sciences",
+ volume = "72",
+ pages = "547575",
+ year = "2006",
+ abstract =
+ "In model checking, the state-explosion problem occurs when one
+ checks a {\sl non-flat system}, i.e., a system implicitly described
+ as a synchronized product of elementary subsystems. In this paper,
+ we investigate the complexity of a wide variety of model-checking
+ problems for non-flat systems under the light of
+ {\sl parameterized complexity}, taking the number of synchronized
+ components as a parameter. We provide precise complexity measures
+ (in the parameterized sense) for most of the problems we
+ investigate, and evidence that the results are robust.",
+ paper = "Demr06.pdf"
}
\end{chunk}
\index{Davenport, James}
+\index{Schnoebelen, Ph.}
\begin{chunk}{axiom.bib}
@misc{Davexxa,
 author = "Davenport, James",
 title = {{Integration in Finite Terms}},
 year = "unknown",
 paper = "Davexxa.pdf",
 keywords = "printed"
+@article{Schn02,
+ author = "Schnoebelen, Ph.",
+ title = {{The Complexity of Temporal Logic Model Checking}},
+ journal = "Advances in Modal Logic",
+ volume = "4",
+ pages = "144",
+ year = "2002",
+ paper = "Schn02.pdf"
}
\end{chunk}
\index{Jenks, Richard D.}
+\index{Noonan, Matt}
\begin{chunk}{axiom.bib}
@article{Jenk75,
 author = "Jenks, Richard D.",
 title = {{Course Outline: Yale University, New Haven}},
 journal = "SIGSAM Bulletin",
 volume = "9",
 number = "3",
 pages = "910",
+@inproceedings{Noon18,
+ author = "Noonan, Matt",
+ title = {{Ghosts of Departed Proofs (Functional Pearl)}},
+ booktitle = "Haskell '18",
publisher = "ACM",
 year = "1975",
 paper = "Jenk75.pdf",
 keywords = "axiomref, printed, DONE"
+ isbn = "9781450358354",
+ abstract =
+ "Library authors often are faced with a design choice: should a
+ function with preconditions be implemented as a partial function,
+ or by returning a failure condition on incorrect use? Neither
+ option is ideal. Partial functions lead to frustrating runtime
+ errors. Failure conditions must be checked at the use-site,
+ placing an unfair tax on the users who have ensured that the
+ function's preconditions were correctly met.
+
+ In this paper, we introduce an API design concept called `ghosts
+ of departed proofs' based on the following observation:
+ sophisticated preconditions can be encoded in Haskell's type
+ system with no runtime overhead, by using proofs that inhabit
+ phantom type parameters attached to new type wrappers. The user
+ expresses correctness arguments by constructing proofs to inhabit
+ these phantom types. Critically, this technique allows the library
+ {\sl user} to decide when and how to validate that the API's
+ preconditions are met.
+
+ The 'ghost of departed proofs' approach to API design can achieve
+ many of the benefits of dependent types and refinement types, yet
+ only requires some minor and wellunderstood extensions to Haskell
+ 2010. We demonstrate the utility of this approach through a series
+ of case studies, showing how to enforce novel invariants for
+ lists, maps, graphs, shared memory regions, and more.",
+ paper = "Noon18.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Jenks, Richard D.}
+\index{Innes, Sean}
+\index{Wu, Nicolas}
\begin{chunk}{axiom.bib}
@inproceedings{Jenk71a,
 author = "Jenks, Richard D.",
 title = {{META LISP and META PLUS:: Tools for Rapidly Implementing
 extendable language translators}},
 booktitle = "Proc. 2nd ACM Symposium on Symbolic and Algebraic
 Manipulation",
+@inproceedings{Inne19,
+ author = "Innes, Sean and Wu, Nicolas",
+ title = {{Tic Tak Types}},
+ booktitle = "Int. Workshop on Type Driven Development",
publisher = "ACM",
 pages = "281",
 year = "1971",
+ year = "2019",
abstract =
 "A unique feature of the SCRATCHPAD system for symbolic
 manipulation is its powerful translation facilities. The essential
 components are META/LISP, a translator writing system, and
 META/PLUS, a facility for immediately extending the syntax of any
 translator produced through META/LISP. This talk will illustrate
 how these facilities may be used to produce a conversational
 higherlevel LISP system. The language chosen is called ALPL
 because of its similarity to APL and LPL, a language resident in
 the SCRATCHPAD system. The essential characteristics of ALPL are
 described by eight syntax rules together with brief examples of
 corresponding ALPL and LISP programs. It is shown how a META/LISP
 program may be easily written to produce a conversation ALPL
 system. The ALPL language is then incrementally extended by
 introducing new notations defined in terms of existing ALPL
 constructs through calls to META/PLUS.",
 keywords = "axiomref"
+ "Tic-Tac-Toe is a simple, familiar, classic game enjoyed by
+ many. This pearl is designed to give a flavour of the world of
+ dependent types to the uninitiated functional programmer. We cover
+ a journey from Tic-Tak-Terrible implementations in the harsh world
+ of virtually untyped {\sl Strings}, through the safe haven of
+ vectors that know their own length, and into a Tic-Tac-Titanium
+ version that is too strongly typed for its own good. Along the way
+ we discover something we knew all along: types are great, but in
+ moderation. This lesson is quickly put to use in a more complex
+ recursive version.",
+ paper = "Inne19.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Davenport, James}
\index{Padget, Julian}
+\index{Naur, Peter}
\begin{chunk}{axiom.bib}
@article{Dave85c,
 author = "Davenport, James and Padget, Julian",
 title = {{HEUGCD: How Elementary Upperbounds Generate Cheaper Data}},
 journal = "LNCS",
 volume = "204",
 year = "1985",
 booktitle = "EUROCAL '85 European Conference on Computer Algebra",
 publisher = "Springer",
 abstract =
 "The work presented in this paper is a direct consequence of the
 ideas set forth by Char et al (1984b) describing a new technique
 for computing the greatest common divisor of polynomials.",
 paper = "Dave85c.pdf"
+@misc{Naur85,
+ author = "Naur, Peter",
+ title = {{Programming as Theory Building}},
+ link = "\url{http://pages.cs.wisc.edu/~remzi/Naur.pdf}",
+ year = "1985",
+ paper = "Naur85.pdf",
+ keywords = "DONE"
}
\end{chunk}
\index{Char, Bruce W.}
\index{Geddes, Keith O.}
\index{Gonnet, Gaston H.}
+\index{Elliott, Conal}
+\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@article{Char84,
 author = "Char, Bruce W. and Geddes, Keith O. and Gonnet, Gaston H.",
 title = {{GCDHEU: Heuristic polynomial GCD algorithm base on Integer
 GCD computation}},
 journal = "LNCS",
 volume = "174",
 pages = "285296",
+@inproceedings{Elli90,
+ author = "Elliott, Conal and Pfenning, Frank",
+ title = {{A SemiFunctional Implementation of a HigherOrder Logic
+ Programming Language}},
+ year = "1990",
+ link = "\url{http://www.cs.cmu.edu/~fp/papers/elpsml90.pdf}",
+ comment = "\url{http://www.cs.cmu.edu/~fp/papers/elpsmlpaper.tar.gz}",
+ paper = "Elli90.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Carlsson, Mats}
+\begin{chunk}{axiom.bib}
+@article{Carl84,
+ author = "Carlsson, Mats",
+ title = {{On Implementing Prolog in Functional Programming}},
+ journal = "New Generation Computing",
+ volume = "2",
+ pages = "347359",
year = "1984",
abstract =
 "The design of algorithms for polynomial GCD computation has been
 a continuing area of research since the beginning of the
 development of symbolic computation systems. The earliest efforts
 were mainly directed at PRS (Polynomial Remainder Sequence)
 algorithms which are a direct generalization of Euclid's
 algorithm. The main algorithms of this type are the Reduced PRS
 algorithm and the Subresultant PRS algorithm. Hearn discusses the
 use of trial divisions to further improve the performance of PRS
 algorithms. The first fundamentally different polynomial GCD
 algorithm was the modular algorithm. To amek the modular algorithm
 competative for sparse multivariate polynomals, Zippel developed
 the sparse modular algorithm. Another modulartype algorithm was
 the Henselbased EZ GCD algorithm which was later improved as the
 EEZ GCD algorithm.

 the present paper discusses a new heuristic algorithm, GCDHEU,
 which is found to be very efficient for problems in a small number
 of variables. The heuristic algorithm can be viewed as
 amodulartype algorithm in that it uses evaluation and
 interpolation, but only a single evaluation per variable is
 used. The heuristic algorithm can be incorporated into a
 reorganized form of the EEZ GCD algorithm such that the base of
 the EEZ GCD algorithm, rather than a univariate GCD algorithm, is
 GCDHEU which is often successful for problems in up to four variables.",
 paper = "Char84.pdf",
+ "This report surveys techniques for implementing the programming
+ language Prolog. It focuses on explaining the procedural semantics
+ of the language in terms of functional programming constructs. The
+ techniques {\sl success continuations} and {\sl proof streams} are
+ introduced, and it is shown how Horn clause interpreters can be
+ built upon them. Continuations are well known from denotational
+ semantics theory, in this paper it is shown that they are viable
+ constructs in actual programs.",
+ paper = "Carl84.pdf",
keywords = "printed"
}
\end{chunk}
\index{Gianni, Patrizia}
\index{Trager, Barry}
+\index{Baker, Henry}
\begin{chunk}{axiom.bib}
@article{Gian85a,
 author = "Gianni, Patrizia and Trager, Barry",
 title = {{GCD's and Factoring Multivariate Polynomials using
 Grobner Bases}},
 journal = "LNCS",
 volume = "204",
 year = "1985",
 booktitle = "EUROCAL '85 European Conference on Computer Algebra",
 publisher = "Springer",
 abstract =
 "This paper shows how Grobner basis computations can be used to
 compute multivariate gcds, perform Hensel lifting, and reduce
 multivariate factorization to univariate. The essential idea is to
 produce an ideal containing the desired polynomial as an element
 of least degree. The construction is somewhat analogous to the
 recent lattice algorithms for polynomial factorization. A major
 difference is that we don't need to perform the hensel lifting as
 a separate step; one Brobner basis computation is sufficient to
 find the desired multivariate factor or gcd. We produce algorithms
 which are vary simple and may be of use on small systems where
 code size is critical. We feel that these results demonstrate the
 fundamental importance of the Grobner basis in computer algebra.",
 paper = "Gian85a.pdf"
+@misc{Bake84,
+ author = "Baker, Henry",
+ title = {{The Nimble Type Inferencer for Common Lisp-84}},
+ year = "1984",
+ link = "\url{http://home.pipeline.com/~hbaker1/TInference.html}",
+ abstract =
+ "We describe a framework and an algorithm for doing type inference
+ analysis on programs written in full Common Lisp-84 (Common Lisp
+ without the CLOS object-oriented extensions). The objective of
+ type inference is to determine tight lattice upper bounds on the
+ range of runtime data types for Common Lisp program variables and
+ temporaries. Depending upon the lattice used, type inference can
+ also provide range analysis information for numeric
+ variables. This lattice upper bound information can be used by an
+ optimizing compiler to choose more restrictive, and hence more
+ efficient, representations for these program variables. Our
+ analysis also produces tighter control flow information, which can
+ be used to eliminate redundant tests which result in dead
+ code. The overall goal of type inference is to mechanically
+ extract from Common Lisp programs the same degree of
+ representation information that is usually provided by the
+ programmer in traditional strongly-typed languages. In this way,
+ we can provide some classes of Common Lisp programs execution time
+ efficiency expected only for more strongly-typed compiled languages.",
+ paper = "Bake84.pdf",
+ keywords = "printed, DONE"
}
\end{chunk}
\index{Griesmer, J.H.}
\index{Jenks, R.D.}
\index{Yun, D.Y.Y}
\begin{chunk}{axiom.bib}
@article{Grie78a,
 author = "Griesmer, J.H. and Jenks, R.D. and Yun, D.Y.Y",
 title = {{A Taxonomy for Algebraic Computation}},
 journal = "ACM SIGSAM Bulletin",
 volume = "12",
 number = "3",
 pages = "2528",
 year = "1978",
 abstract =
 "Recently the authors responded to a request from Professor
 Anthony Ralston of the State University of New York at Buffalo to
 participate in the review of a proposed Taxonomy of Computer
 Science and Engineering.",
 paper = "Grie78a.pdf"
+\index{Dunfield, Joshua}
+\index{Krishnaswami, Neel}
+\begin{chunk}{axiom.bib}
+@misc{Dunf19,
+ author = "Dunfield, Joshua and Krishnaswami, Neel",
+ title = {{Bidirectional Typing}},
+ year = "2019",
+ link = "\url{https://www.cl.cam.ac.uk/~nk480/bidirsurvey.pdf}",
+ abstract =
+ "Bidirectional typing combines two modes of typing: type checking,
+ which checks that a program satisfies a known type, and type
+ synthesis, which determines a type from the program. Using
+ checking enables bidirectional typing to break the decidability
+ barrier of Damas-Milner approaches; using synthesis enables
+ bidirectional typing to avoid the large annotation burden of
+ explicitly typed languages. In addition, bidirectional typing
+ improves error locality. We highlight the design principles that
+ underlie bidirectional type systems, survey the development of
+ bidirectional typing from the prehistoric period before Pierce and
+ Turner's local type inference to the present day, and provide
+ guidance for future investigations.",
+ paper = "Dunf19.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Davenport, J.H.}
+\index{Sannella, Donald}
+\index{Tarlecki, Andrzej}
\begin{chunk}{axiom.bib}
@article{Dave79,
 author = "Davenport, J.H.",
 title = {{The Computerisation of Algebraic Geometry}},
 journal = "LNCS",
 volume = "72",
 pages = "119133",
 year = "1979",
 abstract =
 "This paper is concerned with the problems of performing computer
 algebra when the variables involved are related by some algebraic
 dependencies. It is shown that heuristic or ad hoc treatment of
 such cases leads rapidly to problems, and the proper mathematical
 foundations for the treatment of algebraic functions is
 presented. The formalism leads directly to the requirement for
 algorithms to find the genus of an algebraic curve, and to
 discover what function, if any, is associated with a given
 divisor. These algorithms and the relevant computational
 techniques are briefly described. In a concluding section the
 areas where these techniques are required in an integration scheme
 for algebraic functions are explained.",
 paper = "Dave79.pdf"
+@book{Sann12,
+ author = "Sannella, Donald and Tarlecki, Andrzej",
+ title = {{Foundations of Algebraic Specification and Formal Software
+ Development}},
+ publisher = "Springer",
+ year = "2012",
+ isbn = "9783642173363",
+ paper = "Sann12.pdf"
+}
+
+\end{chunk}
+
+\index{Sannella, D.}
+\index{Tarlecki, A.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Sann91,
+ author = "Sannella, D. and Tarlecki, A.",
+ title = {{Formal Program Development in Extended ML for the Working
+ Programmer}},
+ booktitle = "3rd BCS/FACS Workshop on Refinement",
+ publisher = "Springer",
+ pages = "99130",
+ year = "1991",
+ abstract =
+ "Extended ML is a framework for the formal development of programs
+ in the Standard ML programming language from highlevel
+ specifications of their required input/output behavior. It
+ strongly supports the development of modular programs consisting
+ of an interconnected collection of generic and reusable units. The
+ Extended ML framework includes a methodology for formal program
+ development which establishes a number of ways of proceeding from
+ a given specification of a programming task towards a
+ program. Each such step gives rise to one or more proof
+ obligations which must be proved in order to establish the
+ correctness of that step. This paper is intended as a user-oriented
+ summary of the Extended ML language and methodology. Theoretical
+ technicalities are avoided whenever possible, with emphasis placed
+ on the practical aspects of formal program development. An
+ extended example of a complete program development in Extended ML
+ is included.",
+ paper = "Sann91.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Trager, Barry}
+\index{Sannella, D.}
+\index{Tarlecki, A.}
\begin{chunk}{axiom.bib}
@article{Trag79,
 author = "Trager, Barry",
 title = {{Integration of Simple Radical Extensions}},
 journal = "LNCS",
 volume = "72",
 pages = "408414",
 year = "1979",
 abstract =
 "Risch's landmark paper presented the first decision procedure for
 the integration of elementary functions. In that paper he required
 that the functions appearing in the integrand be algebraically
 independent. Shortly afterwards in [Risalg] and [Ris70] he relaxed
 that restriction and outlined a complete decision procedure for
 the integration of elementary functions in finite
 terms. Unfortunately his algorithms for dealing with algebraic
 functions required considerably more complex machinery than his
 earlier ones for purely transcendental functions. Moses'
 implementation of the earlier approach in MACSYMA demonstrated its
 practicality, whereas the same has yet to be done for Risch's more
 recent approach.

 This paper will show how Risch's earlier techniques can be
 generalized to deal with unnested radicals. While this may seem a
 severe restriction, perusing an integral table such as [Bois61] will
 show that fewer than 1\% of the problems are excluded.",
 paper = "Trag79.pdf"
}

\end{chunk}

\index{Moses, Joel}
\index{Zippel, Richard}
\begin{chunk}{axiom.bib}
@article{Mose79,
 author = "Moses, Joel and Zippel, Richard",
 title = {{Algorithms for the Integration of Algebraic Functions}},
 journal = "LNCS",
 volume = "72",
 pages = "426430",
 year = "1979",
 paper = "Mose79.pdf"
+@article{Sann99,
+ author = "Sannella, Donald and Tarlecki, Andrzej",
+ title = {{Algebraic Methods for Specification and Formal Development
+ of Programs}},
+ journal = "ACM Computing Surveys",
+ volume = "31",
+ year = "1999",
+ paper = "Sann99.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Avigad, Jeremy}
+\index{Kahrs, Stefan}
\begin{chunk}{axiom.bib}
@misc{Avig19,
 author = "Avigad, Jeremy",
 title = {{The Mechanization of Mathematics}},
 year = "2019",
 comment = "The Big Proof Workshop",
 paper = "Avig19.pdf",
 keywords = "DONE"
+@techreport{Kahr95,
+ author = "Kahrs, Stefan",
+ title = {{On the Static Analysis of Extended ML}},
+ type = "technical report",
+ institution = "Lab for Foundations of Comp Sci. Univ. Edinburgh",
+ number = "Research Note",
+ abstract =
+ "This is a short note describing differences in static analysis of
+ EML, as defined in [KST94] and SML, as defined in [MTH90] and
+ [MT91]. It is intended for use by people who are building an EML
+ parser/typechecker by modifying an existing SML compiler.",
+ paper = "Kahr95.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Mosses, Peter}
+\index{Kahrs, S.}
+\index{Sannella, D.}
+\index{Tarlecki, A.}
\begin{chunk}{axiom.bib}
@article{Moss80,
 author = "Mosses, Peter",
 title = {{A Constructive Approach to Compiler Correctness}},
 journal = "LNCS",
 volume = "85",
 year = "1980",
 booktitle = "Automata, Languages and Programming",
 publisher = "Springer",
+@article{Kahr94,
+ author = "Kahrs, S. and Sannella, D. and Tarlecki, A.",
+ title = {{Interfaces and Extended ML}},
+ journal = "SIGPLAN Notices",
+ volume = "29",
+ number = "8",
+ pages = "111--118",
+ year = "1994",
abstract =
 "It is suggested that denotational semantics definitions of
 programming languages should be based on a small number of
 abstract data types, each embodying a fundamental concept of
 computation. Once these fundamental abstract data types have been
 implemented in a particular target language (e.g. stackmachine
 code), it is a simple matter to construct a correct compiler for
 any source language from its denotational semantic definition. The
 approach is illustrated by constructing a compiler similar to the
 one which was proved correct by Thatcher, Wagner \& Wright
 (1979). Some familiarity with manysorted algebras is presumed.",
 paper = "Moss80.pdf",
+ "This is a position paper giving our views on the uses and makeup
+ of module interfaces. The position espoused is inspired by our
+ work on the Extended ML (EML) formal software development
+ framework and by ideas in the algebraic foundations of
+ specification and formal development. The present state of
+ interfaces in EML is outlined and set in the context of plans for
+ a more general EMLlike framework with axioms in interfaces taken
+ from an arbitrary logical system formulated as an
+ {\sl institution}. Some more speculative plans are sketched
+ concerning the simultaneous use of multiple institutions in
+ specification and development.",
+ paper = "Kahr94.pdf",
keywords = "printed"
}
\end{chunk}
\index{Ehrig, Hartmut}
\index{Kreowski, HansJorg}
\index{Thatcher, James}
\index{Wagner, Eric}
\index{Wright, Jesse}
+\index{Sannella, D.}
+\index{Tarlecki, A.}
\begin{chunk}{axiom.bib}
@article{Ehri80a,
 author = "Ehrig, Hartmut and Kreowski, HansJorg and Thatcher, James
 and Wagner, Eric and Wright, Jesse",
 title = {{Parameterized Data Types in Algebraic Specification Languages}},
+@article{Sann91a,
+ author = "Sannella, Donald and Tarlecki, Andrzej",
+ title = {{Extended ML: Past, Present and Future}},
journal = "LNCS",
 volume = "85",
 year = "1980",
 booktitle = "Automata, Languages and Programming",
 publisher = "Springer",
 paper = "Ehri80a.pdf",
+ volume = "534",
+ pages = "297--322",
+ year = "1991",
+ abstract =
+ "An overview of past, present and future work on the Extended ML
+ formal program development framework is given, with emphasis on
+ two topics of current active research: the semantics of the
+ Extended ML specification language, and tools to support formal
+ program development.",
+ paper = "Sann91a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Liskov, Barbara}
\index{Zilles, Stephen}
+\index{Sannella, Donald}
\begin{chunk}{axiom.bib}
@article{Lisk77a,
 author = "Liskov, Barbara and Zilles, Stephen",
 title = {{Programming with Abstract Data Types}},
 journal = "SIGPLAN Notices",
 volume = "9",
 number = "4",
 pages = "5059",
 year = "1977",
 abstract =
 "The motivation behind the work in veryhighlevel languages is to
 ease the programming task by providing the programmer with a
 language containing primitives or abstractions suitable to his
 problem area. The programmer is then able to spend his effort in
 the right place; he concentrates on solving his problem, and the
 resulting program will be more reliable as a result. Clearly, this
 is a worthwhile goal.

 Unfortunately, it is very difficult for a designer to select in
 advance all the abstractions which the users of his language might
 need. If a language is to be used at all, it is likely to be used
 to solve problems which its designer did not envision, and for
 which the abstractions embedded in the language are not sufficient.

 This paper presents an approach which allows the set of builtin
 abstractions to be augmented when the need for a new data
 abstraction is discovered. This approach to the handling of
 abstraction is an outgrowth of work on designing a language for
 structured programming. Relevant aspects of this language are
 described, and examples of the use and definitions of abstraction
 are given.",
 paper = "Lisk77a.pdf"
+@misc{Sann86a,
+ author = "Sannella, Donald",
+ title = {{Formal Specification of ML Programs}},
+ link =
+ "\url{http://www.lfcs.inf.ed.ac.uk/reports/86/ECS-LFCS-86-15/ECS-LFCS-86-15.ps}",
+ year = "1986",
+ abstract =
+ "These notes were written to accompany lectures on program
+ specification which formed part of a course on functional
+ programming in ML. Functions can be specified using a
+ specification language obtained by extending ML with
+ (nonexecutable) firstorder axioms. Simple inductive proofs
+ suffice to show that in ML function satisfies such a
+ specification. This approach can also be used to specify and
+ verify larger programs built from smaller pieces using ML's
+ modularisation facilities. Examples are used to illustrate the
+ methods discussed.",
+ paper = "Sann86a.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Black, A.P.}
+\index{Sannella, Donald}
+\index{Tarlecki, Andrzej}
\begin{chunk}{axiom.bib}
@techreport{Blac80,
 author = "Black, A.P.",
 title = {{Exception Handling and Data Abstraction}},
 type = "Research Report",
 institution = "IBM Research",
 number = "RC8059",
 year = "1980"
+@article{Sann97,
+ author = "Sannella, Donald and Tarlecki, Andrzej",
+ title = {{Essential Concepts of Algebraic Specification and Program
+ Development}},
+ journal = "Formal Aspects of Computing",
+ volume = "9",
+ pages = "229--269",
+ year = "1997",
+ abstract =
+ "The main ideas underlying work on the modeltheoretic foundations
+ of algebraic specification and formal program development are
+ presented in an informal way. An attempt is made to offer an
+ overall view, rather than new results, and to focus on the basic
+ motivation behind the technicalities presented elsewhere.",
+ paper = "Sann97.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Goguen, J.A.}
\index{Thatcher, J.W.}
\index{Wagner, E.G.}
\index{Wright, J.B.}
+\index{Wright, Andrew K.}
\begin{chunk}{axiom.bib}
@techreport{Gogu76,
 author = "Goguen, J.A. and Thatcher, J.W. and Wagner, E.G. and
 Wright, J.B.",
 title = {{An Initial Algebra Approach to the Specification,
 Correctness and Implementation of Abstract Data Types}},
 type = "Research Report",
 institution = "IBM Research",
 number = "RC6487",
 year = "1976"
+@inproceedings{Wrig95,
+ author = "Wright, Andrew K.",
+ title = {{Simple Imperative Polymorphism}},
+ booktitle = "LISP and Symbolic Computation",
+ publisher = "Kluwer Academic",
+ pages = "242--256",
+ year = "1995",
+ abstract =
+ "This paper describes a simple extension of the Hindley-Milner
+ polymorphic type discipline to call-by-value languages that
+ incorporate imperative features like references, exceptions, and
+ continuations. This extension sacrifices the ability to type every
+ purely functional expression that is typable in the Hindley-Milner
+ system. In return, it assigns the same type to functional and
+ imperative implementations of the same abstraction. Hence with a
+ module system that separates specifications from implementations,
+ imperative features can be freely used to implement polymorphic
+ specifications. A study of a number of ML programs shows that the
+ inability to type all HindleyMilner typable expressions seldom
+ impacts realistic programs. Furthermore, most programs that are
+ rendered untypable by the new system can be easily repaired.",
+ paper = "Wrig95.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Donahue, J.}
@misc{Dona77,
 author = "Donahue, J.",
 title = {{On the semantics of ``Data Type''}},
 comment = "Cornell University",
 year = "1977"
+\index{Tofte, Mads}
+\begin{chunk}{axiom.bib}
+@phdthesis{Toft88,
+ author = "Tofte, Mads",
+ title = {{Operational Semantics and Polymorphic Type Inference}},
+ school = "Univ. of Edinburgh",
+ year = "1988",
+ abstract =
+ "Three languages with polymorphic type disciplines are discussed,
+ namely the $\lambda$calculus with Milner's polymorphic type
+ discipline; a language with imperative features (polymorphic
+ references); and a skeletal module language with structures,
+ signatures and functors. In each of the two first cases we show
+ that the type inference system is consistent with an operational
+ dynamic semantics.
+
+ On the module level, polymorphic types correspond to
+ signatures. There is a notion of principal signatures. Socalled
+ signature checking is the module level equivalent of type
+ checking. In particular, there exists an algorithm which either
+ fails or produces a principal signature.",
+ paper = "Toft88.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Ershov, A.P.}
+\index{Reynolds, John C.}
\begin{chunk}{axiom.bib}
@misc{Ersh77,
 author = "Ershov, A.P.",
 title = {{On the Essence of Compilation}},
 comment = "Proc. IFIP Working Conf. on Formal Description of
 Programming Concepts, Vol. 1",
 year = "1977"
+@inproceedings{Reyn83,
+ author = "Reynolds, John C.",
+ title = {{Types, Abstraction and Parametric Polymorphism}},
+ booktitle = "Information Processing 83",
+ publisher = "Elsevier Science Publishers",
+ year = "1983",
+ abstract =
+ "We explore the thesis that type structure is a syntactic
+ discipline for maintaining levels of abstraction. Traditionally,
+ this view has been formalized algebraically, but the algebraic
+ approach fails to encompass higherorder functions. For this
+ purpose, it is necessary to generalize homomorphic functions to
+ relations; the result is an ``abstraction'' theorem that is
+ applicable to the typed lambda calculus and various extensions,
+ including userdefined types.
+
+ Finally, we consider polymorphic functions, and show that the
+ abstraction theorem captures Strachey's concept of parametric, as
+ opposed to ad hoc, polymorphism.",
+ paper = "Reyn83.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Hearn, Anthony C.}
+\index{Leroy, Xavier}
\begin{chunk}{axiom.bib}
@book{Hear73,
 author = "Hearn, Anthony C.",
 title = {{REDUCE2 Users Manual}},
 comment = "Computing Physics Group",
 publisher = "University of Utah",
 year = "1973"
+@techreport{Lero92,
+ author = "Leroy, Xavier",
+ title = {{Polymorphic Typing of an Algorithmic Language}},
+ type = "research report",
+ institution = "INRIA",
+ number = "N1778",
+ year = "1992",
+ abstract =
+ "The polymorphic type discipline, as in the ML language, fits well
+ within purely applicative languages, but does not extend naturally
+ to the main feature of algorithmic languages: inplace update of
+ data structures. Similar typing difficulties arise with other
+ extensions of applicative languages: logical variables,
+ communication channels, continuation handling. This work studies
+ (in the setting of relational semantics) two new approaches to the
+ polymorphic typing of these nonapplicative features. The first
+ one relies on a restriction of generalization over types (the
+ notion of dangerous variables), and on a refined typing of
+ functional values (closure typing). The resulting type system is
+ compatible with the ML core language, and is the most expressive
+ type systems for ML with imperative features so far. The second
+ approach relies on switching to ``by-name'' semantics for the
+ constructs of polymorphism, instead of the usual ``by-value''
+ semantics. The resulting language differs from ML, but lends
+ itself easily to polymorphic typing. Both approaches smoothly
+ integrate nonapplicative features and polymorphic typing.",
+ paper = "Lero92.pdf"
+}
+
+\end{chunk}
+
+\index{Hutton, Graham}
+\begin{chunk}{axiom.bib}
+@article{Hutt99,
+ author = "Hutton, Graham",
+ title = {{A Tutorial on the Universality and Expressiveness of
+ Fold}},
+ journal = "J. Functional Programming",
+ volume = "9",
+ number = "4",
+ pages = "355--372",
+ year = "1999",
+ abstract =
+ "In functional programming, {\sl fold} is a standard operator that
+ encapsulates a simple pattern of recursion for processing
+ lists. This article is a tutorial on two key aspects of the fold
+ operator for lists. First of all, we emphasize the use of the
+ universal property of fold both as a proof principle that avoids
+ the need for inductive proofs, and as a definition principle that
+ guides the transformation of recursive functions into definitions
+ using fold. Secondly, we show that even though the pattern of
+ recursion encapsulated by fold is simple, in a language with
+ tuples and functions as firstclass values the fold operator has
+ greater expressive power than might first be expected.",
+ paper = "Hutt99.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Hearn, Anthony C.}
+\index{Cardelli, Luca}
\begin{chunk}{axiom.bib}
@article{Hear71,
 author = "Hearn, Anthony C.",
 title = {{Applications of Symbol Manipulation in Theoretical Physics}},
 journal = "Communications of the ACM",
 volume = "14",
 number = "8",
 pages = "511516",
 year = "1971",
 paper = "Hear71.pdf"
+@article{Card88b,
+ author = "Cardelli, Luca",
+ title = {{Basic Polymorphic Typechecking}},
+ journal = "Science of Computer Programming",
+ volume = "8",
+ number = "2",
+ year = "1988",
+ paper = "Card88b.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Hearn, Anthony C.}
+\index{Hughes, John}
\begin{chunk}{axiom.bib}
@article{Hear72,
 author = "Hearn, Anthony C.",
 title = {{An Improved NonModular Polynomial GCD Algorithm}},
 journal = "ACM SIGSAM Bulletin",
 volume = "23",
 pages = "1015",
 year = "1972",
+@misc{Hugh19,
+ author = "Hughes, John",
+ title = {{How to Specify it!}},
+ year = "2019",
+ link = "\url{https://www.dropbox.com/s/tx2b84kae4bw1p4/paper.pdf}",
abstract =
 "An improved nonmodular algorithm for the calculation of the
 greatest common divisor of two multivariate polynomials is
 presented.",
 paper = "Hear72.pdf"
+ "Propertybased testing tools test software against a
+ specification, rather than a set of examples. This tutorial paper
+ presents five generic approaches to writing such specifications
+ (for purely functional code). We discuss costs, benefits, and
+ bugfinding power of each approach, with reference to a simple
+ example with eight buggy variants. The lessons learned should help
+ the reader to develop effective property-based tests in the future.",
+ paper = "Hugh19.pdf",
+ keywords = "printed, DONE"
}
\end{chunk}
\index{Campbell, J.A.}
\index{Hearn, Anthony C.}
+\index{Goto, Kazushige}
+\index{van de Geijn, Robert A.}
\begin{chunk}{axiom.bib}
@article{Hear70,
 author = "Campbell, J.A. and Hearn, Anthony C.",
 title = {{Symbolic Analysis of Feynman Diagrams by Computer}},
 journal = "J. of Computational Physics",
 volume = "5",
 number = "2",
 pages = "280327",
 year = "1970",
+@article{Goto19,
+ author = "Goto, Kazushige and van de Geijn, Robert A.",
+ title = {{Anatomy of HighPerformance Matrix Multiplication}},
+ journal = "Transactions on Mathematical Software",
+ volume = "V",
+ number = "N",
+ year = "2019",
abstract =
 "We describe a system of programs in the language LISP 1.5 which
 handles all stages of calculation from the specification of an
 elementaryparticle process in terms of a Hamiltonian of
 interaction or Feynman diagrams to the derivation of an absolute
 square of the matrix element for the process. Examples of
 significant parts of the program are presented in the text, while
 a detailed listing of this material is contained in two Appendices
 which are available on request from the authors.",
 paper = "Camp70.pdf"
+ "We present the basic principles which underlie the high
+ performance implementation of the matrix multiplication that is
+ part of the widely used GotoBLAS library. Design decisions are
+ justified by successively refining a model of architectures with
+ multilevel memories. A simple but effective algorithm for
+ executing this operation results. Implementations on a broad
+ selection of architectures are shown to achieve nearpeak
+ performance.",
+ paper = "Goto19.pdf"
}
\end{chunk}
\index{Campbell, J.A.}
\index{Hearn, Anthony C.}
+\index{Wernhard, Christoph}
\begin{chunk}{axiom.bib}
@article{Hear70,
 author = "Campbell, J.A. and Hearn, Anthony C.",
 title = {{Symbolic Analysis of Feynman Diagrams by Computer}},
 journal = "J. of Computational Physics",
 volume = "5",
 number = "2",
 pages = "280327",
 year = "1970",
 abstract =
 "We describe a system of programs in the language LISP 1.5 which
 handles all stages of calculation from the specification of an
 elementaryparticle process in terms of a Hamiltonian of
 interaction or Feynman diagrams to the derivation of an absolute
 square of the matrix element for the process. Examples of
 significant parts of the program are presented in the text, while
 a detailed listing of this material is contained in two Appendices
 which are available on request from the authors.",
 paper = "Camp70.pdf"
+@misc{Wern19,
+ author = "Wernhard, Christoph",
+ title = {{PIE  Proving, Interpolating and Eliminating on the Basis
+ of FirstOrder Logic}},
+ year = "2019",
+ link = "\url{https://arxiv.org/pdf/1908.11137.pdf}",
+ abstract =
+ "PIE is a Prologembedded environment for automated reasoning on
+ the basis of firstorder logic. It includes a versatile formula
+ macro system and supports the creation of documents that
+ intersperse macro definitions, reasoner invocations and LaTeX
+ formatted natural language text. Invocation of various reasoners
+ is supported. External provers as well as subsystems of PIE,
+ which include preprocessors, a Prologbased firstorder prover,
+ methods for Craig interpolation and methods for secondorder
+ quantifier elimination.",
+ paper = "Wern19.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Henderson, Peter}
\index{Morris Jr., James H.}
+\index{Aldrich, Jonathan}
\begin{chunk}{axiom.bib}
@inproceedings{Hend76,
 author = "Henderson, Peter and Morris Jr., James H.",
 title = {{A Lazy Evaluator}},
 booktitle = "3rd Symp. on Principles of Programming Languages",
+@inproceedings{Aldr13,
+ author = "Aldrich, Jonathan",
+ title = {{The Power of Interoperability: Why Objects are Inevitable}},
+ booktitle = "Onward!",
publisher = "ACM",
 pages = "95103",
 year = "1976",
+ year = "2013",
+ link = "\url{https://www.cs.cmu.edu/~aldrich/papers/objectsessay.pdf}",
+ abstract =
+ "Three years ago, in this venue, Cook argued that in their
+ essence, objects are what Reynolds called {\sl procedural data
+ structures}. His observations raises a natural question: if
+ procedural data structures are the essence of objects, has this
+ contributed to the empirical success of objects, and if so, how?
+
+ This essay attempts to answer that question. After reviewing
+ Cook's definition, I propose the term {\sl service abstractions}
+ to capture the essential nature of objects. This terminology
+ emphasizes, following Kay, that objects are not primarily about
+ representing and manipulating data, but are more about providing
+ services in support of higherlevel goals. Using examples taken
+ from object oriented frameworks, I illustrate the unique design
+ leverage that service abstractions provide: the ability to define
+ abstractions that can be extended, and whose extensions are
+ interoperable in a firstclass way. The essay argues that the form
+ of interoperable extension supported by service abstractions is
+ essential to modern software: many modern frameworks and
+ ecosystems could not have been built without service
+ abstractions. In this sense, the success of objects was not a
+ coincidence: it was an inevitable consequence of their service
+ abstraction nature.",
+ paper = "Aldr13.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
+\index{Chang, Stephen}
+\index{Knauth, Alex}
+\index{Greenman, Ben}
+\begin{chunk}{axiom.bib}
+@inproceedings{Chan17,
+ author = "Chang, Stephen and Knauth, Alex and Greenman, Ben",
+ title = {{Type Systems as Macros}},
+ booktitle = "Principles of Programming Languages",
+ publisher = "ACM",
+ year = "2017",
abstract =
 "A different way to execute pure LISP programs is presented. It
 delays the evaluation of parameters and list structures without
 ever having to perform more evaluation steps than the usual
 method. Although the central idea can be found in earlier work
 this paper is of interest since it treats a rather wellknown
 language and works out an algorithm which avoids full
 substitution. A partial correctness proof using ScottStrachey
 semantics is sketched in a later section.",
 paper = "Hend76.pdf",
+ "We present TURNSTILE, a metalanguage for creating typed embedded
+ languages. To implement the type system, programmers write type
+ checking rules resembling traditional judgment syntax. To
+ implement the semantics, they incorporate elaborations into these
+ rules. TURNSTILE critically depends on the idea of linguistic
+ reuse. It exploits a macro system in a novel way to simultaneously
+ type check and rewrite a surface program into a target
+ language. Reusing a macro system also yields modular
+ implementations whose rules may be mixed and matched to create
+ other languages. Combined with typical compiler and runtime reuse,
+ TURNSTILE produces performant typed embedded languages with little
+ effort.",
+ paper = "Chan17.pdf",
keywords = "printed"
}
\end{chunk}
\index{Jensen, Kathleen}
\index{Wirth, Niklaus}
+\index{Xi, Hongwei}
\begin{chunk}{axiom.bib}
@book{Jens75,
 author = "Jensen, Kathleen and Wirth, Niklaus",
 title = {{PASCAL User Manual and Report}},
 publisher = "SpringerVerlag",
 year = "1975",
 isbn = "0387901442",
 keywords = "owned"
+@book{Xixx19,
+ author = "Xi, Hongwei",
+ title = {{Introduction to Programming in ATS}},
+ publisher = "ATS Trustful Software, Inc",
+ year = "2019",
+ abstract =
+ "As a programming language, ATS is both syntaxrich and
+ featurerich. This book introduces the reader to some core
+ features of ATS, including basic functional programming, simple
+ types, (recursively defined) datatypes, polymorphic types,
+ dependent types, linear types, theorem proving, programming with
+ theorem proving (PwTP), and templatebased programming. Although
+ the reader is not assumed to be familiar with programming in
+ general, the book is likely to be rather dense for someone without
+ considerable programming experience.",
+ paper = "Xixx19.pdf"
}
\end{chunk}
+\index{Loh, Andres}
+\index{McBride, Conor}
+\index{Swierstra, Wouter}
\begin{chunk}{axiom.bib}
@book{IBMx78,
 author = "IBM",
 title = {{LISP/370 Program Description / Operations Manual}},
 publisher = "IBM Research",
 year = "1978",
 comment = "SH2020760"
+@article{Lohx01,
+ author = "Loh, Andres and McBride, Conor and Swierstra, Wouter",
+ title = {{A Tutorial Implementation of a Dependently Typed Lambda
+ Calculus}},
+ journal = "Fundamenta Informaticae",
+ volume = "XXI",
+ pages = "10011031",
+ year = "2001",
+ abstract =
+ "We present the type rules for a dependently typed core calculus
+ together with a straightforward implementation in Haskell. We
+ explicitly highlight the changes necessary to shift from a
+ simplytyped lambda calculus to a dependently typed lambda
+ calculus. We also describe how to extend our core language with
+ data types and write several small example programs. The article
+ is accompanied by an executable interpreter and example code that
+ allows immediate experimentation with the system we describe.",
+ paper = "Lohx01.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Morris Jr., J.H.}
+\index{Bahr, Patrick}
+\index{Hutton, Graham}
\begin{chunk}{axiom.bib}
@inproceedings{Morr73,
 author = "Morris Jr., J.H.",
 title = {{Types are not Sets}},
 booktitle = "Symp. on the Principles of Programming Languages",
 publisher = "ACM",
 pages = "120124",
 year = "1973"
+@article{Bahr15,
+ author = "Bahr, Patrick and Hutton, Graham",
+ title = {{Calculating Correct Compilers}},
+ journal = "Functional Programming",
+ year = "2015",
+ link = "\url{www.cs.nott.ac.uk/~pszgmh/ccc.pdf}",
+ abstract =
+ "In this article we present a new approach to the problem of
+ calculating compilers. In particular, we develop a simple but
+ general technique that allows us to derive correct compilers from
+ high-level semantics by systematic calculation, with all details
+ of the implementation of the compilers falling naturally out of
+ the calculation process. Our approach is based upon the use of
+ standard equational reasoning techniques, and has been applied to
+ calculate compilers for a wide range of language features and
+ their combination, including arithmetic expressions, exceptions,
+ state, various forms of lambda calculi, bounded and unbounded
+ loops, nondeterminism, and interrupts. All the calculations in
+ the article have been formalised using the Coq proof assistant,
+ which serves as a convenient interactive tool for developing and
+ verifying the calculations.",
+ paper = "Bahr15.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Bahr, Patrick}
+\index{Hutton, Graham}
+\begin{chunk}{axiom.bib}
+@article{Bahr19,
+ author = "Bahr, Patrick and Hutton, Graham",
+ title = {{Calculating Correct Compilers II}},
+ journal = "Functional Programming",
+ year = "2019",
+ link = "\url{www.cs.nott.ac.uk/~pszgmh/ccc2.pdf}",
+ abstract =
+ "In 'Calculating Correct Compilers' (Bahr and Hutton, 2015) we
+ developed a new approach to calculating compilers directly from
+ specifications of their correctness. Our approach only required
+ elementary reasoning techniques, and has been used to calculate
+ compilers for a wide range of language features and their
+ combination. However, the methodology was focused on stack-based
+ target machines, whereas real compilers often target
+ register-based machines. In this article, we show how our approach
+ can naturally be adapted to calculate compilers for register
+ machines.",
+ paper = "Bahr19.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Swords, Sol}
+\index{Davis, Jared}
+\begin{chunk}{axiom.bib}
+@article{Swor11,
+ author = "Swords, Sol and Davis, Jared",
+ title = {{Bit-Blasting ACL2 Theorems}},
+ journal = "EPTCS",
+ volume = "70",
+ pages = "84--102",
+ year = "2011",
+ abstract =
+ "Interactive theorem proving requires a lot of human
+ guidance. Proving a property involves (1) figuring out why it
+ holds, then (2) coaxing the theorem prover into believing it. Both
+ steps can take a long time. We explain how to use GL, a framework
+ for proving finite ACL2 theorems with BDD- and SAT-based
+ reasoning. This approach makes it unnecessary to deeply understand
+ why a property is true, and automates the process of admitting it
+ as a theorem. We use GL at Centaur Technology to verify execution
+ units for x86 integer, MMX, SSE, and floating-point arithmetic.",
+ paper = "Swor11.pdf"
}
\end{chunk}
diff --git a/src/axiom-website/patches.html b/src/axiom-website/patches.html
index fc8b9c3..4eea021 100644
--- a/src/axiom-website/patches.html
+++ b/src/axiom-website/patches.html
@@ -6000,6 +6000,10 @@ books/bookheader.tex add names to credit list
books/multind.sty multiple index files in a book
20190531.01.tpd.patch
books/bookvol4 add MODLISP Davenport chapter
+20191011.01.tpd.patch
+minor cleanups
+20191011.02.tpd.patch
+add references

1.9.1