From 004f18a1ab8a8c1c918e9cec699274032b662628 Mon Sep 17 00:00:00 2001
From: Tim Daly
Date: Fri, 29 Jun 2018 21:26:21 -0400
Subject: [PATCH] books/bookvolbib add references
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Goal: Proving Axiom Sane
\index{Davenport, James H.}
\begin{chunk}{axiom.bib}
@article{Dave12b,
author = "Davenport, James H.",
title = {{Small Algorithms for Small Systems}},
journal = "ACM Comm. in Computer Algebra",
volume = "46",
number = "1",
year = "2012",
paper = "Dave12b.pdf"
}
\end{chunk}
\index{Stoutemyer, David R.}
\begin{chunk}{axiom.bib}
@article{Stou12,
author = "Stoutemyer, David R.",
title = {{Series Crimes}},
journal = "ACM Comm. in Computer Algebra",
volume = "46",
number = "2",
year = "2012",
abstract =
"Puiseux series are power series in which the exponents can be
fractional and/or negative rational numbers. Several computer algebra
systems have one or more built-in or loadable functions for computing
truncated Puiseux series. Some are generalized to allow coefficients
containing functions of the series variable that are dominated by any
power of that variable, such as logarithms and nested logarithms of
the series variable. Some computer algebra systems also have built-in
or loadable functions that compute infinite Puiseux series.
Unfortunately, there are some little-known pitfalls in
computing Puiseux series. The most serious of these is expansions
within branch cuts or at branch points that are incorrect for some
directions in the complex plane. For example with each series
implementation accessible to you:
Compare the value of $(z^2 + z^3)^{3/2}$ with that of its
truncated series expansion about $z = 0$, approximated at
$z = -0.01$. Does the series converge to a value that is
the negative of the correct value?
Compare the value of $ln(z^2 + z^3)$ with its truncated series
expansion about $z = 0$, approximated at $z = -0.01 + 0.1i$. Does
the series converge to a value that is incorrect by $2\pi i$?
Compare $arctanh(-2 + ln(z)z)$ with its truncated series
expansion about $z = 0$, approximated at $z = -0.01$. Does the series
converge to a value that is incorrect by about $\pi i$?
At the time of this writing, most implementations that accommodate
such series exhibit such errors. This article describes how to avoid
these errors both for manual derivation of series and when
implementing series packages.",
paper = "Stou12.pdf"
}
\end{chunk}
\index{Blair, Fred W.}
\index{Griesmer, James H.}
\index{Harry, Joseph}
\index{Pivovonsky, Mark}
\begin{chunk}{axiom.bib}
@misc{Blai71,
author = "Blair, Fred W. and Griesmer, James H. and Harry, Joseph and
Pivovonsky, Mark",
title = {{Design and Development Document for Lisp on Several S/360
Operating Systems}},
year = "1971",
publisher = "IBM Research",
paper = "Blai71.pdf"
}
\end{chunk}
\index{Alford, W.R.}
\index{Granville, A.}
\index{Pomerance, C.}
\begin{chunk}{axiom.bib}
@misc{Alfo92,
author = "Alford, W.R. and Granville, A. and Pomerance, C.",
title = {{There are Infinitely Many Carmichael Numbers}},
year = "1992",
comment = "Preprint"
}
\end{chunk}
\index{Arnault, F.}
\begin{chunk}{axiom.bib}
@misc{Arna91,
author = "Arnault, F.",
title = {{Le Test de Primalite de Rabin-Miller: Un Nombre compose
qui le ``passe''}},
comment = "Report 61",
university = "Universite de Poitiers Departement de Mathematiques",
year = "1991"
}
\end{chunk}
\index{Damgard, I.}
\index{Landrock, P.}
\begin{chunk}{axiom.bib}
@inproceedings{Damg91,
author = "Damgard, I. and Landrock, P.",
title = {{Improved Bounds for the Rabin Primality Test}},
booktitle = "Proc. 3rd IMA Conf. on Coding and Cryptography",
year = "1991"
}
\end{chunk}
\index{Davenport, James H.}
\index{Smith, G.C.}
\begin{chunk}{axiom.bib}
@techreport{Dave87,
author = "Davenport, James H. and Smith, G.C.",
title = {{Rabin's Primality Testing Algorithm -- a Group Theory View}},
institution = "University of Bath",
type = "technical report",
number = "87-04",
year = "1987"
}
\end{chunk}
\index{Jaeschke, G.}
\begin{chunk}{axiom.bib}
@misc{Jaes91,
author = "Jaeschke, G.",
title = {{Private Communication}},
comment = "to James Davenport",
year = "1991"
}
\end{chunk}
\index{Leech, J.}
\begin{chunk}{axiom.bib}
@misc{Leec92,
author = "Leech, J.",
title = {{Private Communication}},
comment = "to James Davenport",
year = "1992"
}
\end{chunk}
\index{Koblitz, N.}
\begin{chunk}{axiom.bib}
@book{Kobl87,
author = "Koblitz, N.",
title = {{A Course in Number Theory and Cryptography}},
publisher = "Springer-Verlag",
year = "1987"
}
\end{chunk}
\index{Lenstra Jr., H.W.}
\begin{chunk}{axiom.bib}
@article{Lens81,
author = "Lenstra Jr., H.W.",
title = {{Primality Testing Algorithms}},
journal = "Lecture Notes in Mathematics",
volume = "901",
publisher = "Springer-Verlag",
pages = "243-257",
year = "1981"
}
\end{chunk}
\index{Pomerance, C.}
\index{Selfridge, J.L.}
\index{Wagstaff Jr., S.S.}
\begin{chunk}{axiom.bib}
@article{Pome80,
author = "Pomerance, C. and Selfridge, J.L. and Wagstaff Jr., S.S.",
title = {{The Pseudoprimes up to $25\cdot 10^9$}},
journal = "Math. Comp.",
volume = "35",
pages = "1003-1026",
year = "1980"
}
\end{chunk}
\index{Rabin, M.O.}
\begin{chunk}{axiom.bib}
@article{Rabi80,
author = "Rabin, M.O.",
title = {{Probabilistic Algorithm for Testing Primality}},
journal = "J. Number Theory",
volume = "12",
pages = "128-138",
year = "1980"
}
\end{chunk}
\index{Steele, Guy Lewis}
\index{Sussman, Gerald Jay}
\begin{chunk}{axiom.bib}
@techreport{Stee78a,
author = "Steele, Guy Lewis and Sussman, Gerald Jay",
title = {{The Art of the Interpreter}},
institution = "MIT",
type = "technical report",
number = "AI Memo No. 453",
year = "1978",
abstract =
"We examine the effects of various language design decisions on the
programming styles available to a user of the language, with
particular emphasis on the ability to incrementally construct
modular systems. At each step we exhibit an interactive
meta-circular interpreter for the language under
consideration. Each new interpreter is the result of an
incremental change to the previous interpreter.
We explore the consequences of various variable binding
disciplines and the introduction of side effects. We find that
dynamic scoping is unsuitable for constructing procedural
abstractions, but has another role as an agent of modularity,
being a structured form of side effect. More general side effects
are also found to be necessary to promote modular style. We find
that the notion of side effect and the notion of equality (object
identity) are mutually constraining; to define one is to define
the other.
The interpreters we exhibit are all written in a simple dialect of
LISP, and all implement LISP-like languages. A subset of these
interpreters constitute a partial historical reconstruction of the
actual evolution of LISP."
}
\end{chunk}
\index{Constable, Robert L.}
\begin{chunk}{axiom.bib}
@incollection{Cons03,
author = "Constable, Robert L.",
title = {{Naive Computational Type Theory}},
booktitle = "Proof and System Reliability",
publisher = "unknown",
link = "\url{http://www.nuprl.org/documents/Constable/naive.pdf}",
year = "2003",
paper = "Cons03.pdf"
}
\end{chunk}
\index{Waaldijk, Frank}
\begin{chunk}{axiom.bib}
@misc{Waal03,
author = "Waaldijk, Frank",
title = {{On the Foundations of Constructive Mathematics}},
link = "\url{}",
year = "2003",
abstract =
"We discuss the foundations of constructive mathematics, including
recursive mathematics and intuitionism, in relation to classical
mathematics. There are connections with the foundations of physics,
due to the way in which the different branches of mathematics reflect
reality. Many different axioms and their interrelationship are
discussed. We show that there is a fundamental problem in bish
(Bishop’s school of constructive mathematics) with regard to its
current definition of ‘continuous function’. This problem is closely
related to the definition in bish of ‘locally compact’.
Possible approaches to this problem are discussed. Topology seems to
be a key to understanding many issues. We offer several new
simplifying axioms, which can form bridges between the various
branches of constructive mathematics and classical mathematics
(‘reuniting the antipodes’). We give a simplification of basic
intuitionistic theory, especially with regard to so-called ‘bar
induction’. We then plead for a limited number of axiomatic systems,
which differentiate between the various branches of mathematics.
Finally, in the appendix we offer bish an elegant topological
definition of ‘locally compact’, which unlike the current definition
is equivalent to the usual classical and/or intuitionistic definition
in classical and intuitionistic mathematics respectively.",
paper = "Waal03.pdf"
}
\end{chunk}
\index{Farmer, William M.}
\begin{chunk}{axiom.bib}
@article{Farm95b,
author = "Farmer, William M.",
title = {{Reasoning about Partial Functions with the Aid of a
Computer}},
journal = "Erkenntnis",
volume = "43",
number = "3",
pages = "279-294",
year = "1995",
abstract =
"Partial functions are ubiquitous in both mathematics and computer
science. Therefore, it is imperative that the underlying logical
formalism for a general-purpose mechanized mathematics system
provide strong support for reasoning about partial
functions. Unfortunately, the common logical formalisms --
first-order logic, type theory, and set theory -- are usually only
adequate for reasoning about partial functions {\sl in
theory}. However, the approach to partial functions traditionally
employed by mathematicians is quite adequate {\sl in
practice}. This paper shows how the traditional approach to
partial functions can be formalized in a range of formalisms that
includes first-order logic, simple type theory, and von Neumann-
Bernays-Godel set theory. It argues that these new formalisms
allow one to directly reason about partial functions; are based on
natural, well-understood, familiar principles; and can be
effectively implemented in mechanized mathematics systems.",
paper = "Farm95b.pdf",
keywords = "printed"
}
\end{chunk}
\index{Fong, Brendan}
\index{Spivak, David I.}
\begin{chunk}{axiom.bib}
@misc{Fong18,
author = "Fong, Brendan and Spivak, David I.",
title = {{Seven Sketches in Compositionality: An Invitation to
Applied Category Theory}},
link = "\url{http://math.mit.edu/~dspivak/teaching/sp18/7Sketches.pdf}",
year = "2018",
paper = "Fong18.pdf"
}
\end{chunk}
\index{Popov, Nikolaj}
\begin{chunk}{axiom.bib}
@misc{Popo03,
author = "Popov, Nikolaj",
title = {{Verification Using Weakest Precondition Strategy}},
comment = "Talk at Comp. Aided Verification of Information Systems",
year = "2003",
location = "Timisoara, Romania",
abstract =
"We describe the weakest precondition strategy for verifying
programs. This is a method which takes a specification and an
annotated program and generates so-called verification conditions:
mathematical lemmata which have to be proved in order to obtain a
formal correctness proof for the program with respect to its
specification. There are rules for generating the intermediate pre
and post conditions algorithmically. Based on these rules, we have
developed a package for generating verification conditions.",
paper = "Popo03.pdf",
keywords = "printed"
}
\end{chunk}
\index{Kovacs, Laura}
\index{Jebelean, Tudor}
\begin{chunk}{axiom.bib}
@article{Kova04,
author = "Kovacs, Laura and Jebelean, Tudor",
title = {{Practical Aspects of Imperative Program Verification in
Theorema}},
journal = "Analele Universitatu din Timisoara",
volume = "XXXXIX",
number = "2",
year = "2004",
abstract =
"Approaching the problem of imperative program verification from a
practical point of view has certain implications concerning: the style
of specifications, the programming language which is used, the help
provided to the user for finding appropriate loop invariants, the
theoretical frame used for formal verification, the language used
for expressing generated verification theorems as well as the database
of necessary mathematical knowledge, and finally the proving power,
style and language. The Theorema system (www.theorema.org) has
certain capabilities which make it appropriate for such a practical
approach. Our approach to imperative program verification in Theorema
is based on Hoare Logic and the Weakest Precondition Strategy. In
this paper we present some practical aspects of our work, as well as a
novel method for verification of programs that contain loops, namely
a method based on recurrence equation solvers for generating the
necessary loop invariants and termination terms automatically. We
have tested our system with numerous examples, some of these examples
are presented at the end of the paper.",
paper = "Kova04.pdf",
keywords = "printed"
}
\end{chunk}
\index{Kovacs, Laura}
\index{Jebelean, Tudor}
\begin{chunk}{axiom.bib}
@article{Kova04a,
author = "Kovacs, Laura and Jebelean, Tudor",
title = {{Automated Generation of Loop Invariants by Recurrence
Solving in Theorema}},
year = "2004",
abstract =
"Most of the properties established during program verification are
either invariants or depend crucially on invariants. The effectiveness
of automated verification of (imperative) programs is therefore
sensitive to the ease with which invariants, even trivial ones, can be
automatically deduced. We present a method for invariant generation
that relies on combinatorial techniques, namely on recurrence solving
and variable elimination. The effectiveness of the method is
demonstrated on examples.",
paper = "Kova04a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Kovacs, Laura}
\begin{chunk}{axiom.bib}
@phdthesis{Kova07,
author = "Kovacs, Laura",
title = {{Automated Invariant Generation by Algebraic Techniques for
Imperative Program Verification in Theorema}},
school = "Johannes-Kepler University Linz",
year = "2007",
abstract =
"This thesis presents algebraic and combinatorial approaches for
reasoning about imperative loops with assignments, sequencing and
conditionals.
A certain family of loops, called P-solvable , is defined for which
the value of each program variable can be expressed as a polynomial of
the initial values of variables, the loop counter, and some new
variables where there are algebraic dependencies among the new
variables. For such loops, a systematic method is developed for
generating polynomial invariants. Further, if the bodies of these
loops consist only of assignments and conditional branches, and test
conditions in the loop and conditionals are ignored, the method is
shown to be complete for some special cases. By completeness we mean
that it generates a set of polynomials from which, under additional
assumptions for loops with conditional branches, any polynomial
invariant can be derived. Many non-trivial algorithms working on
numbers can be naturally implemented using P-solvable loops.
By combining advanced techniques from algorithmic combinatorics,
symbolic summation, computer algebra and computational logic, a
framework is developed for generating polynomial invariants for
imperative programs operating on numbers.
Exploiting the symbolic manipulation capabilities of the computer
algebra system Mathematica , these techniques are implemented in a new
software package called Aligator . By using several combinatorial
packages developed at RISC, Aligator includes algorithms for solving
special classes of recurrence relations (those that are either
Gosper-summable or C-finite) and generating polynomial dependencies
among algebraic exponential sequences. Using Aligator , a complete set
of polynomial invariants is successfully generated for numerous
imperative programs working on numbers.
The automatically obtained invariant assertions are subsequently used
for proving the partial correctness of programs by generating
appropriate verification conditions as first-order logical formulas.
Based on Hoare logic and the weakest precondition strategy, this
verification process is supported in an imperative verification
environment implemented in the Theorema system. Theorema is
convenient for such an integration given that it is built on top of
the computer algebra system Mathematica and includes automated methods
for theorem proving in predicate logic, domain specific reasoning and
proving by induction.",
paper = "Kova07.pdf"
}
\end{chunk}
\index{Kovacs, Laura}
\begin{chunk}{axiom.bib}
@inproceedings{Kova08,
author = "Kovacs, Laura",
title = {{Reasoning Algebraically About P-Solvable Loops}},
booktitle = "Int. Conf. on Tools and Algorithms for the Construction
and Analysis of Systems",
pages = "249-264",
year = "2008",
abstract =
"We present a method for generating polynomial invariants for a
subfamily of imperative loops operating on numbers, called the
P-solvable loops. The method uses algorithmic combinatorics and
algebraic techniques. The approach is shown to be complete for some
special cases. By completeness we mean that it generates a set of
polynomial invariants from which, under additional assumptions, any
polynomial invariant can be derived. These techniques are implemented
in a new software package Aligator written in Mathematica and
successfully tried on many programs implementing interesting
algorithms working on numbers.",
paper = "Kova08.pdf",
keywords = "printed"
}
\end{chunk}
\index{Oury, Nicolas}
\index{Swierstra, Wouter}
\begin{chunk}{axiom.bib}
@inproceedings{Oury08,
author = "Oury, Nicolas and Swierstra, Wouter",
title = {{The Power of Pi}},
link = "\url{https://cs.ru.nl/~wouters/Publications/ThePowerOfPi.pdf}",
booktitle = "Int. Conf. on Functional Programming",
year = "2008",
abstract =
"This paper exhibits the power of programming with dependent types by
dint of embedding three domain-specific languages: Cryptol, a
language for cryptographic protocols; a small data description
language; and relational algebra. Each example demonstrates
particular design patterns inherent to dependently-typed programming.
Documenting these techniques paves the way for further research in
domain-specific embedded type systems.",
paper = "Oury08.pdf",
keywords = "printed"
}
\end{chunk}
\index{Graillat, Stef}
\index{Menissier-Morain, Valerie}
\begin{chunk}{axiom.bib}
@inproceedings{Grai07,
author = "Graillat, Stef and Menissier-Morain, Valerie",
title = {{Error-free Transformations in Real and Complex
Floating-point Arithmetic}},
booktitle = "Int. Symp. on Nonlinear Theory and Applications",
year = "2007",
abstract =
"Error-free transformation is a concept that makes it possible to
compute accurate results within a floating point arithmetic. Up to
now, it has only be studied for real floating point arithmetic. In
this short note, we recall the known error-free transformations for
real arithmetic and we propose some new error-free transformations for
complex floating point arithmetic. This will make it possible to
design some new accurate algorithms for summation, dot product and
polynomial evaluation with complex entries.",
paper = "Grai07.pdf"
}
\end{chunk}
\index{Noble, James}
\index{Black, Andrew P.}
\index{Bruce, Kim B.}
\index{Homer, Michael}
\index{Miller, Mark S.}
\begin{chunk}{axiom.bib}
@inproceedings{Nobl16,
author = "Noble, James and Black, Andrew P. and Bruce, Kim B. and
Homer, Michael and Miller, Mark S.",
title = {{The Left Hand of Equals}},
booktitle = "Int. Symp. of New Ideas, New Paradigms, and Reflections
on Programming and Software",
publisher = "ACM",
pages = "224-237",
year = "2016",
abstract =
"When is one object equal to another object? While object identity is
fundamental to object-oriented systems, object equality, although
tightly intertwined with identity, is harder to pin down. The
distinction between identity and equality is reflected in
object-oriented languages, almost all of which provide two variants of
'equality', while some provide many more. Programmers can usually
override at least one of these forms of equality, and can always
define their own methods to distinguish their own objects.
This essay takes a reflexive journey through fifty years of identity
and equality in object-oriented languages, and ends somewhere we did
not expect: a 'left-handed' equality relying on trust and grace.",
paper = "Nobl16.pdf",
keywords = "printed"
}
\end{chunk}
\index{Avigad, Jeremy}
\begin{chunk}{axiom.bib}
@article{Avig18a,
author = "Avigad, Jeremy",
title = {{The Mechanization of Mathematics}},
journal = "Notices of the AMS",
volume = "65",
number = "6",
year = "2018",
pages = "681-690",
paper = "Avig18a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Leroy, Xavier}
\begin{chunk}{axiom.bib}
@techreport{Lero90,
author = "Leroy, Xavier",
title = {{The ZINC experiment: An Economical Implementation of the
ML Language.}},
type = "technical report",
institution = "Institut National de Recherche en Informatique et en
Automatique",
number = "117",
year = "1990",
abstract =
"This report details the design and implementation of the ZINC
system. This is an implementation of the ML language, intended to
serve as a test field for various extensions of the language, and
for new implementation techniques as well. This system is strongly
oriented toward separate compilation and the production of small,
standalone programs; type safety is ensured by a Modula-2-like
module system. ZINC uses simple, portable techniques, such as
bytecode interpretation; a sophisticated execution model helps
counterbalance the interpretation overhead. Other highlights
include an efficient implementation of records with inclusion
(subtyping).",
paper = "Lero90.pdf",
keywords = "printed"
}
\end{chunk}
\index{Cohn, Avra Jean}
\begin{chunk}{axiom.bib}
@phdthesis{Cohn79,
author = "Cohn, Avra Jean",
title = {{Machine Assisted Proofs of Recursion Implementation}},
school = "University of Edinburgh",
year = "1979",
link = "\url{https://www.era.lib.ed.ac.uk/handle/1842/6643}",
abstract =
"Three studies in the machine assisted proof of recursion
implementation are described. The verification system used is
Edinburgh LCF (Logic for Computable Functions). Proofs are generated,
in LCF, in a goal-oriented fashion by the application of strategies
reflecting informal proof plans. LCF is introduced in Chapter 1. We
present three case studies in which proof strategies are developed and
(except in the third) tested in LCF. Chapter 2 contains an account of
the machine generated proofs of three program transformations (from
recursive to iterative function schemata). Two of the examples are
taken from Manna and Waldinger. In each case, the recursion is
implemented by the introduction of a new data type, e.g., a stack or
counter. Some progress is made towards the development of a general
strategy for producing the equivalence proofs of recursive and
iterative function schemata by machine. Chapter 3 is concerned with
the machine generated proof of the correctness of a compiling
algorithm. The formulation, borrowed from Russell, includes a simple
imperative language with a while and conditional construct, and a low
level language of labelled statements, including jumps. We have, in
LCF, formalised his denotational descriptions of the two languages and
performed a proof of the preservation of the semantics under
compilation. In Chapter 4, we express and informally prove the
correctness of a compiling algorithm for a language containing
declarations and calls of recursive procedures. We present a low level
language whose semantics model a standard activation stack
implementation. Certain theoretical difficulties (connected with
recursively defined relations) are discussed, and a proposed proof in
LCF is outlined. The emphasis in this work is less on proving original
theorems, or even automatically finding proofs of known theorems, than
on (i) exhibiting and analysing the underlying structure of proofs,
and of machine proof attempts, and (ii) investigating the nature of
the interaction (between a user and a computer system) required to
generate proofs mechanically; that is, the transition from informal
proof plans to behaviours which cause formal proofs to be performed.",
paper = "Cohn79.pdf"
}
\end{chunk}
\index{Russell, Bruce D.}
\begin{chunk}{axiom.bib}
@article{Russ77,
author = "Russell, Bruce D.",
title = {{Implementation Correctness Involving a Language with GOTO
Statements}},
journal = "SIAM Journal of Computing",
volume = "6",
number = "3",
year = "1977",
abstract =
"Two languages, one a simple structured programming language, the
other a simple goto language, are defined. A denotational semantics is
given for each language. An interpreter for the goto language is given
and is proved correct with respect to the denotational semantics. A
compiler from the structured to the goto language is defined and
proved to be a semantically invariant translation of programs. The
proofs are by computational induction.",
paper = "Russ77.pdf",
keywords = "printed"
}
\end{chunk}
\index{Paulson, Lawrence C.}
\begin{chunk}{axiom.bib}
@book{Paul87,
author = "Paulson, Lawrence C.",
title = {{Logic and Computation}},
publisher = "Cambridge University Press",
year = "1987",
isbn = "0-521-34632-0"
}
\end{chunk}
\index{Shoenfield, Joseph R.}
\begin{chunk}{axiom.bib}
@book{Shoe67,
author = "Shoenfield, Joseph R.",
title = {{Mathematical Logic}},
publisher = "Association for Symbolic Logic",
year = "1967",
isbn = "1-56881-135-7"
}
\end{chunk}
\index{Abramsky, S.}
\index{Gabbay, Dov M.}
\index{Maibaum, T.S.E.}
\begin{chunk}{axiom.bib}
@book{Abra92,
author = "Abramsky, S. and Gabbay, Dov M. and Maibaum, T.S.E.",
title = {{Handbook of Logic in Computer Science (Volume 2)}},
publisher = "Oxford Science Publications",
year = "1992",
isbn = "0-19-853761-1"
}
\end{chunk}
\index{Basu, Saugata}
\index{Pollack, Richard}
\index{Roy, Marie-Francoise}
\begin{chunk}{axiom.bib}
@book{Basu10,
author = "Basu, Saugata and Pollack, Richard and Roy, Marie-Francoise",
title = {{Algorithms in Real Algebraic Geometry}},
publisher = "Springer-Verlag",
year = "2010",
isbn = "978-3-642-06964-2"
}
\end{chunk}
\index{Harrison, John}
\begin{chunk}{axiom.bib}
@book{Harr09,
author = "Harrison, John",
title = {{Handbook of Practical Logic and Automated Reasoning}},
publisher = "Cambridge University Press",
year = "2009",
isbn = "978-0-521-89957-4"
}
\end{chunk}
\index{Barendregt, Henk}
\index{Dekkers, Wil}
\index{Statman, Richard}
\begin{chunk}{axiom.bib}
@book{Bare13,
author = "Barendregt, Henk and Dekkers, Wil and Statman, Richard",
title = {{Lambda Calculus with Types}},
publisher = "Cambridge University Press",
year = "2013",
isbn = "978-0-521-76614-2"
}
\end{chunk}
\index{Manna, Zohar}
\index{Waldinger, Richard}
\begin{chunk}{axiom.bib}
@book{Mann85,
author = "Manna, Zohar and Waldinger, Richard",
title = {{The Logical Basis for Computer Programming (Vol 1)}},
publisher = "Addison-Wesley",
year = "1985",
isbn = "0-201-15260-2"
}
\end{chunk}
\index{Pierce, Benjamin C.}
\begin{chunk}{axiom.bib}
@book{Pier02,
author = "Pierce, Benjamin C.",
title = {{Types and Programming Languages}},
publisher = "MIT Press",
year = "2002",
isbn = "978-0-262-16209-8"
}
\end{chunk}
\index{Tarski, Alfred}
\begin{chunk}{axiom.bib}
@book{Tars46,
author = "Tarski, Alfred",
title = {{Introduction to Logic}},
publisher = "Dover",
year = "1946",
isbn = "978-0-486-28462-0"
}
\end{chunk}
\index{Shankar, N.}
\begin{chunk}{axiom.bib}
@book{Shan94,
author = "Shankar, N.",
title = {{Metamathematics, Machines, and Godel's Proof}},
publisher = "Cambridge University Press",
year = "1994",
isbn = "0-521-58533-3"
}
\end{chunk}
\index{Beeson, Michael J.}
\begin{chunk}{axiom.bib}
@book{Bees80,
author = "Beeson, Michael J.",
title = {{Foundations of Constructive Mathematics}},
publisher = "Springer-Verlag",
year = "1980",
isbn = "3-540-12173-0"
}
\end{chunk}
\index{Shieber, Stuart M.}
\index{Schabes, Yves}
\index{Pereira, Fernando C.N.}
\begin{chunk}{axiom.bib}
@article{Shie95,
author = "Shieber, Stuart M. and Schabes, Yves and Pereira, Fernando C.N.",
title = {{Principles and Implementation of Deductive Parsing}},
journal = "J. Logic Programming",
volume = "24",
number = "1-2",
pages = "3-36",
year = "1995",
abstract =
"We present a system for generating parsers based directly on the
metaphor of parsing as deduction. Parsing algorithms can be
represented directly as deduction systems, and a single deduction
engine can interpret such deduction systems so as to implement the
corresponding parser. The method generalizes easily to parsers for
augmented phrase structure formalisms, such as definite-clause
grammars and other logic grammar formalisms, and has been used for
rapid prototyping of parsing algorithms for a variety of formalisms
including variants of tree-adjoining grammars, categorial grammars,
and lexicalized context-free grammars.",
paper = "Shie95.pdf",
keywords = "printed"
}
\end{chunk}
\index{Greve, David A.}
\index{Kaufmann, Matt}
\index{Manolios, Panagiotis}
\index{Moore, J Strother}
\index{Ray, Sandip}
\index{Ruiz-Reina, Jose Luis}
\index{Sumners, Rob}
\index{Vroon, Daron}
\index{Wilding, Matthew}
\begin{chunk}{axiom.bib}
@article{Grev08,
author = "Greve, David A. and Kaufmann, Matt and Manolios, Panagiotis
and Moore, J Strother and Ray, Sandip and Ruiz-Reina, Jose Luis
and Sumners, Rob and Vroon, Daron and Wilding, Matthew",
title = {{Efficient Execution in an Automated Reasoning Environment}},
journal = "Journal of Functional Programming",
volume = "18",
number = "1",
pages = "15-46",
year = "2008",
abstract =
"We describe a method that permits the user of a mechanized
mathematical logic to write elegant logical definitions while allowing
sound and efficient execution. In particular, the features supporting
this method allow the user to install, in a logically sound way,
alternative executable counterparts for logically defined
functions. These alternatives are often much more efficient than the
logically equivalent terms they replace. These features have been
implemented in the ACL2 theorem prover, and we discuss several
applications of the features in ACL2.",
paper = "Grev08.pdf",
keywords = "printed"
}
\end{chunk}
\index{Appel, Andrew W.}
\index{MacQueen, David B.}
\begin{chunk}{axiom.bib}
@inproceedings{Appe91,
author = "Appel, Andrew W. and MacQueen, David B.",
title = {{Standard ML of New Jersey}},
booktitle = "Int. Symp. on Prog. Lang. Implementations and Logic
Programming",
publisher = "Springer-Verlag",
pages = "1-13",
year = "1991",
abstract =
"The Standard ML of New Jersey compiler has been under development for
five years now. We have developed a robust and complete environment
for Standard ML that supports the implementation of large software
systems and generates efficient code. The compiler has also served as
a laboratory for developing novel implementation techniques for a
sophisticated type and module system, continuation based code
generation, efficient pattern matching, and concurrent programming
features.",
paper = "Appe91.pdf",
keywords = "printed"
}
\end{chunk}
\index{Tofte, Mads}
\begin{chunk}{axiom.bib}
@misc{Toft09,
author = "Tofte, Mads",
title = {{Tips for Computer Scientists on Standard ML (Revised)}},
link = "\url{http://www.itu.dk/people/tofte/publ/tips.pdf}",
year = "2009",
paper = "Toft09.pdf",
keywords = "printed"
}
\end{chunk}
\index{Graham, Paul}
\begin{chunk}{axiom.bib}
@book{Grah93,
author = "Graham, Paul",
title = {{On Lisp}},
publisher = "Prentice Hall",
year = "1993",
link = "\url{http://ep.yimg.com/ty/cdn/paulgraham/onlisp.pdf}",
comment = "code in papers/onlisp.lisp",
isbn = "0130305529",
abstract =
"On Lisp is a comprehensive study of advanced Lisp techniques, with
bottom-up programming as the unifying theme. It gives the first
complete description of macros and macro applications. The book also
covers important subjects related to bottom-up programming, including
functional programming, rapid prototyping, interactive development,
and embedded languages. The final chapter takes a deeper look at
object-oriented programming than previous Lisp books, showing the
step-by-step construction of a working model of the Common Lisp Object
System (CLOS).",
paper = "Grah93.pdf"
}
\end{chunk}
\index{Loh, Andres}
\index{McBride, Conor}
\index{Swierstra, Wouter}
\begin{chunk}{axiom.bib}
@misc{Lohx18,
author = "Loh, Andres and McBride, Conor and Swierstra, Wouter",
title = {{Simply Easy! An Implementation of a Dependently Typed
Lambda Calculus}},
link = "\url{http://strictlypositive.org/Easy.pdf}",
year = "2018",
abstract =
"We present an implementation in Haskell of a dependently-typed
lambda calculus that can be used as the core of a programming
language. We show that a dependently-typed lambda calculus is no
more difficult to implement than other typed lambda calculi. In fact,
our implementation is almost as easy as an implementation of the
simply typed lambda calculus, which we emphasize by discussing
the modifications necessary to go from one to the other. We explain
how to add data types and write simple programs in the core
language, and discuss the steps necessary to build a full-fledged
programming language on top of our simple core.",
paper = "Lohx18.pdf",
keywords = "printed"
}
\end{chunk}
\index{Felty, Amy}
\index{Miller, Dale}
\begin{chunk}{axiom.bib}
@techreport{Felt90,
author = "Felty, Amy and Miller, Dale",
title = {{Encoding a Dependent-Type Lambda Calculus in a Logic
Programming Language}},
type = "technical report",
number = "MS-CIS-90-18",
institution = "University of Pennsylvania",
year = "1990",
abstract =
"Various forms of typed $\lambda$-calculi have been proposed as
specification languages for representing wide varieties of object
logics. The logical framework, LF, is an example of such a
dependent-type $\lambda$-calculus. A small subset of intuitionistic
logic with quantification over simply typed $\lambda$-calculus has
also been proposed as a framework for specifying general logics. The
logic of {\sl hereditary Harrop formulas} with quantification at all
non-predicate types, denoted here as $hh^\omega$ , is such a
meta-logic that has been implemented in both the Isabelle theorem
prover and the $\lambda$Prolog logic programming language. Both
frameworks provide for specifications of logics in which details
involved with free and bound variable occurrences, substitutions,
eigenvariables, and the scope of assumptions within object logics are
handled correctly and elegantly at the 'meta' level. In this paper, we
show how LF can be encoded into $hh^\omega$ in a direct and natural
way by mapping the typing judgments in LF into propositions in the
logic of $hh^\omega$. This translation establishes a very strong
connection between these two languages: the order of quantification in
an LF signature is exactly the order of a set of $hh^\omega$ clauses,
and the proofs in one system correspond directly to proofs in the
other system. Relating these two languages makes it possible to
provide implementations of proof checkers and theorem provers for
logics specified in LF by using standard logic programming techniques
which can be used to implement $hh^\omega$.",
paper = "Felt90.pdf",
keywords = "printed"
}
\end{chunk}
\index{Altenkirch, Thorsten}
\index{McBride, Conor}
\index{McKinna, James}
\begin{chunk}{axiom.bib}
@misc{Alte05,
author = "Altenkirch, Thorsten and McBride, Conor",
title = {{Why Dependent Types Matter}},
link = "\url{http://www.cs.nott.ac.uk/~psztxa/publ/ydtm.pdf}",
year = "2005",
abstract =
"We exhibit the rationale behind the design of Epigram, a dependently
typed programming language and interactive program development system,
using refinements of a well known program -- merge sort -- as a running
example. We discuss its relationship with other proposals to introduce
aspects of dependent types into functional programming languages and
sketch some topics for further work in this area.",
paper = "Alte05.pdf",
keywords = "printed"
}
\end{chunk}
\index{Xi, Hongwei}
\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@inproceedings{Xixx99,
author = "Xi, Hongwei and Pfenning, Frank",
title = {{Dependent Types in Practical Programming}},
booktitle = "SIGPLAN-SIGACT Symp. on Principles of Programming
Languages",
year = "1999",
link = "\url{https://www.cs.cmu.edu/~fp/papers/popl99.pdf}",
publisher = "ACM",
abstract =
"We present an approach to enriching the type system of ML with a
restricted form of dependent types, where type index objects are drawn
from a constraint domain C , leading to the DML( C ) language schema.
This allows specification and inference of significantly more precise
type information, facilitating program error detection and compiler
optimization. A major complication resulting from introducing
dependent types is that pure type inference for the enriched system is
no longer possible, but we show that type-checking a sufficiently
annotated program in DML( C ) can be reduced to constraint
satisfaction in the constraint domain C . We exhibit the
un-obtrusiveness of our approach through practical examples and prove
that DML( C ) is conservative over ML. The main contribution of the
paper lies in our language design, including the formulation of
type-checking rules which makes the approach practical. To our
knowledge, no previous type system for a general purpose programming
language such as ML has combined dependent types with features including
datatype declarations, higher-order functions, general recursions,
let-polymorphism, mutable references, and exceptions. In addition,
we have finished a prototype implementation of DML( C ) for an integer
constraint domain C , where constraints are linear inequalities (Xi
and Pfenning 1998).",
paper = "Xixx99.pdf",
keywords = "printed"
}
\end{chunk}
\index{Altenkirch, Thorsten}
\begin{chunk}{axiom.bib}
@misc{Alte04,
author = "Altenkirch, Thorsten",
title = {{$\lambda$-calculus and types}},
comment = "Lecture Notes; APPSEM Spring School 2004",
link = "\url{http://www.cs.nott.ac.uk/~psztxa/publ/mgs04.pdf}",
year = "2004",
paper = "Alte04.pdf",
keywords = "printed"
}
\end{chunk}
\index{Loh, Andres}
\index{McBride, Conor}
\index{Swierstra, Wouter}
\begin{chunk}{axiom.bib}
@article{Lohx01,
author = "Loh, Andres and McBride, Conor and Swierstra, Wouter",
title = {{A Tutorial Implementation of a Dependently Typed Lambda
Calculus}},
journal = "Fundamenta Informaticae",
volume = "XXI",
year = "2001",
pages = "1001-1031",
abstract =
"We present the type rules for a dependently typed core calculus
together with a straight-forward implementation in Haskell. We
explicitly highlight the changes necessary to shift from a
simply-typed lambda calculus to the dependently typed lambda
calculus. We also describe how to extend our core language with data
types and write several small example programs. The article is
accompanied by an executable interpreter and example code that allows
immediate experimentation with the system we describe.",
paper = "Lohx01.pdf",
keywords = "printed"
}
\end{chunk}
\index{Roorda, Jan-Willem}
\begin{chunk}{axiom.bib}
@phdthesis{Roor00,
author = "Roorda, Jan-Willem",
title = {{Pure Type Systems for Functional Programming}},
year = "2000",
school = "University of Utrecht",
abstract =
"We present a functional programming language based on Pure Type
Systems (PTSs). We show how we can define such a language by
extending the PTS framework with algebraic data types, case
expressions and definitions. To be able to experiment with our
language we present an implementation of a type checker and an
interpreter for our language. PTSs are well suited as a basis for a
functional programming language because they are at the top of a
hierarchy of increasingly stronger type systems. The concepts of
`existential types', `rank-n polymorphism' and `dependent types' arise
naturally in functional programming languages based on the systems in
this hierarchy. There is no need for ad-hoc extensions to incorporate
these features. The type system of our language is more powerful than
the Hindley-Milner system. We illustrate this fact by giving a number
of meaningful programs that cannot be typed in Haskell but are typable
in our language. A `real world' example of such a program is the
mapping of a specialisation of a Generic Haskell function to a Haskell
function. Unlike the description of the Henk language by Simon Peyton
Jones and Erik Meijer we give a complete formal definition of the type
system and the operational semantics of our language. Another
difference between Henk and our language is that our language is
defined for a large class of Pure Type Systems instead of only for the
systems of the $\lambda$-cube.",
paper = "Roor00.pdf",
keywords = "printed"
}
\end{chunk}
\index{Jones, Simon Peyton}
\index{Meijer, Erik}
\begin{chunk}{axiom.bib}
@misc{Jone97,
author = "Jones, Simon Peyton and Meijer, Erik",
title = {{Henk: A Typed Intermediate Language}},
year = "1997",
link =
"\url{https://www.microsoft.com/en-us/research/wp-content/uploads/1997/01/henk.pdf}",
abstract =
"There is a growing interest in the use of richly-typed
intermediate languages in sophisticated compilers for
higher-order, typed source languages. These intermediate languages
are typically stratified, involving terms, types, and kinds. As
the sophistication of the type system increases, these three
levels begin to look more and more similar, so an attractive
approach is to use a single syntax, and a single data type in the
compiler, to represent all three.
The theory of so-called {\sl pure type systems} makes precisely
such an identification. This paper describes Henk, a new typed
intermediate language based closely on a particular pure type
system, {\sl the lambda cube}. On the way we give a tutorial
introduction to the lambda cube.",
paper = "Jone97.pdf",
keywords = "printed"
}
\end{chunk}
\index{Morrisett, Greg}
\begin{chunk}{axiom.bib}
@phdthesis{Morr95,
author = "Morrisett, Greg",
title = {{Compiling with Types}},
school = "Carnegie Mellon University",
year = "1995",
comment = "CMU-CS-95-226",
link = "\url{https://www.cs.cmu.edu/~rwh/theses/morrisett.pdf}",
abstract =
"Compilers for monomorphic languages, such as C and Pascal, take
advantage of types to determine data representations, alignment,
calling conventions, and register selection. However, these languages
lack important features including polymorphism, abstract datatypes,
and garbage collection. In contrast, modern programming languages
such as Standard ML (SML), provide all of these features, but existing
implementations fail to take full advantage of types. The result is
that performance of SML code is quite bad when compared to C.
In this thesis, I provide a general framework, called
{\sl type-directed compilation}, that allows compiler writers to
take advantage of types at all stages in compilation. In the
framework, types are used not only to determine efficient
representations and calling conventions, but also to prove the
correctness of the compiler. A key property of type-directed
compilation is that all but the lowest levels of the compiler use
{\sl typed intermediate languages}. An advantage of this approach
is that it provides a means for automatically checking the
integrity of the resulting code.
An important contribution of this work is the development of a new,
statically-typed intermediate language, called $\lambda^{ML}_i$.
This language supports {\sl dynamic type dispatch}, providing a
means to select operations based on types at run time. I show how
to use dynamic type dispatch to support polymorphism, ad-hoc
operators, and garbage collection without having to box or tag
values. This allows compilers for SML to take advantage of
techniques used in C compilers, without sacrificing language
features or separate compilation.
To demonstrate the applicability of my approach, I, along with
others, have constructed a new compiler for SML called TIL that
eliminates most restrictions on the representations of values. The
code produced by TIL is roughly twice as fast as code produced by
the SML/NJ compiler. This is due at least partially to the use
of natural representations, but primarily to the conventional
optimizer which manipulates typed, $\lambda^{ML}_i$ code. TIL
demonstrates that combining type-directed compilation with dynamic
type dispatch yields a superior architecture for compilers of
modern languages.",
paper = "Morr95.pdf"
}
\end{chunk}
\index{Leroy, Xavier}
\begin{chunk}{axiom.bib}
@article{Lero98,
author = "Leroy, Xavier",
title = {{An Overview of Types in Compilation}},
journal = "LNCS",
volume = "1473",
year = "1998",
publisher = "Springer-Verlag",
pages = "1-8",
paper = "Lero98.pdf",
keywords = "printed"
}
\end{chunk}
\index{Lu, Eric}
\begin{chunk}{axiom.bib}
@misc{Luxx16,
author = "Lu, Eric",
title = {{Barendregt's Cube and Programming with Dependent Types}},
link = "\url{https://www.seas.harvard.edu/courses/cs252/2016fa/15.pdf}",
paper = "Luxx16.pdf",
keywords = "printed"
}
\end{chunk}
\index{Guallart, Nino}
\begin{chunk}{axiom.bib}
@misc{Gual14,
author = "Guallart, Nino",
title = {{An Overview of Type Theories}},
link = "\url{https://arxiv.org/pdf/1411.1029.pdf}",
year = "2014",
abstract =
"Pure type systems arise as a generalisation of simply typed lambda
calculus. The contemporary development of mathematics has renewed
the interest in type theories, as they are not just the object of
mere historical research, but have an active role in the development
of computational science and core mathematics. It is worth exploring
some of them in depth, particularly predicative Martin-Löf’s
intuitionistic type theory and impredicative Coquand’s calculus of
constructions. The logical and philosophical differences and
similarities between them will be studied, showing the relationship
between these type theories and other fields of logic.",
paper = "Gual14.pdf",
keywords = "printed"
}
\end{chunk}
\index{Benini, Marco}
\begin{chunk}{axiom.bib}
@misc{Beni95,
author = "Benini, Marco",
title = {{Barendregt's $\lambda$-Cube in Isabelle}},
link = "\url{}",
year = "1995",
abstract =
"We present an implementation of Barendregt’s $\lambda$-Cube in the
Isabelle proof system. Isabelle provides many facilities for
developing a useful specification and proving environment from the
basic formulation of formal systems. We used those facilities to
provide an environment where the user can describe problems and derive
proofs interactively.",
paper = "Beni95.pdf",
keywords = "printed"
}
\end{chunk}
\index{Hellman, Martin E.}
\begin{chunk}{axiom.bib}
@article{Hell17,
author = "Hellman, Martin E.",
title = {{Cybersecurity, Nuclear Security, Alan Turing, and
Illogical Logic}},
journal = "Communications of the ACM",
volume = "60",
number = "12",
pages = "52-59",
year = "2017",
link = "\url{https://cacm.acm.org/magazines/2017/12/223042-cybersecurity-nuclear-security-alan-turing-and-illogical-logic/fulltext}"
}
\end{chunk}
\index{Winston, Patrick Henry}
\begin{chunk}{axiom.bib}
@inproceedings{Wins12,
author = "Winston, Patrick Henry",
title = {{The Next 50 Years: A Personal View}},
booktitle = "Biologically Inspired Cognitive Architectures 1",
year = "2012",
pages = "92-99",
publisher = "Elsevier B.V",
abstract =
"I review history, starting with Turing’s seminal paper, reaching
back ultimately to when our species started to outperform other
primates, searching for the questions that will help us develop a
computational account of human intelligence. I answer that the
right questions are: What’s different between us and the other
primates and what’s the same. I answer the {\sl what’s different}
question by saying that we became symbolic in a way that enabled
story understanding, directed perception, and easy communication,
and other species did not. I argue against Turing’s
reasoning-centered suggestions, offering that reasoning is just a
special case of story understanding. I answer the {\sl what’s the
same} question by noting that our brains are largely engineered in
the same exotic way, with information flowing in all directions at
once. By way of example, I illustrate how these answers can
influence a research program, describing the Genesis system, a
system that works with short summaries of stories, provided in
English, together with low-level {\sl common-sense rules} and
higher-level {\sl concept patterns}, likewise expressed in
English. Genesis answers questions, notes abstract concepts such
as {\sl revenge}, tells stories in a listener-aware way, and fills
in story gaps using precedents. I conclude by suggesting,
optimistically, that a genuine computational theory of human
intelligence will emerge in the next 50 years if we stick to the
right, biologically inspired questions, and work toward
biologically informed models.",
paper = "Wins12.pdf"
}
\end{chunk}
\index{Brooks, Rodney A.}
\begin{chunk}{axiom.bib}
@article{Broo87,
author = "Brooks, Rodney A.",
title = {{Intelligence Without Representation}},
journal = "Artificial Intelligence",
volume = "47",
year = "1991",
pages = "139-159",
abstract =
"Artificial intelligence research has foundered on the issue of
representation. When intelligence is approached in an incremental
manner, with strict reliance on interfacing to the real world through
perception and action, reliance on representation disappears. In this
paper we outline our approach to incrementally building complete
intelligent Creatures. The fundamental decomposition of the
intelligent system is not into independent information processing
units which must interface with each other via representations.
Instead, the intelligent system is decomposed into independent and
parallel activity producers which all interface directly to the world
through perception and action, rather than interface to each other
particularly much. The notions of central and peripheral systems
evaporate -- everything is both central and peripheral. Based on these
principles we have built a very successful series of mobile robots
which operate without supervision as Creatures in standard office
environments.",
paper = "Broo87.pdf"
}
\end{chunk}
\index{Roy, Peter Van}
\index{Haridi, Seif}
\begin{chunk}{axiom.bib}
@book{Royx03,
author = "Roy, Peter Van and Haridi, Seif",
title = {{Concepts, Techniques, and Models of Computer Programming}},
year = "2003",
link =
"\url{http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.102.7366&rep=rep1&type=pdf}",
publisher = "MIT",
isbn = "978-0262220699",
paper = "Royx03.pdf"
}
\end{chunk}
\index{Boyer, Robert S.}
\index{Moore, J Strother}
\begin{chunk}{axiom.bib}
@article{Boye84,
author = "Boyer, Robert S. and Moore, J Strother",
title = {{Proof-Checking, Theorem-Proving, and Program Verification}},
journal = "Contemporary Mathematics",
volume = "29",
year = "1984",
abstract =
"This article consists of three parts: a tutorial introduction to a
computer program that proves theorems by induction; a brief
description of recent applications of that theorem-prover; and a
discussion of several nontechnical aspects of the problem of building
automatic theorem-provers. The theorem-prover described has proved
theorems such as the uniqueness of prime factorizations, Fermat's
theorem, and the recursive unsolvability of the halting problem.
The article is addressed to those who know nothing about automatic
theorem-proving but would like a glimpse of one such system. This
article definitely does not provide a balanced view of all automatic
theorem-proving, the literature of which is already rather large and
technical. Nor do we describe the details of our theorem-proving
system, but they can be found in the books, articles, and technical
reports that we reference.
In our opinion, progress in automatic theorem-proving is largely a
function of the mathematical ability of those attempting to build such
systems. We encourage good mathematicians to work in the field.",
paper = "Boye84.pdf",
keywords = "printed"
}
\end{chunk}
\index{Lamport, Leslie}
\begin{chunk}{axiom.bib}
@article{Lamp78,
author = "Lamport, Leslie",
title = {{The Specification and Proof of Correctness of Interactive
Programs}},
journal = "LNCS",
volume = "75",
year = "1978",
abstract =
"method production assertional rules specified, is proved is modified
correctly to accept and interactive is described, method that a
program A program of specifying and to permit typed its implementation
correct. by and the Floyd-Hoare implements format programs one to
prove its specification. input is formally with a TECO program.",
paper = "Lamp78.pdf",
keywords = "printed"
}
\end{chunk}
\index{Anderson, E.R.}
\index{Belz, F.C.}
\index{Blum, E.K.}
\begin{chunk}{axiom.bib}
@article{Ande78,
author = "Anderson, E.R. and Belz, F.C. and Blum, E.K.",
title = {{Extending an Implementation Language to a Specification Language}},
journal = "LNCS",
volume = "75",
year = "1978",
paper = "Ande78.pdf",
keywords = "printed"
}
\end{chunk}
\index{Tobin-Hochstadt, Sam}
\index{Felleisen, Matthias}
\begin{chunk}{axiom.bib}
@misc{Tobi11,
author = "Tobin-Hochstadt, Sam and Felleisen, Matthias",
title = {{The Design and Implementation of Typed Scheme: From
Scripts to Programs}},
link = "\url{https://arxiv.org/pdf/1106.2575.pdf}",
year = "2011",
abstract =
"When scripts in untyped languages grow into large programs,
maintaining them becomes difficult. A lack of explicit type
annotations in typical scripting languages forces programmers to
(re)discover critical pieces of design information every time they
wish to change a program. This analysis step both slows down the
maintenance process and may even introduce mistakes due to the
violation of undiscovered invariants.
This paper presents Typed Scheme, an explicitly typed extension of PLT
Scheme, an untyped scripting language. Its type system is based on
the novel notion of occurrence typing, which we formalize and
mechanically prove sound. The implementation of Typed Scheme
additionally borrows elements from a range of approaches, including
recursive types, true unions and subtyping, plus polymorphism combined
with a modicum of local inference.
The formulation of occurrence typing naturally leads to a simple and
expressive version of predicates to describe refinement types. A
Typed Scheme program can use these refinement types to keep track of
arbitrary classes of values via the type system. Further, we show how
the Typed Scheme type system, in conjunction with simple recursive
types, is able to encode refinements of existing datatypes, thus
expressing both proposed variations of refinement types.",
paper = "Tobi11.pdf",
keywords = "printed"
}
\end{chunk}
\index{McCarthy, John}
\index{Painter, James}
\begin{chunk}{axiom.bib}
@inproceedings{Mcca67,
author = "McCarthy, John and Painter, James",
title = {{Correctness of a Compiler for Arithmetic Expressions}},
booktitle = "Proc. Symp. in Applied Mathematics",
publisher = "American Mathematical Society",
year = "1967",
paper = "Mcca67.pdf",
keywords = "printed"
}
\end{chunk}
\index{Vuillemin, Jean}
\begin{chunk}{axiom.bib}
@phdthesis{Vuil73,
author = "Vuillemin, Jean",
title = {{Proof Techniques for Recursive Programs}},
school = "Stanford University",
year = "1973",
abstract =
"The concept of least fixed-point of a continuous function can be
considered as the unifying thread of this dissertation.
The connections between fixed-points and recursive programs are
detailed in chapter 2, providing some insights on practical
implementations of recursion. There are two usual characterizations
of the least fixed-point of a continuous function. To the first
characterization, due to Knaster and Tarski, corresponds a class
of proof techniques for programs, as described in Chapter 3. The
other characterization of least fixed points, better known as
Kleene's first recursion theorem, is discussed in Chapter 4. It
has the advantage of being effective and it leads to a wider class
of proof techniques.",
paper = "Vuil73.pdf"
}
\end{chunk}
\begin{chunk}{axiom.bib}
@misc{Byte79,
author = "Byte Publications",
title = {{LISP}},
link = "\url{https://ia802603.us.archive.org/30/items/byte-magazine-1979-08/1979_08_BYTE_04-08_LISP.pdf}",
publisher = "McGraw-Hill",
year = "1979",
paper = "Byte79.pdf"
}
\end{chunk}
\index{Manna, Zohar}
\index{Vuillemin, Jean}
\begin{chunk}{axiom.bib}
@article{Mann72,
author = "Manna, Zohar and Vuillemin, Jean",
title = {{Fixpoint Approach to the Theory of Computation}},
journal = "Communications of the ACM",
volume = "15",
number = "7",
year = "1972",
pages = "828-836",
abstract =
"Following the fixpoint theory of Scott, the semantics of computer
programs are defined in terms of the least fixpoints of recursive
programs. This allows not only the justification of all existing
verification techniques, but also their extension to the handling, in
a uniform manner of various properties of computer programs, including
correctness, termination, and equivalence.",
paper = "Mann72.pdf",
keywords = "printed"
}
\end{chunk}
\index{Paulson, Lawrence C.}
\begin{chunk}{axiom.bib}
@article{Paul83,
author = "Paulson, Lawrence C.",
title = {{A Higher-Order Implementation of Rewriting}},
journal = "Science of Computer Programming",
volume = "3",
pages = "119-149",
year = "1983",
abstract =
"Many automatic theorem-provers rely on rewriting. Using theorems as
rewrite rules helps to simplify the subgoals that arise during a
proof. LCF is an interactive theorem-prover intended For reasoning
about computation. Its implementation of rewriting is presented in
detail. LCF provides a Family of rewriting Functions, and operators to
combine them. A succession of Functions is described, From pattern
matching primitives to the rewriting tool that performs most
inferences in LCF proofs. The design is highly modular. Each function
performs a basic, specific task, such as recognizing a certain form of
tautology. Each operator implements one method of building a rewriting
Function From simpler ones. These pieces can be put together in
numerous ways, yielding a variety of rewriting strategies. The
approach involves programming with higher-order Functions. Rewriting
Functions are data values, produced by computation on other rewriting
Functions. The code is in daily use at Cambridge, demonstrating the
practical use of Functional programming.",
paper = "Paul83.pdf",
keywords = "printed"
}
\end{chunk}
\index{Avigad, Jeremy}
\index{Holzl, Johannes}
\index{Serafin, Luke}
\begin{chunk}{axiom.bib}
@misc{Avig17c,
author = "Avigad, Jeremy and Holzl, Johannes and Serafin, Luke",
title = {{A Formally Verified Proof of the Central Limit Theorem}},
link = "\url{}",
year = "2017",
abstract =
"We describe a proof of the Central Limit Theorem that has been
formally verified in the Isabelle proof assistant. Our formalization
builds upon and extends Isabelle’s libraries for analysis and
measure-theoretic probability. The proof of the theorem uses
characteristic functions, which are a kind of Fourier transform, to
demonstrate that, under suitable hypotheses, sums of random
variables converge weakly to the standard normal distribution. We
also discuss the libraries and infrastructure that supported the
formalization, and reflect on some of the lessons we have learned
from the effort.",
paper = "Avig17c.pdf",
keywords = "printed"
}
\end{chunk}
\index{Feferman, Solomon}
\begin{chunk}{axiom.bib}
@misc{Fefe95,
author = "Feferman, Solomon",
title = {{Definedness}},
year = "1995",
link = "\url{https://math.stanford.edu/~feferman/papers/definedness.pdf}",
abstract =
"Questions of definedness are ubiquitous in mathematics. Informally,
these involve reasoning about expressions which may or may not have a
value. This paper surveys work on logics in which such reasoning can
be carried out directly, especially in computational contexts. It
begins with a general logic of 'partial terms', continues with partial
combinatory and lambda calculi, and concludes with an expressively
rich theory of partial functions and polymorphic types, where
termination of functional programs can be established in a natural way.",
paper = "Fefe95.pdf",
keywords = "printed"
}
\end{chunk}
---
books/axiom.bib | 1720 +++++++++++++++++++++++++++++++-
books/bookvolbib.pamphlet | 2141 +++++++++++++++++++++++++++++++++++++++-
changelog | 2 +
patch | 1706 +++++++++++++++++++++++++++++++-
src/axiom-website/patches.html | 2 +
5 files changed, 5544 insertions(+), 27 deletions(-)
diff --git a/books/axiom.bib b/books/axiom.bib
index a75b91a..7a9f6da 100644
--- a/books/axiom.bib
+++ b/books/axiom.bib
@@ -6597,7 +6597,7 @@ paper = "Brea89.pdf"
@book{Wilk63,
author = "Wilkinson, J. H.",
- title = {{Rounding Erroors in Algebraic Processes}},
+ title = {{Rounding Errors in Algebraic Processes}},
publisher = "HMSO",
series = "Notes on Applied Science, No. 32",
year = "1963"
@@ -7479,6 +7479,15 @@ paper = "Brea89.pdf"
keywords = "private communication"
}
+@misc{Avig18,
+ author = "Avigad, Jeremy",
+ title = {{Mathematical Language from a Design Perspective}},
+ year = "2018",
+ link = "\url{http://www.andrew.cmu.edu/user/avigad/Talks/san_diego.pdf}",
+ paper = "Avig18.pdf",
+ keywords = "printed"
+}
+
@article{Bate85,
author = "Bates, Joseph L. and Constable, Robert L.",
title = {{Proofs as Programs}},
@@ -8291,6 +8300,130 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@article{Kova04,
+ author = "Kovacs, Laura and Jebelean, Tudor",
+ title = {{Practical Aspects of Imperative Program Verification in
+ Theorema}},
+ journal = "Analele Universitatu din Timisoara",
+ volume = "XXXXIX",
+ number = "2",
+ year = "2004",
+ abstract =
+ "Approaching the problem of imperative program verification from a
+ practical point of view has certain implications concerning: the style
+ of specifications, the programming language which is used, the help
+ provided to the user for finding appropriate loop invariants, the
+ theoretical frame used for formal verification, the language used
+ for expressing generated verification theorems as well as the database
+ of necessary mathematical knowledge, and finally the proving power,
+ style and language. The Theorema system (www. theorema.org) has
+ certain capabilities which make it appropriate for such a practical
+ approach. Our approach to imperative program verification in Theorema
+ is based on Hoare– Logic and the Weakest Precondition Strategy. In
+ this paper we present some practical aspect of our work, as well as a
+ novel method for verification of pro- grams that contain loops, namely
+ a method based on recurrence equation solvers for generating the
+ necessary loop invariants and termination terms automatically. We
+ have tested our system with numerous examples, some of these example
+ are presented at the end of the paper.",
+ paper = "Kova04.pdf",
+ keywords = "printed"
+}
+
+@article{Kova04a,
+ author = "Kovacs, Laura and Jebelean, Tudor",
+ title = {{Automated Generation of Loop Invariants by Recurrence
+ Solving in Theorema}},
+ year = "2004",
+ abstract =
+ "Most of the properties established during program verification are
+ either invariants or depend crucially on invariants. The effectiveness
+ of automated verification of (imperative) programs is therefore
+ sensitive to the ease with which invariants, even trivial ones, can be
+ automatically deduced. We present a method for invariant generation
+ that relies on combinatorial techniques, namely on recurrence solving
+ and variable elimination. The effectiveness of the method is
+ demonstrated on examples.",
+ paper = "Kova04a.pdf",
+ keywords = "printed"
+}
+
+@phdthesis{Kova07,
+ author = "Kovacs, Laura",
+ title = {{Automated Invariant Generation by Algebraic Techniques for
+ Imperative Program Verification in Theorema}},
+ school = "Johannes-Kepler University Linz",
+ year = "2007",
+ abstract =
+
+ "This thesis presents algebraic and combinatorial approaches for
+ reasoning about imperative loops with assignments, sequencing and
+ conditionals.
+
+ A certain family of loops, called P-solvable , is defined for which
+ the value of each program variable can be expressed as a polynomial of
+ the initial values of variables, the loop counter, and some new
+ variables where there are algebraic dependencies among the new
+ variables. For such loops, a systematic method is developed for
+ generating polynomial invariants. Further, if the bodies of these
+ loops consist only of assignments and conditional branches, and test
+ conditions in the loop and conditionals are ignored, the method is
+ shown to be complete for some special cases. By completeness we mean
+ that it generates a set of polynomials from which, under additional
+ assumptions for loops with conditional branches, any polynomial
+ invariant can be derived. Many non-trivial algorithms working on
+ numbers can be naturally implemented using P-solvable loops.
+
+ By combining advanced techniques from algorithmic combinatorics,
+ symbolic summation, computer algebra and computational logic, a
+ framework is developed for generating polynomial invariants for
+ imperative programs operating on numbers.
+
+ Exploiting the symbolic manipulation capabilities of the computer
+ algebra system Mathematica , these techniques are implemented in a new
+ software package called Aligator . By using several combinatorial
+ packages developed at RISC, Aligator includes algorithms for solving
+ special classes of recurrence relations (those that are either
+ Gosper-summable or C-finite) and generating polynomial dependencies
+ among algebraic exponential sequences. Using Aligator , a complete set
+ of polynomial invariants is successfully generated for numerous
+ imperative programs working on numbers.
+
+ The automatically obtained invariant assertions are subsequently used
+ for proving the partial correctness of programs by generating
+ appropriate verification conditions as first-order logical formulas.
+ Based on Hoare logic and the weakest precondition strategy, this
+ verification process is supported in an imperative verification
+ environment implemented in the Theorema system. Theorema is
+ convenient for such an integration given that it is built on top of
+ the computer algebra system Mathematica and includes automated methods
+ for theorem proving in predicate logic, domain specific reasoning and
+ proving by induction.",
+ paper = "Kova07.pdf"
+}
+
+@inproceedings{Kova08,
+ author = "Kovacs, Laura",
+ title = {{Reasoning Algebraiclly About P-Solvable Loops}},
+ booktitle = "Int. Conf. on Tools and Algorithms for the Construction
+ and Analysis of Systems",
+ pages = "249-264",
+ year = "2008",
+ abstract =
+ "We present a method for generating polynomial invariants for a
+ subfamily of imperative loops operating on numbers, called the
+ P-solvable loops. The method uses algorithmic combinatorics and
+ algebraic techniques. The approach is shown to be complete for some
+ special cases. By completeness we mean that it generates a set of
+ polynomial invariants from which, under additional assumptions, any
+ polynomial invariant can be derived. These techniques are implemented
+ in a new software package Aligator written in Mathematica and
+ successfully tried on many programs implementing interesting
+ algorithms working on numbers.",
+ paper = "Kova08.pdf",
+ keywords = "printed"
+}
+
@article{Kova16,
author = "Kovacs, Laura",
title = {{Symbolic Computation and Automated Reasoning for Program
@@ -8471,6 +8604,8 @@ paper = "Brea89.pdf"
@article{Luox13,
author = "Luo, Zhaohui and Soloviev, Sergei and Xue, Tao",
title = {{Coercive Subtyping: Theory and Implementation}},
+ journal = "Information and Computation",
+ year = "2013",
volume = "223",
pages = "18-42",
link =
@@ -9320,6 +9455,14 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@book{Abra92,
+ author = "Abramsky, S. and Gabbay, Dov M. and Maibaum, T.S.E.",
+ title = {{Handbook of Logic in Computer Science (Volume 2)}},
+ publisher = "Oxford Science Publications",
+ year = "1992",
+ isbn = "0-19-853761-1"
+}
+
@article{Akba08a,
author = "Akbarpour, Behzad and Paulson, Lawrence C.",
title = {{MetiTarski: An Automatic Prover for the Elementary Functions}},
@@ -9338,6 +9481,42 @@ paper = "Brea89.pdf"
paper = "Akba08a.pdf"
}
+@misc{Alte04,
+ author = "Altenkirch, Thorsten",
+ title = {{$\lambda$-calculus and types}},
+ comment = "Lecture Notes; APPSEM Spring School 2004",
+ link = "\url{http://www.cs.nott.ac.uk/~psztxa/publ/mgs04.pdf}",
+ year = "2004",
+ paper = "Alte04.pdf",
+ keywords = "printed",
+}
+
+@misc{Alte05,
+ author = "Altenkirch, Thorsten and McBride, Conor",
+ title = {{Why Dependent Types Matter}},
+ link = "\url{http://www.cs.nott.ac.uk/~psztxa/publ/ydtm.pdf}",
+ year = "2005",
+ abstract =
+ "We exhibit the rationale behind the design of Epigram, a dependently
+ typed programming language and interactive program development system,
+ using refinements of a well known program -- merge sort -- as a running
+ example. We discuss its relationship with other proposals to introduce
+ aspects of dependent types into functional programming languages and
+ sketch some topics for further work in this area.",
+ paper = "Alte05.pdf",
+ keywords = "printed"
+}
+
+@article{Ande78,
+ author = "Anderson, E.R. and Belz, F.C. and Blum, E.K.",
+ title = {{Extending an Implementation Language to a Specification Language}},
+ journal = "LNCS",
+ volume = "75",
+ year = "1978",
+ paper = "Ande78.pdf",
+ keywords = "printed"
+}
+
@misc{Anon18,
author = "Anonymous",
title = {{A Compiler From Scratch}},
@@ -9407,6 +9586,60 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@article{Appe89,
+ author = "Appel, Andrew W.",
+ title = {{Run-time Tags Aren't Necessary}},
+ journal = "Lisp and Symbolic Computation",
+ volume = "2",
+ pages = "153-162",
+ year = "1989",
+ publisher = "Kluwer",
+ abstract =
+ "Many modern programming environments use tag bits at runtime to
+ distinguish objects of different types. This is particularly
+ common in systems with garbage collection, since the garbage
+ collector must be able to distinguish pointers from non-pointers,
+ and to learn the length of records pointed to.
+
+ The use of tag bits leads to inefficiency. In addition to the
+ obvious space overhead (tag bits and record descriptors occupy
+ memory space), there is a time overhead: tag bits must be stripped
+ off of data before arithmetic operations are performed, and
+ re-attached to the data when it is stored into memory. This takes
+ either extra instructions at runtime, or special tag-handling
+ hardware, or both.
+
+ This paper shows how the use of tag bits, record descriptor words,
+ explicit type parameters, and the like can be avoided in languages
+ (like ML) with static polymorphic typechecking. Though a tag
+ will still be required for user-defined variant records, all
+ other type information can be encoded once -- in the program --
+ rather than replicated many times in the data. This can lead to
+ savings in both space and time.",
+ paper = "Appe89.pdf"
+}
+
+@inproceedings{Appe91,
+ author = "Appel, Andrew W. and MacQueen, David B.",
+ title = {{Standard ML of New Jersey}},
+ booktitle = "Int. Symp. on Prog. Lang. Implementations and Logic
+ Programming",
+ publisher = "Springer-Verlag",
+ pages = "1-13",
+ year = "1991",
+ abstract =
+ "The Standard ML of New Jersey compiler has been under development for
+ five years now. We have developed a robust and complete environment
+ for Standard ML that supports the implementation of large software
+ systems and generates efficient code. The compiler has also served as
+ a laboratory for developing novel implementation techniques for a
+ sophisticated type and module system, continuation based code
+ generation, efficient pattern matching, and concurrent programming
+ features.",
+ paper = "Appe91.pdf",
+ keywords = "printed"
+}
+
@book{Appe92,
author = "Appel, Andrew W.",
title = {{Compiling with Continuations}},
@@ -9415,6 +9648,25 @@ paper = "Brea89.pdf"
isbn = "0-521-03311-X"
}
+@article{Appe93,
+ author = "Appel, Andrew W.",
+ title = {{A Critique of Standard ML}},
+ journal = "J. Functional Programming",
+ volume = "3",
+ number = "4",
+ pages = "391-429",
+ year = "1993",
+ abstract =
+ "Standard ML is an excellent language for many kinds of
+ programming. It is safe, efficient, suitably abstract, and
+ concise. There are many aspects of the language that work
+ well. However, nothing is perfect: Standard ML has a few
+ shortcomings. In some cases there are obvious solutions, and in
+ other cases further research is required.",
+ paper = "Appe93.pdf",
+ keywords = "printed"
+}
+
@book{Appe98,
author = "Appel, Andrew W.",
title = {{Modern Compiler Implementation in ML}},
@@ -9423,6 +9675,29 @@ paper = "Brea89.pdf"
isbn = "0-521-60764-7"
}
+@incollection{Aspi05,
+ author = "Aspinall, David and Hofmann, Martin",
+ title = {{Dependent Types}},
+ booktitle = "Advanced Topics in Types and Programming Languages",
+ publisher = "MIT Press",
+ isbn = "0-262-16228-8",
+ pages = "45-46",
+ paper = "Aspi05.pdf",
+ keywords = "printed"
+}
+
+@article{Avig18a,
+ author = "Avigad, Jeremy",
+ title = {{The Mechanization of Mathematics}},
+ journal = "Notices of the AMS",
+ volume = "65",
+ number = "6",
+ year = "2018",
+ pages = "681-690",
+ paper = "Avig18a.pdf",
+ keywords = "printed"
+}
+
@article{Back81,
author = "Back, R.J.R",
title = {{On Correct Refinement of Programs}},
@@ -9535,6 +9810,14 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@book{Bare13,
+ author = "Barendregt, Henk and Dekkers, Wil and Statman, Richard",
+ title = {{Lambda Calculus with Types}},
+ publisher = "Cambridge University Press",
+ year = "2013",
+ isbn = "978-0-521-76614-2"
+}
+
@techreport{Barr96a,
author = "Barras, Bruno",
title = {{Coq en Coq}},
@@ -9579,6 +9862,14 @@ paper = "Brea89.pdf"
keywords = "printed, reviewed"
}
+@book{Basu10,
+ author = "Basu, Saugata and Pollack, Richard and Roy, Marie-Francoise",
+ title = {{Algorithms in Real Algebraic Geometry}},
+ publisher = "Springer-Verlag",
+ year = "2010",
+ isbn = "978-3-642-06964-2"
+}
+
@inproceedings{Bent07,
author = "Benton, Nick and Zarfaty, Uri",
title = {{Formalizing and Verifying Semantic Type Soundness of a
@@ -9598,6 +9889,30 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@book{Bees80,
+ author = "Beeson, Michael J.",
+ title = {{Foundations of Constructive Mathematics}},
+ publisher = "Springer-Verlag",
+ year = "1985",
+ isbn = "3-540-12173-0"
+}
+
+@misc{Beni95,
+ author = "Benini, Marco",
+ title = {{Barendregt's $\lambda$-Cube in Isabelle}},
+ link = "\url{}",
+ year = "1995",
+ abstract =
+ "We present an implementation of Barendregt’s $\lambda$-Cube in the
+ Isabelle proof system. Isabelle provides many facilities for
+ developing a useful specification and proving environment from the
+ basic formulation of formal systems. We used those facilities to
+ provide an environment where the user can describe problems and derive
+ proofs interactively.",
+ paper = "Beni95.pdf",
+ keywords = "printed"
+}
+
@misc{Bonn18,
author = "Bonnaire-Sergeant, Ambrose",
title = {{Are unsound type systems wrong?}},
@@ -9623,7 +9938,56 @@ paper = "Brea89.pdf"
is described at length. The theorems proved by the program include
that REVERSE is its own inverse and that a particular SORT program is
correct. A list of theorems proved by the program is given.",
- paper = "Boye75.pdf"
+ paper = "Boye75.pdf",
+ keywords = "printed"
+}
+
+@article{Boye84,
+ author = "Boyer, Robert S. and Moore, J Strother",
+ title = {{Proof-Checking, Theorem-Proving, and Program Verification}},
+ journal = "Contemporary Mathematics",
+ volume = "29",
+ year = "1984",
+ abstract =
+ "This article consists of three parts: a tutorial introduction to a
+ computer program that proves theorems by induction; a brief
+ description of recent applications of that theorem-prover; and a
+ discussion of several nontechnical aspects of the problem of building
+ automatic theorem-provers. The theorem-prover described has proved
+ theorems such as the uniqueness of prime factorizations, Fermat's
+ theorem, and the recursive unsolvability of the halting problem.
+
+ The article is addressed to those who know nothing about automatic
+ theorem-proving but would like a glimpse of one such system. This
+ article definitely does not provide a balanced view of all automatic
+ theorem-proving, the literature of which is already rather large and
+ technical. Nor do we describe the details of our theorem-proving
+ system, but they can be found in the books, articles, and technical
+ reports that we reference.
+
+ In our opinion, progress in automatic theorem-proving is largely a
+ function of the mathematical ability of those attempting to build such
+ systems. We encourage good mathematicians to work in the field.",
+ paper = "Boye84.pdf",
+ keywords = "printed"
+}
+
+@article{Boye84a,
+ author = "Boyer, Robert S. and Moore, J Strother",
+ title = {{A Mechanical Proof of the Turing Completeness of Pure Lisp}},
+ journal = "Contemporary Mathematics",
+ volume = "29",
+ year = "1984",
+ abstract =
+ "We describe a proof by a computer program of the Turing completeness
+ of a computational paradigm akin to Pure LISP. That is, we define
+ formally the notions of a Turing machine and of a version of Pure
+ LISP and prove that anything that can be computed by a Turing machine
+ can be computed by LISP. While this result is straightforward, we
+ believe this is the first instance of a machine proving the Turing
+ completeness of another computational paradigm.",
+ paper = "Boye84a.pdf",
+ keywords = "printed"
}
@book{Brod94,
@@ -9635,6 +9999,42 @@ paper = "Brea89.pdf"
paper = "Brod94.pdf"
}
+@article{Broo87a,
+ author = "Brooks, Rodney A.",
+ title = {{Intelligence Without Representation}},
+ journal = "Artificial Intelligence",
+ volume = "47",
+ year = "1991",
+ pages = "139-159",
+ abstract =
+ "Artificial intelligence research has foundered on the issue of
+ representation. When intelligence is approached in an incremental
+ manner, with strict reliance on interfacing to the real world through
+ perception and action, reliance on representation disappears. In this
+ paper we outline our approach to incrementally building complete
+ intelligent Creatures. The fundamental decomposition of the
+ intelligent system is not into independent information processing
+ units which must interface with each other via representations.
+ Instead, the intelligent system is decomposed into independent and
+ parallel activity producers which all interface directly to the world
+ through perception and action, rather than interface to each other
+ particularly much. The notions of central and peripheral systems
+ evaporate---everything is both central and peripheral. Based on these
+ principles we have built a very successful series of mobile robots
+ which operate without supervision as Creatures in standard office
+ environments.",
+ paper = "Broo87a.pdf"
+}
+
+@misc{Byte79,
+ author = "Byte Publications",
+ title = {{LISP}},
+ link = "\url{https://ia802603.us.archive.org/30/items/byte-magazine-1979-08/1979_08_BYTE_04-08_LISP.pdf}",
+ publisher = "McGraw-Hill",
+ year = "1979",
+ paper = "Byte79.pdf"
+}
+
@inproceedings{Cald97,
author = "Caldwell, James L.",
title = {{Moving proofs-as-programs into practice}},
@@ -9768,7 +10168,58 @@ paper = "Brea89.pdf"
and techniques for proving facts about specific programs. We also give
a simpler version of the GiSdeI-Kleene way of representing computable
functions by first order sentences.",
- paper = "Cart78.pdf"
+ paper = "Cart78.pdf",
+ keywords = "printed"
+}
+
+@article{Cart84,
+ author = "Cartwright, Robert",
+ title = {{Recursive Programs as Definitions in First-Order Logic}},
+ journal = "SIAM J. Computing",
+ volume = "13",
+ number = "2",
+ pages = "374-408",
+ year = "1984",
+ abstract =
+ "Despite the reputed limitations of first order logic, it is easy to
+ state and prove almost all interesting properties of recursive
+ programs within a simple first order theory, by using an approach we
+ call 'first order programming logic'. Unlike higher order logics based
+ on fixed-point induction, first order programming logic is founded on
+ deductive principles that are familiar to most programmers. Informal
+ structural induction arguments (such as termination proofs for LISP
+ append, McCarthy’s 91-function, and Ackermann’s function) have direct
+ formalizations within the system.
+
+ The essential elements of first order programming logic are:
+ \begin{enumerate}
+ \item The data domain $D$ must be a finitely generated set that
+ explicitly includes the 'undefined' object $\bot$
+ (representing nontermination) as well as ordinary data objects.
+ \item Recursive programs over $D$ are treated as logical definitions
+ augmenting a first order theory of the data domain.
+ \item The interpretation of a recursive program is the least
+ fixed-point of the functional corresponding to the program.
+ \end{enumerate}
+
+ Since the data domain $D$ is a finitely generated set, the first order
+ axiomatization of $D$ includes a structural induction axiom scheme. This
+ axiom scheme serves as the fundamental 'proof rule' of first order
+ programming logic.
+
+ The major limitation of first order programming logic is that every
+ fixed-point of the functional corresponding to a recursive program is
+ an acceptable interpretation for the program. The logic fails to
+ capture the notion of least fixed-point. To overcome this limitation,
+ we present a simple, effective procedure for transforming an arbitrary
+ recursive program into an equivalent recursive program that has a
+ unique fixed-point, yet retains the logical structure of the
+ original. Given this transformation technique, it is our experience
+ that first order programming logic is sufficiently powerful to prove
+ almost any property of practical interest about the functions computed
+ by recursive programs.",
+ paper = "Cart84.pdf",
+ keywords = "printed"
}
@article{Char08,
@@ -9780,6 +10231,26 @@ paper = "Brea89.pdf"
paper = "Char08.pdf"
}
+@article{Chen08,
+ author = "Chen, William Y.C. and Paule, Peter and Saad, Husam L.",
+ title = {{Converging to Gosper's Algorithm}},
+ journal = "Advances in Applied Mathematics",
+ volume = "41",
+ number = "3",
+ pages = "351-364",
+ year = "2008",
+ abstract =
+ "Given two polynomials, we find a convergence property of the GCD of
+ the rising factorial and the falling factorial. Based on this
+ property, we present a unified approach to computing the universal
+ denominators as given by Gosper's algorithm and Abramov's algorithm
+ for finding rational solutions to linear difference equations with
+ polynomial coefficients. Our approach easily extends to the
+ q-analogues.",
+ paper = "Chen08.pdf",
+ keywords = "printed"
+}
+
@article{Chli07,
author = "Chlipala, Adam J.",
title = {{A Certified Type-Preserving Compiler from the Lambda Calculus
@@ -9821,6 +10292,49 @@ paper = "Brea89.pdf"
paper = "Clin72.pdf"
}
+@phdthesis{Cohn79,
+ author = "Cohn, Avra Jean",
+ title = {{Machine Assisted Proofs of Recursion Implementation}},
+ school = "University of Edinburgh",
+ year = "1979",
+ link = "\url{https://www.era.lib.ed.ac.uk/handle/1842/6643}",
+ abstract =
+ "Three studies in the machine assisted proof of recursion
+ implementation are described. The verification system used is
+ Edinburgh LCF (Logic for Computable Functions). Proofs are generated,
+ in LCF, in a goal-oriented fashion by the application of strategies
+ reflecting informal proof plans. LCF is introduced in Chapter 1. We
+ present three case studies in which proof strategies are developed and
+ (except in the third) tested in LCF. Chapter 2 contains an account of
+ the machine generated proofs of three program transformations (from
+ recursive to iterative function schemata). Two of the examples are
+ taken from Manna and Waldinger. In each case, the recursion is
+ implemented by the introduction of a new data type, e.g., a stack or
+ counter. Some progress is made towards the development of a general
+ strategy for producing the equivalence proofs of recursive and
+ iterative function schemata by machine. Chapter 3 is concerned with
+ the machine generated proof of the correctness of a compiling
+ algorithm. The formulation, borrowed from Russell, includes a simple
+ imperative language with a while and conditional construct, and a low
+ level language of labelled statements, including jumps. We have, in
+ LCF, formalised his denotational descriptions of the two languages and
+ performed a proof of the preservation of the semantics under
+ compilation. In Chapter 4, we express and informally prove the
+ correctness of a compiling algorithm for a language containing
+ declarations and calls of recursive procedures. We present a low level
+ language whose semantics model a standard activation stack
+ implementation. Certain theoretical difficulties (connected with
+ recursively defined relations) are discussed, and a proposed proof in
+ LCF is outlined. The emphasis in this work is less on proving original
+ theorems, or even automatically finding proofs of known theorems, than
+ on (i) exhibiting and analysing the underlying structure of proofs,
+ and of machine proof attempts, and (ii) investigating the nature of
+ the interaction (between a user and a computer system) required to
+ generate proofs mechanically; that is, the transition from informal
+ proof plans to behaviours which cause formal proofs to be performed.",
+ paper = "Cohn79.pdf"
+}
+
@incollection{Como91a,
author = "Comon, Hubert",
title = {{Disunification: a Survey}},
@@ -9847,6 +10361,39 @@ paper = "Brea89.pdf"
paper = "Como01.ps"
}
+@article{Cons92,
+ author = "Constable, Robert L.",
+ title = {{Formal Theories and Software Systems: Fundamental
+ Connections between Computer Science and Logic}},
+ journal = "LNCS",
+ volume = "653",
+ pages = "105-127",
+ year = "1992",
+ abstract =
+ "A formal Theory of Logics is sketched using concepts from a
+ modern proof development sysmte (like Nuprl, Coq or other such
+ software systems). The Theory can be applied to understanding
+ these software systems, and the application suggests a design
+ principle called the {\sl theories-as-systems notion}.
+ Applications of the Theory to automated reasoning have led to
+ an empirical study of the notion of {\sl obvious inference}.
+ Experimental results are cited to explain key constants of a
+ scientific theory of {\sl obvious inference}. The constants appear
+ in what is called here the {\sl deBruijn equation}.",
+ paper = "Cons92.pdf",
+ keywords = "printed",
+}
+
+@incollection{Cons03,
+ author = "Constable, Robert L.",
+ title = {{Naive Computational Type Theory}},
+ booktitle = "Proof and System Reliability",
+ publisher = "unknown",
+ link = "\url{http://www.nuprl.org/documents/Constable/naive.pdf}",
+ year = "2003",
+ paper = "Cons03.pdf"
+}
+
@incollection{Coqu88,
author = "Coquuand, Thierry and Huet, Gerard",
title = {{The Calculus of Constructions}},
@@ -9965,6 +10512,13 @@ paper = "Brea89.pdf"
pages = "175-220"
}
+@misc{Daly18,
+ author = "Daly, Timothy",
+ title = {{Proving Axiom Sane: Survey of CAS and Proof Cooperation}},
+ year = "2018",
+ comment = "In Preparation"
+}
+
@article{Darg07,
author = "Dargaye, Zaynah and Leroy, Xavier",
title = {{Mechanized Verification of CPS Transformations}},
@@ -10204,7 +10758,8 @@ paper = "Brea89.pdf"
a practically-oriented core calculus based on the sequent
calculus, and used it to re-implement a substantial chunk of the
Glasgow Haskell Compiler.",
- paper = "Down16.pdf"
+ paper = "Down16.pdf",
+ keywords = "printed"
}
@techreport{Drey02,
@@ -10292,6 +10847,45 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@book{Espa17,
+ author = "Esparza, Javier",
+ title = {{Automata Theory: An Algorithmic Approach}},
+ publisher = "Esparza, Javier",
+ year = "2017",
+ link = "\url{https://www7.in.tum.de/~esparza/autoskript.pdf}",
+ paper = "Espa17.pdf"
+}
+
+@article{Farm95b,
+ author = "Farmer, William M.",
+ title = {{Reasoning about Partial Functions with the Aid of a
+ Computer}},
+ journal = "Erkenntnis",
+ volume = "43",
+ number = "3",
+ pages = "279-294",
+ year = "1995",
+ abstract =
+ "Partial functions are ubiquitous in both mathematics and computer
+ science. Therefore, it is imperative that the underlying logical
+ formalism for a general-purpose mechanized mathematics system
+ provide strong support for reasoning about partial
+ functions. Unfortunately, the common logical formalisms --
+ first-order logic, type theory, and set theory -- are usually only
+ adequate for reasoning about partial functions {\sl in
+ theory}. However, the approach to partial functions traditionally
+ employed by mathematicians is quite adequate {\sl in
+ practice}. This paper shows how the traditional approach to
+ partial functions can be formalized in a range of formalisms that
+ includes first-order logic, simple type theory, and Von-Neuman,
+ Bernays, Godel set theory. It argues that these new formalisms
+ allow one to directly reason about partial functions; are based on
+ natural, well-understood, familiar principles; and can be
+ effectively implemented in mechanized mathematics systems.",
+ paper = "Farm95b.pdf",
+ keywords = "printed"
+}
+
@inproceedings{Farm00,
author = "Farmer, William M.",
title = {{A Proposal for the Development of an Interactive
@@ -10456,6 +11050,66 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@misc{Fefe95,
+ author = "Feferman, Solomon",
+ title = {{Definedness}},
+ year = "1995",
+ link = "\url{https://math.stanford.edu/~feferman/papers/definedness.pdf}",
+ abstract =
+ "Questions of definedness are ubiquitous in mathematics. Informally,
+ these involve reasoning about expressions which may or may not have a
+ value. This paper surveys work on logics in which such reasoning can
+ be carried out directly, especially in computational contexts. It
+ begins with a general logic of 'partial terms', continues with partial
+ combinatory and lambda calculi, and concludes with an expressively
+ rich theory of partial functions and polymorphic types, where
+ termination of functional programs can be established in a natural way.",
+ paper = "Fefe95.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Felty, Amy}
+\index{Miller, Dale}
+\begin{chunk}{axiom.bib}
+@techreport{Felt90,
+ author = "Felty, Amy and Miller, Dale",
+ title = {{Encoding a Dependent-Type Lambda Calculus in a Logic
+ Programming Language}},
+ type = "technical report",
+ number = "MS-CIS-90-18",
+ institution = "University of Pennsylvania",
+ year = "1990",
+ abstract =
+ "Various forms of typed $\lambda$-calculi have been proposed as
+ specification languages for representing wide varieties of object
+ logics. The logical framework, LF, is an example of such a
+ dependent-type $\lambda$-calculus. A small subset of intuitionistic
+ logic with quantification over simply typed $\lambda$-calculus has
+ also been proposed as a framework for specifying general logics. The
+ logic of {\sl hereditary Harrop formulas} with quantification at all
+ non-predicate types, denoted here as $hh^\omega$ , is such a
+ meta-logic that has been implemented in both the Isabelle theorem
+ prover and the $\lambda$Prolog logic programming language. Both
+ frameworks provide for specifications of logics in which details
+ involved with free and bound variable occurrences, substitutions,
+ eigenvariables, and the scope of assumptions within object logics are
+ handled correctly and elegantly at the 'meta' level. In this paper, we
+ show how LF can be encoded into $hh^\omega$ in a direct and natural
+ way by mapping the typing judgments in LF into propositions in the
+ logic of $hh^\omega$. This translation establishes a very strong
+ connection between these two languages: the order of quantification in
+ an LF signature is exactly the order of a set of $hh^\omega$ clauses,
+ and the proofs in one system correspond directly to proofs in the
+ other system. Relating these two languages makes it possible to
+ provide implementations of proof checkers and theorem provers for
+ logics specified in LF by using standard logic programming techniques
+ which can be used to implement $hh^\omega$.",
+ paper = "Felt90.pdf",
+ keywords = "printed"
+}
+
@article{Fill03,
author = "Filliatre, Jean-Christophe",
title = {{Verification of Non-Functional Programs using Interpretations
@@ -10570,6 +11224,15 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@misc{Fong18,
+ author = "Fong, Brendan and Spivak, David I.",
+ title = {{Seven Sketches in Compositionality: An Invitation to
+ Applied Category Theory}},
+ link = "\url{http://math.mit.edu/~dspivak/teaching/sp18/7Sketches.pdf}",
+ year = "2018",
+ paper = "Fong18.pdf"
+}
+
@inproceedings{Free91,
author = "Freeman, Tim and Pfenning, Frank",
title = {{Refinement Types for ML}},
@@ -10840,6 +11503,70 @@ paper = "Brea89.pdf"
paper = "Gord06.pdf"
}
+@book{Grah93,
+ author = "Graham, Paul",
+ title = {{On Lisp}},
+ publisher = "Prentice Hall",
+ year = "1993",
+ link = "\url{http://ep.yimg.com/ty/cdn/paulgraham/onlisp.pdf}",
+ comment = "code in papers/onlisp.lisp",
+ isbn = "0130305529",
+ abstract =
+ "On Lisp is a comprehensive study of advanced Lisp techniques, with
+ bottom-up programming as the unifying theme. It gives the first
+ complete description of macros and macro applications. The book also
+ covers important subjects related to bottom-up programming, including
+ functional programming, rapid prototyping, interactive development,
+ and embedded languages. The final chapter takes a deeper look at
+ object-oriented programming than previous Lisp books, showing the
+ step-by-step construction of a working model of the Common Lisp Object
+ System (CLOS).",
+ paper = "Grah93.pdf"
+}
+
+@inproceedings{Grai07,
+ author = "Graillat, Stef and Menissier-Morain, Valerie",
+ title = {{Error-free Transformations in Real and Complex
+ Floating-point Arithmetic}},
+ booktitle = "Int. Symp. on Nonlinear Theory and Applications",
+ year = "2007",
+ abstract =
+ "Error-free transformation is a concept that makes it possible to
+ compute accurate results within a floating point arithmetic. Up to
+ now, it has only been studied for real floating point arithmetic. In
+ this short note, we recall the known error-free transformations for
+ real arithmetic and we propose some new error-free transformations for
+ complex floating point arithmetic. This will make it possible to
+ design some new accurate algorithms for summation, dot product and
+ polynomial evaluation with complex entries.",
+ paper = "Grai07.pdf"
+}
+
+@article{Grev08,
+ author = "Greve, David A. and Kaufmann, Matt and Manolios, Panagiotis
+ and Moore, J Strother and Ray, Sandip and Ruiz-Reina, Jose Luis
+ and Sumners, Rob and Vroon, Daron and Wilding, Matthew",
+ title = {{Efficient Execution in an Automated Reasoning Environment}},
+ journal = "Journal of Functional Programming",
+ volume = "18",
+ number = "1",
+ pages = "15-46",
+ year = "2008",
+ abstract =
+ "We describe a method that permits the user of a mechanized
+ mathematical logic to write elegant logical definitions while allowing
+ sound and efficient execution. In particular, the features supporting
+ this method allow the user to install, in a logically sound way,
+ alternative executable counterparts for logically defined
+ functions. These alternatives are often much more efficient than the
+ logically equivalent terms they replace. These features have been
+ implemented in the ACL2 theorem prover, and we discuss several
+ applications of the features in ACL2.",
+ paper = "Grev08.pdf",
+ keywords = "printed"
+
+}
+
@misc{Gros15,
author = "Gross, Jason and Chlipala, Adam",
title = {{Parsing Parses}},
@@ -10861,6 +11588,26 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@misc{Gual14,
+ author = "Guallart, Nino",
+ title = {{An Overview of Type Theories}},
+ link = "\url{https://arxiv.org/pdf/1411.1029.pdf}",
+ year = "2014",
+ abstract =
+ "Pure type systems arise as a generalisation of simply typed lambda
+ calculus. The contemporary development of mathematics has renewed
+ the interest in type theories, as they are not just the object of
+ mere historical research, but have an active role in the development
+ of computational science and core mathematics. It is worth exploring
+ some of them in depth, particularly predicative Martin-Löf’s
+ intuitionistic type theory and impredicative Coquand’s calculus of
+ constructions. The logical and philosophical differences and
+ similarities between them will be studied, showing the relationship
+ between these type theories and other fields of logic.",
+ paper = "Gual14.pdf",
+ keywords = "printed"
+}
+
@techreport{Gunt89,
author = "Gunter, Elsa L.",
title = {{Doing Algebra in Simple Type Theory}},
@@ -11006,6 +11753,14 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@book{Harr09,
+ author = "Harrison, John",
+ title = {{Handbook of Practical Logic and Automated Reasoning}},
+ publisher = "Cambridge University Press",
+ year = "2009",
+ isbn = "978-0-521-89957-4"
+}
+
@article{Hatt97,
author = "Hatton, Les",
title = {{Software Failure: Follies and Fallacies}},
@@ -11015,6 +11770,18 @@ paper = "Brea89.pdf"
year = "1997",
}
+@article{Hell17,
+ author = "Hellman, Martin E.",
+ title = {{Cybersecurity, Nuclear Security, Alan Turing, and
+ Illogical Logic}},
+ journal = "Communications of the ACM",
+ volume = "60",
+ number = "12",
+ pages = "52-59",
+ year = "2017",
+ link = "\url{https://cacm.acm.org/magazines/2017/12/223042-cybersecurity-nuclear-security-alan-turing-and-illogical-logic/fulltext}"
+}
+
@incollection{Hoar72,
author = "Hoare, C.A.R",
title = {{Notes on Data Structuring}},
@@ -11142,6 +11909,31 @@ paper = "Brea89.pdf"
paper = "Jone96.pdf"
}
+@misc{Jone97,
+ author = "Jones, Simon Peyton and Meijer, Erik",
+ title = {{Henk: A Typed Intermediate Language}},
+ year = "1997",
+ link =
+ "\url{https://www.microsoft.com/en-us/research/wp-content/uploads/1997/01/henk.pdf}",
+ abstract =
+ "There is a growing interest in the use of richly-typed
+ intermediate languages in sophisticated compilers for
+ higher-order, typed source languages. These intermediate languages
+ are typically stratified, involving terms, types, and kinds. As
+ the sophistication of the type system increases, there three
+ levels begin to look more and more similar, so an attraictive
+ approach is to use a single syntax, and a single data type in the
+ compiler, to represent all three.
+
+ The theory of so-called {\sl pure type systems} amkes precisely
+ such an identification. This paper describes Henk, a new typed
+ intermediate langugage based closely on a particuarl pure type
+ system, {\sl the lambda cube}. On the way we give a tutorial
+ introduction to the lambda cube.",
+ paper = "Jone97.pdf",
+ keywords = "printed"
+}
+
@article{Kaes88,
author = "Kaes, Stefan",
title = {{Parametric Overloading in Polymorphic Programming Languages}},
@@ -11390,6 +12182,44 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@techreport{Kroh93,
+ author = "Krohnfeldt, Jed and Steury, Craig",
+ title = {{Frolic: Logic Programming with Frobs}},
+ type = "technical report",
+ number = "86-08",
+ institution = "University of Utah",
+ comment = "Utah PASS Project OpNote",
+ year = "1993",
+ code = "PAS/frolic.tgz",
+ abstract =
+ "This paper describes Frolic, a logic programming system written
+ in Common Lisp. It provides logic programming capabilities for
+ Lisp programmers. In addition to being a Prolog in Lisp, Frolic
+ has capabilities for reasoning about {\sl frobs}. A frob is a data
+ structure that gives the power and flexibility of both frame and
+ object data types. This paper gives a brief introduction to the
+ use and implementation of Frolic.",
+ paper = "Kroh93.pdf",
+ keywords = "printed"
+}
+
+@article{Lamp78,
+ author = "Lamport, Leslie",
+ title = {{The Specification and Proof of Correctness of Interactive
+ Programs}},
+ journal = "LNCS",
+ volume = "75",
+ year = "1978",
+ abstract =
+ "A method of specifying interactive programs is described. The
+ Floyd-Hoare method of proving a program correct is modified to
+ permit one to prove that an implementation correctly implements
+ its specification. A production program to accept and format
+ typed input is formally specified, and its implementation with
+ a TECO program is proved correct.",
+ paper = "Lamp78.pdf",
+ keywords = "printed"
+}
+
@article{Lamp81,
author = "Lamport, Leslie and Owicki, Susan",
title = {{Program Logics and Program Verification}},
@@ -11401,6 +12231,43 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@techreport{Lero90,
+ author = "Leroy, Xavier",
+ title = {{The ZINC Experiment: An Economical Implementation of the
+ ML Language}},
+ type = "technical report",
+ institution = "Institut National de Recherche en Informatique et en
+ Automatique",
+ number = "117",
+ year = "1990",
+ abstract =
+ "This report details the design and implementation of the ZINC
+ system. This is an implementation of the ML language, intended to
+ serve as a test field for various extensions of the language, and
+ for new implementation techniques as well. This system is strongly
+ oriented toward separate compilation and the production of small,
+ standalone programs; type safety is ensured by a Modula-2-like
+ module system. ZINC uses simple, portable techniques, such as
+ bytecode interpretation; a sophisticated execution model helps
+ counterbalance the interpretation overhead. Other highlights
+ include an efficient implementation of records with inclusion
+ (subtyping).",
+ paper = "Lero90.pdf",
+ keywords = "printed"
+}
+
+@article{Lero98,
+ author = "Leroy, Xavier",
+ title = {{An Overview of Types in Compilation}},
+ journal = "LNCS",
+ volume = "1473",
+ year = "1998",
+ publisher = "Springer-Verlag",
+ pages = "1-8",
+ paper = "Lero98.pdf",
+ keywords = "printed"
+}
+
@article{Lero09,
author = "Leroy, Xavier",
title = {{A Formally Verified Compiler Back-end}},
@@ -11443,6 +12310,83 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@article{Lohx01,
+ author = "Loh, Andres and McBride, Conor and Swierstra, Wouter",
+ title = {{A Tutorial Implementation of a Dependently Typed Lambda
+ Calculus}},
+ journal = "Fundamenta Informaticae",
+ volume = "XXI",
+ year = "2001",
+ pages = "1001-1031",
+ abstract =
+ "We present the type rules for a dependently typed core calculus
+ together with a straight-forward implementation in Haskell. We
+ explicitly highlight the changes necessary to shift from a
+ simply-typed lambda calculus to the dependently typed lambda
+ calculus. We also describe how to extend our core language with data
+ types and write several small example programs. The article is
+ accompanied by an executable interpreter and example code that allows
+ immediate experimentation with the system we describe.",
+ paper = "Lohx01.pdf",
+ keywords = "printed"
+}
+
+@misc{Lohx18,
+ author = "Loh, Andres and McBride, Conor and Swierstra, Wouter",
+ title = {{Simply Easy! An Implementation of a Dependently Typed
+ Lambda Calculus}},
+ link = "\url{http://strictlypositive.org/Easy.pdf}",
+ year = "2018",
+ abstract =
+ "We present an implementation in Haskell of a dependently-typed
+ lambda calculus that can be used as the core of a programming
+ language. We show that a dependently-typed lambda calculus is no
+ more difficult to implement than other typed lambda calculi. In fact,
+ our implementation is almost as easy as an implementation of the
+ simply typed lambda calculus, which we emphasize by discussing
+ the modifications necessary to go from one to the other. We explain
+ how to add data types and write simple programs in the core
+ language, and discuss the steps necessary to build a full-fledged
+ programming language on top of our simple core.",
+ paper = "Lohx18.pdf",
+ keywords = "printed"
+}
+
+@misc{Luxx16,
+ author = "Lu, Eric",
+ title = {{Barendregt's Cube and Programming with Dependent Types}},
+ link = "\url{https://www.seas.harvard.edu/courses/cs252/2016fa/15.pdf}",
+ paper = "Luxx16.pdf",
+ keywords = "printed"
+}
+
+@article{Mann72,
+ author = "Manna, Zohar and Vuillemin, Jean",
+ title = {{Fixpoint Approach to the Theory of Computation}},
+ journal = "Communications of the ACM",
+ volume = "15",
+ number = "7",
+ year = "1972",
+ pages = "828-836",
+ abstract =
+ "Following the fixpoint theory of Scott, the semantics of computer
+ programs are defined in terms of the least fixpoints of recursive
+ programs. This allows not only the justification of all existing
+ verification techniques, but also their extension to the handling, in
+ a uniform manner of various properties of computer programs, including
+ correctness, termination, and equivalence.",
+ paper = "Mann72.pdf",
+ keywords = "printed"
+}
+
+@book{Mann85,
+ author = "Manna, Zohar and Waldinger, Richard",
+ title = {{The Logical Basis for Computer Programming (Vol 1)}},
+ publisher = "Addison-Wesley",
+ year = "1985",
+ isbn = "0-201-15260-2"
+}
+
@article{Mano03,
author = "Manolios, Panagiotis and Moore, J Strother",
title = {{Partial Functions in ACL2}},
@@ -11506,7 +12450,27 @@ paper = "Brea89.pdf"
title = {{A Basis for a Mathematical Theory of Computation}},
booktitle = "Computer Programming and Formal Systems",
year = "1963",
- paper = "Mcca63.pdf"
+ paper = "Mcca63.pdf",
+ keywords = "printed"
+}
+
+@inproceedings{Mcca67,
+ author = "McCarthy, John and Painter, James",
+ title = {{Correctness of a Compiler for Arithmetic Expressions}},
+ booktitle = "Proc. Symp. in Applied Mathematics",
+ publisher = "American Mathematical Society",
+ year = "1967",
+ paper = "Mcca67.pdf",
+ keywords = "printed"
+}
+
+@misc{Mcca96,
+ author = "McCarthy, J.",
+ title = {{Towards a Mathematical Science of Computation}},
+ link = "\url{http://www-formal.stanford.edu/jmc/towards.pdf}",
+ year = "1996",
+ paper = "Mcca96.pdf",
+ keywords = "printed"
}
@inproceedings{Mcdo97,
@@ -11712,6 +12676,57 @@ paper = "Brea89.pdf"
paper = "Morg98.pdf"
}
+@phdthesis{Morr95,
+ author = "Morrisett, Greg",
+ title = {{Compiling with Types}},
+ school = "Carnegie Mellon University",
+ year = "1995",
+ comment = "CMU-CS-95-226",
+ link = "\url{https://www.cs.cmu.edu/~rwh/theses/morrisett.pdf}",
+ abstract =
+ "Compilers for monomorphic languages, such as C and Pascal, take
+ advantage of types to determine data representations, alignment,
+ calling conventions, and register selection. However, these languages
+ lack important features including polymorphism, abstract datatypes,
+ and garbage collection. In contrast, modern programming languages
+ such as Standard ML (SML), provide all of these features, but existing
+ implementations fail to take full advantage of types. The result is
+ that performance of SML code is quite bad when compared to C.
+
+ In this thesis, I provide a general framework, called
+ {\sl type-directed compilation}, that allows compiler writers to
+ take advantage of types at all stages in compilation. In the
+ framework, types are used not only to determine efficient
+ representations and calling conventions, but also to prove the
+ correctness of the compiler. A key property of type-directed
+ compilation is that all but the lowest levels of the compiler use
+ {\sl typed intermediate languages}. An advantage of this approach
+ is that it provides a means for automatically checking the
+ integrity of the resulting code.
+
+ An important contribution of this work is the development of a new,
+ statically-typed intermediate language, called $\lambda^{ML}_i$.
+ This language supports {\sl dynamic type dispatch}, providing a
+ means to select operations based on types at run time. I show how
+ to use dynamic type dispatch to support polymorphism, ad-hoc
+ operators, and garbage collection without having to box or tag
+ values. This allows compilers for SML to take advantage of
+ techniques used in C compilers, without sacrificing language
+ features or separate compilation.
+
+ To demonstrate the applicability of my approach, I, along with
+ others, have constructed a new compiler for SML called TIL that
+ eliminates most restrictions on the representations of values. The
+ code produced by TIL is roughly twice as fast as code produced by
+ the SML/NJ compiler. This is due at least partially to the use
+ of natural representations, but primarily to the conventional
+ optimizer which manipulates typed, $\lambda^{ML}_i$ code. TIL
+ demonstrates that combining type-directed compilation with dynamic
+ type dispatch yields a superior architecture for compilers of
+ modern languages.",
+ paper = "Morr95.pdf"
+}
+
@inproceedings{Morr99,
author = "Morrisett, Greg and Walker, David and Crary, Karl and Glew, Neil",
title = {{From System F to Typed Assembly Language}},
@@ -11961,6 +12976,32 @@ paper = "Brea89.pdf"
paper = "Niss99.pdf"
}
+@inproceedings{Nobl16,
+ author = "Noble, James and Black, Andrew P. and Bruce, Kim B. and
+ Homer, Michael and Miller, Mark S.",
+ title = {{The Left Hand of Equals}},
+ booktitle = "Int. Symp. of New Ideas, New Paradigms, and Reflections
+ on Programming and Software",
+ publisher = "ACM",
+ pages = "224-237",
+ year = "2016",
+ abstract =
+ "When is one object equal to another object? While object identity is
+ fundamental to object-oriented systems, object equality, although
+ tightly intertwined with identity, is harder to pin down. The
+ distinction between identity and equality is reflected in
+ object-oriented languages, almost all of which provide two variants of
+ 'equality', while some provide many more. Programmers can usually
+ override at least one of these forms of equality, and can always
+ define their own methods to distinguish their own objects.
+
+ This essay takes a reflexive journey through fifty years of identity
+ and equality in object-oriented languages, and ends somewhere we did
+ not expect: a 'left-handed' equality relying on trust and grace.",
+ paper = "Nobl16.pdf",
+ keywords = "printed"
+}
+
@article{Odon81,
author = "O'Donnell, Michael J.",
title = {{A Critique of the Foundations of Hoare-style Programming Logics}},
@@ -12037,6 +13078,24 @@ paper = "Brea89.pdf"
paper = "Okas96.pdf"
}
+@inproceedings{Oury08,
+ author = "Oury, Nicolas and Swierstra, Wouter",
+ title = {{The Power of Pi}},
+ link = "\url{https://cs.ru.nl/~wouters/Publications/ThePowerOfPi.pdf}",
+ booktitle = "Int. Conf. on Functional Programming",
+ year = "2008",
+ abstract =
+ "This paper exhibits the power of programming with dependent types by
+ dint of embedding three domain-specific languages: Cryptol, a
+ language for cryptographic protocols; a small data description
+ language; and relational algebra. Each example demonstrates
+ particular design patterns inherent to dependently-typed programming.
+ Documenting these techniques paves the way for further research in
+ domain-specific embedded type systems.",
+ paper = "Oury08.pdf",
+ keywords = "printed"
+}
+
@article{Parn72,
author = "Parnas, David L.",
title = {{A Technique for Software Module Specification with
@@ -12091,6 +13150,42 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@article{Paul83,
+ author = "Paulson, Lawrence C.",
+ title = {{A Higher-Order Implementation of Rewriting}},
+ journal = "Science of Computer Programming",
+ volume = "3",
+ pages = "119-149",
+ year = "1983",
+ abstract =
+ "Many automatic theorem-provers rely on rewriting. Using theorems as
+ rewrite rules helps to simplify the subgoals that arise during a
+ proof. LCF is an interactive theorem-prover intended For reasoning
+ about computation. Its implementation of rewriting is presented in
+ detail. LCF provides a Family of rewriting Functions, and operators to
+ combine them. A succession of Functions is described, From pattern
+ matching primitives to the rewriting tool that performs most
+ inferences in LCF proofs. The design is highly modular. Each function
+ performs a basic, specific task, such as recognizing a certain form of
+ tautology. Each operator implements one method of building a rewriting
+ Function From simpler ones. These pieces can be put together in
+ numerous ways, yielding a variety of rewriting strategies. The
+ approach involves programming with higher-order Functions. Rewriting
+ Functions are data values, produced by computation on other rewriting
+ Functions. The code is in daily use at Cambridge, demonstrating the
+ practical use of Functional programming.",
+ paper = "Paul83.pdf",
+ keywords = "printed"
+}
+
+@book{Paul87,
+ author = "Paulson, Lawrence C.",
+ title = {{Logic and Computation}},
+ publisher = "Press Syndicate of the University of Cambridge",
+ year = "1987",
+ isbn = "0-521-34632-0"
+}
+
@incollection{Paul90a,
author = "Paulson, Lawrence C.",
title = {{Isabelle: The Next 700 Theorem Provers}},
@@ -12228,6 +13323,14 @@ paper = "Brea89.pdf"
paper = "Phil92.pdf"
}
+@book{Pier02,
+ author = "Pierce, Benjamin C.",
+ title = {{Types and Programming Languages}},
+ publisher = "MIT Press",
+ year = "2002",
+ isbn = "978-0-262-16209-8"
+}
+
@misc{Piro08,
author = "Piroi, Florina and Buchberger, Bruno and Rosenkranz, Camelia",
title = {{Mathematical Journals as Reasoning Agents: Literature Review}},
@@ -12345,6 +13448,34 @@ paper = "Brea89.pdf"
paper = "Pott98.pdf"
}
+@misc{Popo03,
+ author = "Popov, Nikolaj",
+ title = {{Verification Using Weakest Precondition Strategy}},
+ comment = "Talk at Comp. Aided Verification of Information Systems",
+ year = "2003",
+ location = "Timisoara, Romania",
+ abstract =
+ "We describe the weakest precondition strategy for verifying
+ programs. This is a method which takes a specification and an
+ annotated program and generates so-called verification conditions:
+ mathematical lemmata which have to be proved in order to obtain a
+ formal correctness proof for the program with respect to its
+ specification. There are rules for generating the intermediate pre
+ and post conditions algorithmically. Based on these rules, we have
+ developed a package for generating verification conditions.",
+ paper = "Popo03.pdf",
+ keywords = "printed"
+}
+
+@article{Prat79,
+ author = "Pratt, Vaughan",
+ title = {{A Mathematician's View of Lisp}},
+ journal = "Byte Magazine",
+ year = "1979",
+ pages = "162-169",
+ paper = "Byte79.pdf"
+}
+
@misc{Remy17,
author = "Remy, Didier",
title = {{Type Systems for Programming Languages}},
@@ -12384,6 +13515,69 @@ paper = "Brea89.pdf"
isbn = "0-262-18223-8"
}
+@phdthesis{Roor00,
+ author = "Roorda, Jan-Willem",
+ title = {{Pure Type Systems for Functional Programming}},
+ year = "2000",
+ school = "University of Utrecht",
+ abstract =
+ "We present a functional programming language based on Pure Type
+ Systems (PTSs). We show how we can define such a language by
+ extending the PTS framework with algebraic data types, case
+ expressions and definitions. To be able to experiment with our
+ language we present an implementation of a type checker and an
+ interpreter for our language. PTSs are well suited as a basis for a
+ functional programming language because they are at the top of a
+ hierarchy of increasingly stronger type systems. The concepts of
+ `existential types', `rank-n polymorphism' and `dependent types' arise
+ naturally in functional programming languages based on the systems in
+ this hierarchy. There is no need for ad-hoc extensions to incorporate
+ these features. The type system of our language is more powerful than
+ the Hindley-Milner system. We illustrate this fact by giving a number
+ of meaningful programs that cannot be typed in Haskell but are typable
+ in our language. A `real world' example of such a program is the
+ mapping of a specialisation of a Generic Haskell function to a Haskell
+ function. Unlike the description of the Henk language by Simon Peyton
+ Jones and Erik Meijer we give a complete formal definition of the type
+ system and the operational semantics of our language. Another
+ difference between Henk and our language is that our language is
+ defined for a large class of Pure Type Systems instead of only for the
+ systems of the $\lambda$-cube.",
+ paper = "Roor00.pdf",
+ keywords = "printed"
+}
+
+@book{Royx03,
+ author = "Van Roy, Peter and Haridi, Seif",
+ title = {{Concepts, Techniques, and Models of Computer Programming}},
+ year = "2003",
+ link =
+ "\url{http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.102.7366&rep=rep1&type=pdf}",
+ publisher = "MIT",
+ isbn = "978-0262220699",
+ paper = "Royx03.pdf"
+}
+
+@article{Russ77,
+ author = "Russell, Bruce D.",
+ title = {{Implementation Correctness Involving a Language with GOTO
+ Statements}},
+ journal = "SIAM Journal of Computing",
+ volume = "6",
+ number = "3",
+ year = "1977",
+ abstract =
+ "Two languages, one a simple structured programming language, the
+ other a simple goto language, are defined. A denotational semantics is
+ given for each language. An interpreter for the goto language is given
+ and is proved correct with respect to the denotational semantics. A
+ compiler from the structured to the goto language is defined and
+ proved to be a semantically invariant translation of programs. The
+ proofs are by computational induction.",
+ paper = "Russ77.pdf",
+ keywords = "printed"
+}
+
@techreport{Sabr92,
author = "Sabry, Amr and Felleisen, Matthias",
title = {{Reasoning About Programs in Continuation-Passing Style}},
@@ -12484,6 +13678,29 @@ paper = "Brea89.pdf"
paper = "Scot71.pdf"
}
+@article{Shie95,
+ author = "Shieber, Stuart M. and Schabes, Yves and Pereira, Fernando C.N.",
+ title = {{Principles and Implementation of Deductive Parsing}},
+ journal = "J. Logic Programming",
+ volume = "24",
+ number = "1-2",
+ pages = "3-36",
+ year = "1995",
+ abstract =
+ "We present a system for generating parsers based directly on the
+ metaphor of parsing as deduction. Parsing algorithms can be
+ represented directly as deduction systems, and a single deduction
+ engine can interpret such deduction systems so as to implement the
+ corresponding parser. The method generalizes easily to parsers for
+ augmented phrase structure formalisms, such as definite-clause
+ grammars and other logic grammar formalisms, and has been used for
+ rapid prototyping of parsing algorithms for a variety of formalisms
+ including variants of tree-adjoining grammars, categorial grammars,
+ and lexicalized context-free grammars.",
+ paper = "Shie95.pdf",
+ keywords = "printed"
+}
+
@inproceedings{Shie02,
author = "Shields, Mark and Jones, Simon Peyton",
title = {{First-Class Modules for Haskell}},
@@ -12518,6 +13735,22 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@book{Shan94,
+ author = "Shankar, N.",
+ title = {{Metamathematics, Machines, and G{\"o}del's Proof}},
+ publisher = "Cambridge University Press",
+ year = "1994",
+ isbn = "0-521-58533-3"
+}
+
+@book{Shoe67,
+ author = "Shoenfield, Joseph R.",
+ title = {{Mathematical Logic}},
+ publisher = "Association for Symbolic Logic",
+ year = "1967",
+ isbn = "1-56881-135-7"
+}
+
@misc{Smit15,
author = "Smith, Peter",
title = {{Some Big Books on Mathematical Logic}},
@@ -12599,6 +13832,80 @@ paper = "Brea89.pdf"
paper = "Stee78.pdf"
}
+@techreport{Stee78a,
+ author = "Steele, Guy Lewis and Sussman, Gerald Jay",
+ title = {{The Art of the Interpreter}},
+ institution = "MIT",
+ type = "technical report",
+ number = "AI Memo No. 453",
+ year = "1978",
+ abstract =
+ "We examine the effects of various language design decisions on the
+ programming styles available to a user of the language, with
+ particular emphasis on the ability to incrementally construct
+ modular systems. At each step we exhibit an interactive
+ meta-circular interpreter for the language under
+ consideration. Each new interpreter is the result of an
+ incremental change to the previous interpreter.
+
+ We explore the consequences of various variable binding
+ disciplines and the introduction of side effects. We find that
+ dynamic scoping is unsuitable for constructing procedural
+ abstractions, but has another role as an agent of modularity,
+ being a structured form of side effect. More general side effects
+ are also found to be necessary to promote modular style. We find
+ that the notion of side effect and the notion of equality (object
+ identity) are mutually constraining; to define one is to define
+ the other.
+
+ The interpreters we exhibit are all written in a simple dialect of
+ LISP, and all implement LISP-like languages. A subset of these
+ interpreters constitute a partial historical reconstruction of the
+ actual evolution of LISP."
+}
+
+@article{Stou12,
+ author = "Stoutemyer, David R.",
+ title = {{Series Crimes}},
+ journal = "ACM Comm. in Computer Algebra",
+ volume = "46",
+ number = "2",
+ year = "2012",
+ abstract =
+ "Puiseux series are power series in which the exponents can be
+ fractional and/or negative rational numbers. Several computer algebra
+ systems have one or more built-in or loadable functions for computing
+ truncated Puiseux series. Some are generalized to allow coefficients
+ containing functions of the series variable that are dominated by any
+ power of that variable, such as logarithms and nested logarithms of
+ the series variable. Some computer algebra systems also have built-in
+ or loadable functions that compute infinite Puiseux series.
+ Unfortunately, there are some little-known pitfalls in
+ computing Puiseux series. The most serious of these is expansions
+ within branch cuts or at branch points that are incorrect for some
+ directions in the complex plane. For example with each series
+ implementation accessible to you:
+
+ Compare the value of $(z^2 + z^3)^{3/2}$ with that of its
+ truncated series expansion about $z = 0$, approximated at
+ $z = -0.01$. Does the series converge to a value that is
+ the negative of the correct value?
+
+ Compare the value of $ln(z^2 + z^3)$ with its truncated series
+ expansion about $z = 0$, approximated at $z = -0.01 + 0.1i$. Does
+ the series converge to a value that is incorrect by $2\pi i$?
+
+ Compare $arctanh(-2 + ln(z)z)$ with its truncated series
+ expansion about $z = 0$, approximated at $z = -0.01$. Does the series
+ converge to a value that is incorrect by about $\pi i$?
+
+ At the time of this writing, most implementations that accommodate
+ such series exhibit such errors. This article describes how to avoid
+ these errors both for manual derivation of series and when
+ implementing series packages.",
+ paper = "Stou12.pdf"
+}
+
@article{Stuc05,
author = "Stuckey, Peter J. and Sulzmann, Martin",
title = {{A Theory of Overloading}},
@@ -12624,6 +13931,17 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@misc{Suxx68,
+ author = "Su, Jessica",
+ title = {{Solving Recurrences}},
+ link =
+ "\url{https://web.stanford.edu/class/archive/cs/cs161/cs161.1168/lecture3.pdf}",
+ year = "1968",
+ comment = "Stanford CS161 Lecture 3 class notes",
+ paper = "Suxx68.pdf",
+ keywords = "printed"
+}
+
@inproceedings{Talp92,
author = "Talpin, Jean-Pierre and Jouvelot, Pierre",
title = {{The Type and Effect Discipline}},
@@ -12676,6 +13994,57 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@book{Tars46,
+ author = "Tarski, Alfred",
+ title = {{Introduction to Logic}},
+ publisher = "Dover",
+ year = "1946",
+ isbn = "978-0-486-28462-0"
+}
+
+@misc{Tobi11,
+ author = "Tobin-Hochstadt, Sam and Felleisen, Matthias",
+ title = {{The Design and Implementation of Typed Scheme: From
+ Scripts to Programs}},
+ link = "\url{https://arxiv.org/pdf/1106.2575.pdf}",
+ year = "2011",
+ abstract =
+ "When scripts in untyped languages grow into large programs,
+ maintaining them becomes difficult. A lack of explicit type
+ annotations in typical scripting languages forces programmers to
+ (re)discover critical pieces of design information every time they
+ wish to change a program. This analysis step both slows down the
+ maintenance process and may even introduce mistakes due to the
+ violation of undiscovered invariants.
+
+ This paper presents Typed Scheme, an explicitly typed extension of PLT
+ Scheme, an untyped scripting language. Its type system is based on
+ the novel notion of occurrence typing, which we formalize and
+ mechanically prove sound. The implementation of Typed Scheme
+ additionally borrows elements from a range of approaches, including
+ recursive types, true unions and subtyping, plus polymorphism combined
+ with a modicum of local inference.
+
+ The formulation of occurrence typing naturally leads to a simple and
+ expressive version of predicates to describe refinement types. A
+ Typed Scheme program can use these refinement types to keep track of
+ arbitrary classes of values via the type system. Further, we show how
+ the Typed Scheme type system, in conjunction with simple recursive
+ types, is able to encode refinements of existing datatypes, thus
+ expressing both proposed variations of refinement types.",
+ paper = "Tobi11.pdf",
+ keywords = "printed"
+}
+
+@misc{Toft09,
+ author = "Tofte, Mads",
+ title = {{Tips for Computer Scientists on Standard ML (Revised)}},
+ link = "\url{http://www.itu.dk/people/tofte/publ/tips.pdf}",
+ year = "2009",
+ paper = "Toft09.pdf",
+ keywords = "printed"
+}
+
@misc{Tuho18,
author = "Tuhola, Henri",
title = {{An another MLsub study}},
@@ -12706,6 +14075,59 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@phdthesis{Vuil73,
+ author = "Vuillemin, Jean",
+ title = {{Proof Techniques for Recursive Programs}},
+ school = "Stanford University",
+ year = "1973",
+ abstract =
+ "The concept of least fixed-point of a continuous function can be
+ considered as the unifying thread of this dissertation.
+
+ The connections between fixed-points and recursive programs are
+ detailed in chapter 2, providing some insights on practical
+ implementations of recursion. There are two usual characterizations
+ of the least fixed-point of a continuous function. To the first
+ characterization, due to Knaster and Tarski, corresponds a class
+ of proof techniques for programs, as described in Chapter 3. The
+ other characterization of least fixed points, better known as
+ Kleene's first recursion theorem, is discussed in Chapter 4. It
+ has the advantage of being effective and it leads to a wider class
+ of proof techniques.",
+ paper = "Vuil73.pdf"
+}
+
+@misc{Waal03,
+ author = "Waaldijk, Frank",
+ title = {{On the Foundations of Constructive Mathematics}},
+ link = "\url{}",
+ year = "2003",
+ abstract =
+ "We discuss the foundations of constructive mathematics, including
+ recursive mathematics and intuitionism, in relation to classical
+ mathematics. There are connections with the foundations of physics,
+ due to the way in which the different branches of mathematics reflect
+ reality. Many different axioms and their interrelationship are
+ discussed. We show that there is a fundamental problem in bish
+ (Bishop’s school of constructive mathematics) with regard to its
+ current definition of ‘continuous function’. This problem is closely
+ related to the definition in bish of ‘locally compact’.
+
+ Possible approaches to this problem are discussed. Topology seems to
+ be a key to understanding many issues. We offer several new
+ simplifying axioms, which can form bridges between the various
+ branches of constructive mathematics and classical mathematics
+ (‘reuniting the antipodes’). We give a simplification of basic
+ intuitionistic theory, especially with regard to so-called ‘bar
+ induction’. We then plead for a limited number of axiomatic systems,
+ which differentiate between the various branches of mathematics.
+ Finally, in the appendix we offer bish an elegant topological
+ definition of ‘locally compact’, which unlike the current definition
+ is equivalent to the usual classical and/or intuitionistic definition
+ in classical and intuitionistic mathematics respectively.",
+ paper = "Waal03.pdf"
+}
+
@article{Wadl95,
author = "Wadler, Philip",
title = {{Monads for Functional Programming}},
@@ -12858,6 +14280,41 @@ paper = "Brea89.pdf"
abstract = "Explains the Y-Combinator"
}
+@article{Wied03a,
+ author = "Wiedijk, Freek and Zwanenburg, J.",
+ title = {{First Order Logic with Domain Conditions}},
+ journal = "LNCS",
+ volume = "2758",
+ pages = "221-237",
+ year = "2003",
+ abstract =
+ "This paper addresses the crucial issue in the design of a proof
+ development system of how to deal with partial functions and the
+ related question of how to treat undefined terms. Often the problem is
+ avoided by artificially making all functions total. However, that does
+ not correspond to the practice of everyday mathematics.
+
+ In type theory partial functions are modeled by giving functions extra
+ arguments which are proof objects. In that case it will not be
+ possible to apply functions outside their domain. However, having
+ proofs as first class objects has the disadvantage that it will be
+ unfamiliar to most mathematicians. Also many proof tools (like the
+ theorem prover Otter) will not be usable for such a logic. Finally
+ expressions get difficult to read because of these proof objects.
+
+ The PVS system solves the problem of partial functions
+ differently. PVS generates type-correctness conditions (TCCs) for
+ statements in its language. These are proof obligations that have to
+ be satisfied ‘on the side’ to show that statements are well-formed.
+
+ We propose a TCC-like approach for the treatment of partial functions
+ in type theory. We add domain conditions (DCs) to classical
+ first-order logic and show the equivalence with a first order system
+ that treats partial functions in the style of type theory.",
+ paper = "Wied03a.pdf",
+ keywords = "printed"
+}
+
@misc{Wied18,
author = "Wiedijk, Freek",
+ title = {{Formalizing 100 Theorems}},
@@ -12873,6 +14330,112 @@ paper = "Brea89.pdf"
paper = "Wied08.pdf"
}
+@inproceedings{Wins12,
+ author = "Winston, Patrick Henry",
+ title = {{The Next 50 Years: A Personal View}},
+ booktitle = "Biologically Inspired Cognitive Architectures 1",
+ year = "2012",
+ pages = "92-99",
+ publisher = "Elsevier B.V",
+ abstract =
+ "I review history, starting with Turing’s seminal paper, reaching
+ back ultimately to when our species started to outperform other
+ primates, searching for the questions that will help us develop a
+ computational account of human intelligence. I answer that the
+ right questions are: What’s different between us and the other
+ primates and what’s the same. I answer the {\sl what’s different}
+ question by saying that we became symbolic in a way that enabled
+ story understanding, directed perception, and easy communication,
+ and other species did not. I argue against Turing’s
+ reasoning-centered suggestions, offering that reasoning is just a
+ special case of story understanding. I answer the {\sl what’s the
+ same} question by noting that our brains are largely engineered in
+ the same exotic way, with information flowing in all directions at
+ once. By way of example, I illustrate how these answers can
+ influence a research program, describing the Genesis system, a
+ system that works with short summaries of stories, provided in
+ English, together with low-level {\sl common-sense rules} and
+ higher-level {\sl concept patterns}, likewise expressed in
+ English. Genesis answers questions, notes abstract concepts such
+ as {\sl revenge}, tells stories in a listener-aware way, and fills
+ in story gaps using precedents. I conclude by suggesting,
+ optimistically, that a genuine computational theory of human
+ intelligence will emerge in the next 50 years if we stick to the
+ right, biologically inspired questions, and work toward
+ biologically informed models.",
+ paper = "Wins12.pdf"
+}
+
+@inproceedings{Xixx99,
+ author = "Xi, Hongwei and Pfenning, Frank",
+ title = {{Dependent Types in Practical Programming}},
+ booktitle = "SIGPLAN-SIGACT Symp. on Principles of Programming
+ Languages",
+ year = "1999",
+ link = "\url{https://www.cs.cmu.edu/~fp/papers/popl99.pdf}",
+ publisher = "ACM",
+ abstract =
+ "We present an approach to enriching the type system of ML with a
+ restricted form of dependent types, where type index objects are drawn
+ from a constraint domain C, leading to the DML(C) language schema.
+ This allows specification and inference of significantly more precise
+ type information, facilitating program error detection and compiler
+ optimization. A major complication resulting from introducing
+ dependent types is that pure type inference for the enriched system is
+ no longer possible, but we show that type-checking a sufficiently
+ annotated program in DML(C) can be reduced to constraint
+ satisfaction in the constraint domain C. We exhibit the
+ unobtrusiveness of our approach through practical examples and prove
+ that DML(C) is conservative over ML. The main contribution of the
+ paper lies in our language design, including the formulation of
+ type-checking rules which makes the approach practical. To our
+ knowledge, no previous type system for a general purpose programming
+ language such as ML has combined dependent types with features including
+ datatype declarations, higher-order functions, general recursions,
+ let-polymorphism, mutable references, and exceptions. In addition,
+ we have finished a prototype implementation of DML(C) for an integer
+ constraint domain C, where constraints are linear inequalities (Xi
+ and Pfenning 1998).",
+ paper = "Xixx99.pdf",
+ keywords = "printed"
+}
+
+@phdthesis{Xuex13,
+ author = "Xue, Tao",
+ title = {{Theory and Implementation of Coercive Subtyping}},
+ school = "University of London",
+ year = "2013",
+ abstract =
+ "Coercive subtyping is a useful and powerful framework of subtyping
+ for type theories. In this thesis, we point out the problem in the
+ old formulation of coercive subtyping in [Luo99], give a new and
+ adequate formulation $T[C]$, the system that extends a type theory $T$
+ with coercive subtyping based on a set $C$ of basic subtyping
+ judgements, and show that coercive subtyping is a conservative
+ extension and, in a more general sense, a definitional extension.
+
+ We introduce an intermediate system, the star-calculus $T[C]^*$, in
+ which the positions that require coercion insertions are marked, and
+ show that $T[C]^*$ is a conservative extension of $T$ and that $T[C]^*$
+ is equivalent to $T[C]$. Furthermore, in order to capture all the
+ properties of the coercive subtyping framework, we introduce another
+ intermediate system $T[C]_{0K}$ which does not contain the coercion
+ application rules. We show that $T[C]^*$ is actually a definitional
+ extension of $T[C]_{0K}$, which is a conservative extension of $T$.
+ This makes clear what we mean by coercive subtyping being a
+ conservative extension and amends a technical problem that has led to
+ a gap in the earlier conservativity proof.
+
+ Another part of the work in this thesis concerns the implementation of
+ coercive subtyping in the proof assistant Plastic. Coercive subtyping
+ was implemented in Plastic by Paul Callaghan [CL01]. We have done
+ some improvement based on that work, fixed some problems of Plastic,
+ and implemented a new kind of data type called {\sl dot-types}, which are
+ special data types useful in formal semantics to describe interesting
+ linguistic phenomena such as copredication, in Plastic.",
+ paper = "Xuex13.pdf"
+}
+
@phdthesis{Zeil09,
author = "Zeilberger, Noam",
title = {{The Logical Basis of Evaluation Order and Pattern-Matching}},
@@ -13409,6 +14972,26 @@ paper = "Brea89.pdf"
paper = "Avig17.pdf"
}
+@misc{Avig17c,
+ author = "Avigad, Jeremy and Holzl, Johannes and Serafin, Luke",
+ title = {{A Formally Verified Proof of the Central Limit Theorem}},
+ link = "\url{https://arxiv.org/abs/1405.7012}",
+ year = "2017",
+ abstract =
+ "We describe a proof of the Central Limit Theorem that has been
+ formally verified in the Isabelle proof assistant. Our formalization
+ builds upon and extends Isabelle’s libraries for analysis and
+ measure-theoretic probability. The proof of the theorem uses
+ characteristic functions, which are a kind of Fourier transform, to
+ demonstrate that, under suitable hypotheses, sums of random
+ variables converge weakly to the standard normal distribution. We
+ also discuss the libraries and infrastructure that supported the
+ formalization, and reflect on some of the lessons we have learned
+ from the effort.",
+ paper = "Avig17c.pdf",
+ keywords = "printed"
+}
+
@techreport{Back73,
author = "Backus, John",
title = {{Programming Language Semantics and Closed Applicative Language}},
@@ -18103,7 +19686,7 @@ paper = "Brea89.pdf"
author = "Paulson, Lawrence C.",
title = {{ISABELLE: A Generic Theorem Prover}},
year = "1994",
- publisher = "Springer",
+ publisher = "Springer-Verlag",
isbn = "978-3-540-58244-1",
}
@@ -30048,7 +31631,7 @@ paper = "Brea89.pdf"
with this in the Axiom computer algebra system. As a result of this
experience, we suggest certain strengthenings of the algorithm.",
paper = "Dave92.pdf",
- keywords = "axiomref",
+ keywords = "axiomref, printed",
beebe = "Davenport:1993:PTR"
}
@@ -30110,6 +31693,16 @@ paper = "Brea89.pdf"
keywords = "axiomref"
}
+@article{Dave12b,
+ author = "Davenport, James H.",
+ title = {{Small Algorithms for Small Systems}},
+ journal = "ACM Comm. in Computer Algebra",
+ volume = "46",
+ number = "1",
+ year = "2012",
+ paper = "Dave12b.pdf"
+}
+
@misc{Dave15,
author = "Davenport, James H.",
title = {{SIAM AAG 15 and ICIAM 2015}},
@@ -30815,7 +32408,9 @@ paper = "Brea89.pdf"
This paper considers the link to the NAG Fortran Library from version
2.0 of Axiom and shows how we can build on this to extend and simplify
the interface using an expert system for choosing and using the
- numerical routines."
+ numerical routines.",
+ paper = "Dupe05.pdf",
+ keywords = "axiomref"
}
@article{Duva92,
@@ -38679,7 +40274,8 @@ paper = "Brea89.pdf"
author = "Zippel, Richard",
title = {{The Weyl Computer Algebra Substrate}},
year = "1989",
- link = "\url{https://ecommons.cornell.edu/bitstream/handle/1813/6917/90-1077.pdf}",
+ link =
+ "\url{https://ecommons.cornell.edu/bitstream/handle/1813/6917/90-1077.pdf}",
paper = "Zipp89.pdf",
keywords = "axiomref, printed"
}
@@ -38726,8 +40322,8 @@ paper = "Brea89.pdf"
computer algebra systems (polynomial, rational functions, matrices,
etc.), domains (e.g., Z, Q[x, y, z]) are also first class objects in
Weyl.",
- paper = "Zipp93.pdf, printed",
- keywords = "axiomref"
+ paper = "Zipp93.pdf",
+ keywords = "axiomref, printed"
}
@article{Abbo95,
@@ -38790,6 +40386,13 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@misc{Alfo92,
+ author = "Alford, W.R. and Granville, A. and Pomerance, C.",
+ title = {{There are Infinitely Many Carmichael Numbers}},
+ year = "1992",
+ comment = "Preprint"
+}
+
@book{Altm05,
author = "Altmann, Simon L.",
title = {{Rotations, Quaternions, and Double Groups}},
@@ -38824,6 +40427,15 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@techreport{Arna91,
+ author = "Arnault, F.",
+ title = {{Le Test de Primalite de Rabin-Miller: Un Nombre compose
+ qui le ``passe''}},
+ comment = "Report 61",
+ institution = "Universite de Poitiers Departement de Mathematiques",
+ year = "1991"
+}
+
@article{Aste02,
author = "Astesiano, Egidio and Bidoit, Michel and Kirchner, Helene and
Krieg-Bruckner, Bernd and Mosses, Peter D. and Sannella, Donald
@@ -39292,6 +40904,16 @@ paper = "Brea89.pdf"
paper = "Bitt94.pdf"
}
+@misc{Blai71,
+ author = "Blair, Fred W. and Griesmer, James H. and Harry, Joseph and
+ Pivovonsky, Mark",
+ title = {{Design and Development Document for Lisp on Several S/360
+ Operating Systems}},
+ year = "1971",
+ publisher = "IBM Research",
+ paper = "Blai71.pdf"
+}
+
@book{Boge77,
author = "Bogen, Richard",
title = {{MACSYMA Reference Manual, Version 9}},
@@ -40078,6 +41700,13 @@ paper = "Brea89.pdf"
paper = "Daly88.pdf"
}
+@inproceedings{Damg91,
+ author = "Damgard, I. and Landrock, P.",
+ title = {{Improved Bounds for the Rabin Primality Test}},
+ booktitle = "Proc. 3rd IMA Conf. on Coding and Cryptography",
+ year = "1991"
+}
+
@misc{Dave80b,
author = "Davenport, J.H. and Jenks, R.D.",
title = {{SCRATCHPAD/370: Modes and Domains}},
@@ -40108,6 +41737,15 @@ paper = "Brea89.pdf"
paper = "Dave81b.pdf"
}
+@techreport{Dave87,
+ author = "Davenport, James H. and Smith, G.C.",
+ title = {{Rabin's Primality Testing Algorithm -- a Group Theory View}},
+ institution = "University of Bath",
+ type = "technical report",
+ number = "87-04",
+ year = "1987"
+}
+
@techreport{Deme79,
author = "Demers, A. and Donahue, J.",
title = {{Revised Report on RUSSELL}},
@@ -40838,7 +42476,7 @@ paper = "Brea89.pdf"
computation, distributed problem-solving environments, and the
coupling of tools for specific applications.",
paper = "Gray98.pdf",
- keywords = "printed"
+ keywords = "printed, axiomref"
}
@artcile{Grie11,
@@ -41116,6 +42754,15 @@ paper = "Brea89.pdf"
paper = "Hebi10.pdf"
}
+@misc{Here96,
+ author = "Hereman, Willy",
+ title = {{The Incredible World of Symbolic Mathematics.
+ A Review of Computer Algebra Systems}},
+ year = "1996",
+ link = "\url{https://inside.mines.edu/~whereman/papers/Hereman-PhysicsWorld-9-March1996.pdf}",
+ paper = "Here96.pdf"
+}
+
@article{Homa94,
author = "Homann, Karsten and Calmet, Jacques",
title = {{Combining Theorem Proving and Symbolic Mathematical Computing}},
@@ -41319,6 +42966,13 @@ paper = "Brea89.pdf"
\newline\refto{category MONADWU MonadWithUnit}"
}
+@misc{Jaes91,
+ author = "Jaeschke, G.",
+ title = {{Private Communication}},
+ comment = "to James Davenport",
+ year = "1991"
+}
+
@techreport{Jenk86c,
author = "Jenks, Richard D.",
title = {{A History of the SCRATCHPAD Project (1977-1986)}},
@@ -41634,6 +43288,13 @@ paper = "Brea89.pdf"
isbn = "0-937073-81-4"
}
+@book{Kobl87,
+ author = "Koblitz, N.",
+ title = {{A Course in Number Theory and Cryptography}},
+ publisher = "Springer-Verlag",
+ year = "1987"
+}
+
@article{Kohl16,
author = "Kohlhase, Michael and Rabe, Florian",
title = {{QED Reloaded: Towards a Pluralistic Formal Library of
@@ -41883,6 +43544,13 @@ paper = "Brea89.pdf"
pages = "231--253"
}
+@misc{Leec92,
+ author = "Leech, J.",
+ title = {{Private Communication}},
+ comment = "to James Davenport",
+ year = "1992"
+}
+
@book{Lege11,
author = "Legendre, George L. and Grazini, Stefano",
title = {{Pasta by Design}},
@@ -41891,6 +43559,16 @@ paper = "Brea89.pdf"
year = "2011"
}
+@article{Lens81,
+ author = "Lenstra Jr., H.W.",
+ title = {{Primality Testing Algorithms}},
+ journal = "Lecture Notes in Mathematics",
+ volume = "901",
+ publisher = "Springer-Verlag",
+ pages = "243-257",
+ year = "1981"
+}
+
@article{Lesc87,
author = "Lescanne, Pierre",
title = {{Current Trends in Rewriting Techniques and Related Problems}},
@@ -42816,6 +44494,15 @@ paper = "Brea89.pdf"
publisher = "Cambridge University Press"
}
+@article{Pome80,
+ author = "Pomerance, C. and Selfridge, J.L. and Wagstaff, Jr., S.S.",
+ title = {{The Pseudoprimes up to $25\cdot 10^9$}},
+ journal = "Math. Comp.",
+ volume = "35",
+ pages = "1003-1026",
+ year = "1980"
+}
+
@inproceedings{Popo09,
author = "Popov, Nikolaj and Jebelean, Tudor",
title = {{Functional Program Verification in Theorema Soundness and
@@ -43008,6 +44695,15 @@ paper = "Brea89.pdf"
paper = "Rabe17.pdf, printed"
}
+@article{Rabi80,
+ author = "Rabin, M.O.",
+ title = {{Probabilistic Algorithm for Testing Primality}},
+ journal = "J. Number Theory",
+ volume = "12",
+ pages = "128-138",
+ year = "1980"
+}
+
@phdthesis{Rave91,
author = "Ravenscroft, Robert Andrews Jr.",
title = {{Generating Function Algorithms for Symbolic Computation}},
diff --git a/books/bookvolbib.pamphlet b/books/bookvolbib.pamphlet
index b60cc35..894da03 100644
--- a/books/bookvolbib.pamphlet
+++ b/books/bookvolbib.pamphlet
@@ -8965,7 +8965,7 @@ when shown in factored form.
\begin{chunk}{axiom.bib}
@book{Wilk63,
author = "Wilkinson, J. H.",
- title = {{Rounding Erroors in Algebraic Processes}},
+ title = {{Rounding Errors in Algebraic Processes}},
publisher = "HMSO",
series = "Notes on Applied Science, No. 32",
year = "1963"
@@ -10104,6 +10104,19 @@ when shown in factored form.
\end{chunk}
+\index{Avigad, Jeremy}
+\begin{chunk}{axiom.bib}
+@misc{Avig18,
+ author = "Avigad, Jeremy",
+ title = {{Mathematical Language from a Design Perspective}},
+ year = "2018",
+ link = "\url{http://www.andrew.cmu.edu/user/avigad/Talks/san_diego.pdf}",
+ paper = "Avig18.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Bates, Joseph L.}
\index{Constable, Robert L.}
\begin{chunk}{axiom.bib}
@@ -11098,6 +11111,148 @@ when shown in factored form.
\end{chunk}
\index{Kovacs, Laura}
+\index{Jebelean, Tudor}
+\begin{chunk}{axiom.bib}
+@article{Kova04,
+ author = "Kovacs, Laura and Jebelean, Tudor",
+ title = {{Practical Aspects of Imperative Program Verification in
+ Theorema}},
+ journal = "Analele Universitatu din Timisoara",
+ volume = "XXXXIX",
+ number = "2",
+ year = "2004",
+ abstract =
+ "Approaching the problem of imperative program verification from a
+ practical point of view has certain implications concerning: the style
+ of specifications, the programming language which is used, the help
+ provided to the user for finding appropriate loop invariants, the
+ theoretical frame used for formal verification, the language used
+ for expressing generated verification theorems as well as the database
+ of necessary mathematical knowledge, and finally the proving power,
+ style and language. The Theorema system (www.theorema.org) has
+ certain capabilities which make it appropriate for such a practical
+ approach. Our approach to imperative program verification in Theorema
+ is based on Hoare Logic and the Weakest Precondition Strategy. In
+ this paper we present some practical aspects of our work, as well as a
+ novel method for verification of programs that contain loops, namely
+ a method based on recurrence equation solvers for generating the
+ necessary loop invariants and termination terms automatically. We
+ have tested our system with numerous examples, some of these examples
+ are presented at the end of the paper.",
+ paper = "Kova04.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kovacs, Laura}
+\index{Jebelean, Tudor}
+\begin{chunk}{axiom.bib}
+@article{Kova04a,
+ author = "Kovacs, Laura and Jebelean, Tudor",
+ title = {{Automated Generation of Loop Invariants by Recurrence
+ Solving in Theorema}},
+ year = "2004",
+ abstract =
+ "Most of the properties established during program verification are
+ either invariants or depend crucially on invariants. The effectiveness
+ of automated verification of (imperative) programs is therefore
+ sensitive to the ease with which invariants, even trivial ones, can be
+ automatically deduced. We present a method for invariant generation
+ that relies on combinatorial techniques, namely on recurrence solving
+ and variable elimination. The effectiveness of the method is
+ demonstrated on examples.",
+ paper = "Kova04a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kovacs, Laura}
+\begin{chunk}{axiom.bib}
+@phdthesis{Kova07,
+ author = "Kovacs, Laura",
+ title = {{Automated Invariant Generation by Algebraic Techniques for
+ Imperative Program Verification in Theorema}},
+ school = "Johannes-Kepler University Linz",
+ year = "2007",
+ abstract =
+
+ "This thesis presents algebraic and combinatorial approaches for
+ reasoning about imperative loops with assignments, sequencing and
+ conditionals.
+
+ A certain family of loops, called P-solvable , is defined for which
+ the value of each program variable can be expressed as a polynomial of
+ the initial values of variables, the loop counter, and some new
+ variables where there are algebraic dependencies among the new
+ variables. For such loops, a systematic method is developed for
+ generating polynomial invariants. Further, if the bodies of these
+ loops consist only of assignments and conditional branches, and test
+ conditions in the loop and conditionals are ignored, the method is
+ shown to be complete for some special cases. By completeness we mean
+ that it generates a set of polynomials from which, under additional
+ assumptions for loops with conditional branches, any polynomial
+ invariant can be derived. Many non-trivial algorithms working on
+ numbers can be naturally implemented using P-solvable loops.
+
+ By combining advanced techniques from algorithmic combinatorics,
+ symbolic summation, computer algebra and computational logic, a
+ framework is developed for generating polynomial invariants for
+ imperative programs operating on numbers.
+
+ Exploiting the symbolic manipulation capabilities of the computer
+ algebra system Mathematica , these techniques are implemented in a new
+ software package called Aligator . By using several combinatorial
+ packages developed at RISC, Aligator includes algorithms for solving
+ special classes of recurrence relations (those that are either
+ Gosper-summable or C-finite) and generating polynomial dependencies
+ among algebraic exponential sequences. Using Aligator , a complete set
+ of polynomial invariants is successfully generated for numerous
+ imperative programs working on numbers.
+
+ The automatically obtained invariant assertions are subsequently used
+ for proving the partial correctness of programs by generating
+ appropriate verification conditions as first-order logical formulas.
+ Based on Hoare logic and the weakest precondition strategy, this
+ verification process is supported in an imperative verification
+ environment implemented in the Theorema system. Theorema is
+ convenient for such an integration given that it is built on top of
+ the computer algebra system Mathematica and includes automated methods
+ for theorem proving in predicate logic, domain specific reasoning and
+ proving by induction.",
+ paper = "Kova07.pdf"
+}
+
+\end{chunk}
+
+\index{Kovacs, Laura}
+\begin{chunk}{axiom.bib}
+@inproceedings{Kova08,
+ author = "Kovacs, Laura",
+ title = {{Reasoning Algebraically About P-Solvable Loops}},
+ booktitle = "Int. Conf. on Tools and Algorithms for the Construction
+ and Analysis of Systems",
+ pages = "249-264",
+ year = "2008",
+ abstract =
+ "We present a method for generating polynomial invariants for a
+ subfamily of imperative loops operating on numbers, called the
+ P-solvable loops. The method uses algorithmic combinatorics and
+ algebraic techniques. The approach is shown to be complete for some
+ special cases. By completeness we mean that it generates a set of
+ polynomial invariants from which, under additional assumptions, any
+ polynomial invariant can be derived. These techniques are implemented
+ in a new software package Aligator written in Mathematica and
+ successfully tried on many programs implementing interesting
+ algorithms working on numbers.",
+ paper = "Kova08.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kovacs, Laura}
\begin{chunk}{axiom.bib}
@article{Kova16,
author = "Kovacs, Laura",
@@ -11318,6 +11473,8 @@ when shown in factored form.
@article{Luox13,
author = "Luo, Zhaohui and Soloviev, Sergei and Xue, Tao",
title = {{Coercive Subtyping: Theory and Implementation}},
+ journal = "Information and Computation",
+ year = "2013",
volume = "223",
pages = "18-42",
link =
@@ -12364,6 +12521,20 @@ when shown in factored form.
\end{chunk}
+\index{Abramsky, S.}
+\index{Gabbay, Dov M.}
+\index{Maibaum, T.S.E.}
+\begin{chunk}{axiom.bib}
+@book{Abra92,
+ author = "Abramsky, S. and Gabbay, Dov M. and Maibaum, T.S.E.",
+ title = {{Handbook of Logic in Computer Science (Volume 2)}},
+ publisher = "Oxford Science Publications",
+ year = "1992",
+ isbn = "0-19-853761-1"
+}
+
+\end{chunk}
+
\index{Akbarpour, Behzad}
\index{Paulson, Lawrence C.}
\begin{chunk}{axiom.bib}
@@ -12387,6 +12558,58 @@ when shown in factored form.
\end{chunk}
+\index{Altenkirch, Thorsten}
+\begin{chunk}{axiom.bib}
+@misc{Alte04,
+ author = "Altenkirch, Thorsten",
+ title = {{$\lambda$-calculus and types}},
+ comment = "Lecture Notes; APPSEM Spring School 2004",
+ link = "\url{http://www.cs.nott.ac.uk/~psztxa/publ/mgs04.pdf}",
+ year = "2004",
+ paper = "Alte04.pdf",
+ keywords = "printed",
+}
+
+\end{chunk}
+
+\index{Altenkirch, Thorsten}
+\index{McBride, Conor}
+\index{McKinna, James}
+\begin{chunk}{axiom.bib}
+@misc{Alte05,
+ author = "Altenkirch, Thorsten and McBride, Conor and McKinna, James",
+ title = {{Why Dependent Types Matter}},
+ link = "\url{http://www.cs.nott.ac.uk/~psztxa/publ/ydtm.pdf}",
+ year = "2005",
+ abstract =
+ "We exhibit the rationale behind the design of Epigram, a dependently
+ typed programming language and interactive program development system,
+ using refinements of a well known program -- merge sort -- as a running
+ example. We discuss its relationship with other proposals to introduce
+ aspects of dependent types into functional programming languages and
+ sketch some topics for further work in this area.",
+ paper = "Alte05.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Anderson, E.R.}
+\index{Belz, F.C.}
+\index{Blum, E.K.}
+\begin{chunk}{axiom.bib}
+@article{Ande78,
+ author = "Anderson, E.R. and Belz, F.C. and Blum, E.K.",
+ title = {{Extending an Implementation Language to a Specification Language}},
+ journal = "LNCS",
+ volume = "75",
+ year = "1978",
+ paper = "Ande78.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\begin{chunk}{axiom.bib}
@misc{Anon18,
author = "Anonymous",
@@ -12471,6 +12694,69 @@ when shown in factored form.
\index{Appel, Andrew W.}
\begin{chunk}{axiom.bib}
+@article{Appe89,
+ author = "Appel, Andrew W.",
+ title = {{Run-time Tags Aren't Necessary}},
+ journal = "Lisp and Symbolic Computation",
+ volume = "2",
+ pages = "153-162",
+ year = "1989",
+ publisher = "Kluwer",
+ abstract =
+ "Many modern programming environments use tag bits at runtime to
+ distinguish objects of different types. This is particularly
+ common in systems with garbage collection, since the garbage
+ collector must be able to distinguish pointers from non-pointers,
+ and to learn the length of records pointed to.
+
+ The use of tag bits leads to inefficiency. In addition to the
+ obvious space overhead (tag bits and record descriptors occupy
+ memory space), there is a time overhead: tag bits must be stripped
+ off of data before arithmetic operations are performed, and
+ re-attached to the data when it is stored into memory. This takes
+ either extra instructions at runtime, or special tag-handling
+ hardware, or both.
+
+ This paper shows how the use of tag bits, record descriptor words,
+ explicit type parameters, and the like can be avoided in languages
+ (like ML) with static polymorphic typechecking. Through a form of
+ tag will still be required for user-defined variant records, all
+ other type information can be encoded once -- in the program --
+ rather than replicated many times in the data. This can lead to
+ savings in both space and time.",
+ paper = "Appe89.pdf"
+}
+
+\end{chunk}
+
+\index{Appel, Andrew W.}
+\index{MacQueen, David B.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Appe91,
+ author = "Appel, Andrew W. and MacQueen, David B.",
+ title = {{Standard ML of New Jersey}},
+ booktitle = "Int. Symp. on Prog. Lang. Implementations and Logic
+ Programming",
+ publisher = "Springer-Verlag",
+ pages = "1-13",
+ year = "1991",
+ abstract =
+ "The Standard ML of New Jersey compiler has been under development for
+ five years now. We have developed a robust and complete environment
+ for Standard ML that supports the implementation of large software
+ systems and generates efficient code. The compiler has also served as
+ a laboratory for developing novel implementation techniques for a
+ sophisticated type and module system, continuation based code
+ generation, efficient pattern matching, and concurrent programming
+ features.",
+ paper = "Appe91.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Appel, Andrew W.}
+\begin{chunk}{axiom.bib}
@book{Appe92,
author = "Appel, Andrew W.",
title = {{Compiling with Continuations}},
@@ -12483,6 +12769,29 @@ when shown in factored form.
\index{Appel, Andrew W.}
\begin{chunk}{axiom.bib}
+@article{Appe93,
+ author = "Appel, Andrew W.",
+ title = {{A Critique of Standard ML}},
+ journal = "J. Functional Programming",
+ volume = "3",
+ number = "4",
+ pages = "391-429",
+ year = "1993",
+ abstract =
+ "Standard ML is an excellent language for many kinds of
+ programming. It is safe, efficient, suitably abstract, and
+ concise. There are many aspects of the language that work
+ well. However, nothing is perfect: Standard ML has a few
+ shortcomings. In some cases there are obvious solutions, and in
+ other cases further research is required.",
+ paper = "Appe93.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Appel, Andrew W.}
+\begin{chunk}{axiom.bib}
@book{Appe98,
author = "Appel, Andrew W.",
title = {{Modern Compiler Implementation in ML}},
@@ -12493,6 +12802,38 @@ when shown in factored form.
\end{chunk}
+\index{Aspinall, David}
+\index{Hofmann, Martin}
+\begin{chunk}{axiom.bib}
+@incollection{Aspi05,
+ author = "Aspinall, David and Hofmann, Martin",
+ title = {{Dependent Types}},
+ booktitle = "Advanced Topics in Types and Programming Languages",
+ publisher = "MIT Press",
+ isbn = "0-262-16228-8",
+ pages = "45-46",
+ paper = "Aspi05.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Avigad, Jeremy}
+\begin{chunk}{axiom.bib}
+@article{Avig18a,
+ author = "Avigad, Jeremy",
+ title = {{The Mechanization of Mathematics}},
+ journal = "Notices of the AMS",
+ volume = "65",
+ number = "6",
+ year = "2018",
+ pages = "681-690",
+ paper = "Avig18a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{B} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Back, R.J.R}
@@ -12630,6 +12971,20 @@ when shown in factored form.
\end{chunk}
+\index{Barendregt, Henk}
+\index{Dekkers, Wil}
+\index{Statman, Richard}
+\begin{chunk}{axiom.bib}
+@book{Bare13,
+ author = "Barendregt, Henk and Dekkers, Wil and Statman, Richard",
+ title = {{Lambda Calculus with Types}},
+ publisher = "Cambridge University Press",
+ year = "2013",
+ isbn = "978-0-521-76614-2"
+}
+
+\end{chunk}
+
\index{Barras, Bruno}
\begin{chunk}{axiom.bib}
@techreport{Barr96a,
@@ -12683,6 +13038,20 @@ when shown in factored form.
\end{chunk}
+\index{Basu, Saugata}
+\index{Pollack, Richard}
+\index{Roy, Marie-Francoise}
+\begin{chunk}{axiom.bib}
+@book{Basu10,
+ author = "Basu, Saugata and Pollack, Richard and Roy, Marie-Francoise",
+ title = {{Algorithms in Real Algebraic Geometry}},
+ publisher = "Springer-Verlag",
+ year = "2010",
+ isbn = "978-3-642-06964-2"
+}
+
+\end{chunk}
+
\index{Benton, Nick}
\index{Zarfaty, Uri}
\begin{chunk}{axiom.bib}
@@ -12707,6 +13076,38 @@ when shown in factored form.
\end{chunk}
+\index{Beeson, Michael J.}
+\begin{chunk}{axiom.bib}
+@book{Bees80,
+ author = "Beeson, Michael J.",
+ title = {{Foundations of Constructive Mathematics}},
+ publisher = "Springer-Verlag",
+ year = "1980",
+ isbn = "3-540-12173-0"
+}
+
+\end{chunk}
+
+\index{Benini, Marco}
+\begin{chunk}{axiom.bib}
+@misc{Beni95,
+ author = "Benini, Marco",
+ title = {{Barendregt's $\lambda$-Cube in Isabelle}},
+ link = "\url{}",
+ year = "1995",
+ abstract =
+ "We present an implementation of Barendregt’s $\lambda$-Cube in the
+ Isabelle proof system. Isabelle provides many facilities for
+ developing a useful specification and proving environment from the
+ basic formulation of formal systems. We used those facilities to
+ provide an environment where the user can describe problems and derive
+ proofs interactively.",
+ paper = "Beni95.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Bonnaire-Sergeant, Ambrose}
\begin{chunk}{axiom.bib}
@misc{Bonn18,
@@ -12739,7 +13140,66 @@ when shown in factored form.
is described at length. The theorems proved by the program include
that REVERSE is its own inverse and that a particular SORT program is
correct. A list of theorems proved by the program is given.",
- paper = "Boye75.pdf"
+ paper = "Boye75.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Boyer, Robert S.}
+\index{Moore, J Strother}
+\begin{chunk}{axiom.bib}
+@article{Boye84,
+ author = "Boyer, Robert S. and Moore, J Strother",
+ title = {{Proof-Checking, Theorem-Proving, and Program Verification}},
+ journal = "Contemporary Mathematics",
+ volume = "29",
+ year = "1984",
+ abstract =
+ "This article consists of three parts: a tutorial introduction to a
+ computer program that proves theorems by induction; a brief
+ description of recent applications of that theorem-prover; and a
+ discussion of several nontechnical aspects of the problem of building
+ automatic theorem-provers. The theorem-prover described has proved
+ theorems such as the uniqueness of prime factorizations, Fermat's
+ theorem, and the recursive unsolvability of the halting problem.
+
+ The article is addressed to those who know nothing about automatic
+ theorem-proving but would like a glimpse of one such system. This
+ article definitely does not provide a balanced view of all automatic
+ theorem-proving, the literature of which is already rather large and
+ technical. Nor do we describe the details of our theorem-proving
+ system, but they can be found in the books, articles, and technical
+ reports that we reference.
+
+ In our opinion, progress in automatic theorem-proving is largely a
+ function of the mathematical ability of those attempting to build such
+ systems. We encourage good mathematicians to work in the field.",
+ paper = "Boye84.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Boyer, Robert S.}
+\index{Moore, J Strother}
+\begin{chunk}{axiom.bib}
+@article{Boye84a,
+ author = "Boyer, Robert S. and Moore, J Strother",
+ title = {{A Mechanical Proof of the Turing Completeness of Pure Lisp}},
+ journal = "Contemporary Mathematics",
+ volume = "29",
+ year = "1984",
+ abstract =
+ "We describe a proof by a computer program of the Turing completeness
+ of a computational paradigm akin to Pure LISP. That is, we define
+ formally the notions of a Turing machine and of a version of Pure
+ LISP and prove that anything that can be computed by a Turing machine
+ can be computed by LISP. While this result is straightforward, we
+ believe this is the first instance of a machine proving the Turing
+ completeness of another computational paradigm.",
+ paper = "Boye84a.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ -12760,6 +13220,49 @@ when shown in factored form.
\end{chunk}
+\index{Brooks, Rodney A.}
+\begin{chunk}{axiom.bib}
+@article{Broo87a,
+ author = "Brooks, Rodney A.",
+ title = {{Intelligence Without Representation}},
+ journal = "Artificial Intelligence",
+ volume = "47",
+ year = "1991",
+ pages = "139-159",
+ abstract =
+ "Artificial intelligence research has foundered on the issue of
+ representation. When intelligence is approached in an incremental
+ manner, with strict reliance on interfacing to the real world through
+ perception and action, reliance on representation disappears. In this
+ paper we outline our approach to incrementally building complete
+ intelligent Creatures. The fundamental decomposition of the
+ intelligent system is not into independent information processing
+ units which must interface with each other via representations.
+ Instead, the intelligent system is decomposed into independent and
+ parallel activity producers which all interface directly to the world
+ through perception and action, rather than interface to each other
+ particularly much. The notions of central and peripheral systems
+ evaporate -- everything is both central and peripheral. Based on these
+ principles we have built a very successful series of mobile robots
+ which operate without supervision as Creatures in standard office
+ environments.",
+ paper = "Broo87a.pdf"
+}
+
+\end{chunk}
+
+\begin{chunk}{axiom.bib}
+@misc{Byte79,
+ author = "Byte Publications",
+ title = {{LISP}},
+ link = "\url{https://ia802603.us.archive.org/30/items/byte-magazine-1979-08/1979_08_BYTE_04-08_LISP.pdf}",
+ publisher = "McGraw-Hill",
+ year = "1979",
+ paper = "Byte79.pdf"
+}
+
+\end{chunk}
+
\subsection{C} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Caldwell, James L.}
@@ -12920,7 +13423,62 @@ when shown in factored form.
and techniques for proving facts about specific programs. We also give
a simpler version of the GiSdeI-Kleene way of representing computable
functions by first order sentences.",
- paper = "Cart78.pdf"
+ paper = "Cart78.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Cartwright, Robert}
+\begin{chunk}{axiom.bib}
+@article{Cart84,
+ author = "Cartwright, Robert",
+ title = {{Recursive Programs as Definitions in First-Order Logic}},
+ journal = "SIAM J. Computing",
+ volume = "13",
+ number = "2",
+ pages = "374-408",
+ year = "1984",
+ abstract =
+ "Despite the reputed limitations of first order logic, it is easy to
+ state and prove almost all interesting properties of recursive
+ programs within a simple first order theory, by using an approach we
+ call 'first order programming logic'. Unlike higher order logics based
+ on fixed-point induction, first order programming logic is founded on
+ deductive principles that are familiar to most programmers. Informal
+ structural induction arguments (such as termination proofs for LISP
+ append, McCarthy’s 91-function, and Ackermann’s function) have direct
+ formalizations within the system.
+
+ The essential elements of first order programming logic are:
+ \begin{enumerate}
+ \item The data domain $D$ must be a finitely generated set that
+ explicitly includes the 'undefined' object $\bot$
+ (representing nontermination) as well as ordinary data objects.
+ \item Recursive programs over $D$ are treated as logical definitions
+ augmenting a first order theory of the data domain.
+ \item The interpretation of a recursive program is the least
+ fixed-point of the functional corresponding to the program.
+ \end{enumerate}
+
+ Since the data domain $D$ is a finitely generated set, the first order
+ axiomatization of $D$ includes a structural induction axiom scheme. This
+ axiom scheme serves as the fundamental 'proof rule' of first order
+ programming logic.
+
+ The major limitation of first order programming logic is that every
+ fixed-point of the functional corresponding to a recursive program is
+ an acceptable interpretation for the program. The logic fails to
+ capture the notion of least fixed-point. To overcome this limitation,
+ we present a simple, effective procedure for transforming an arbitrary
+ recursive program into an equivalent recursive program that has a
+ unique fixed-point, yet retains the logical structure of the
+ original. Given this transformation technique, it is our experience
+ that first order programming logic is sufficiently powerful to prove
+ almost any property of practical interest about the functions computed
+ by recursive programs.",
+ paper = "Cart84.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ -12939,6 +13497,32 @@ when shown in factored form.
\end{chunk}
+\index{Chen, William Y.C.}
+\index{Paule, Peter}
+\index{Saad, Husam L.}
+\begin{chunk}{axiom.bib}
+@article{Chen08,
+ author = "Chen, William Y.C. and Paule, Peter and Saad, Husam L.",
+ title = {{Converging to Gosper's Algorithm}},
+ journal = "Advances in Applied Mathematics",
+ volume = "41",
+ number = "3",
+ pages = "351-364",
+ year = "2008",
+ abstract =
+ "Given two polynomials, we find a convergence property of the GCD of
+ the rising factorial and the falling factorial. Based on this
+ property, we present a unified approach to computing the universal
+ denominators as given by Gosper's algorithm and Abramov's algorithm
+ for finding rational solutions to linear difference equations with
+ polynomial coefficients. Our approach easily extends to the
+ q-analogues.",
+ paper = "Chen08.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Chlipala, Adam J.}
\begin{chunk}{axiom.bib}
@article{Chli07,
@@ -12989,6 +13573,53 @@ when shown in factored form.
\end{chunk}
+\index{Cohn, Avra Jean}
+\begin{chunk}{axiom.bib}
+@phdthesis{Cohn79,
+ author = "Cohn, Avra Jean",
+ title = {{Machine Assisted Proofs of Recursion Implementation}},
+ school = "University of Edinburgh",
+ year = "1979",
+ link = "\url{https://www.era.lib.ed.ac.uk/handle/1842/6643}",
+ abstract =
+ "Three studies in the machine assisted proof of recursion
+ implementation are described. The verification system used is
+ Edinburgh LCF (Logic for Computable Functions). Proofs are generated,
+ in LCF, in a goal-oriented fashion by the application of strategies
+ reflecting informal proof plans. LCF is introduced in Chapter 1. We
+ present three case studies in which proof strategies are developed and
+ (except in the third) tested in LCF. Chapter 2 contains an account of
+ the machine generated proofs of three program transformations (from
+ recursive to iterative function schemata). Two of the examples are
+ taken from Manna and Waldinger. In each case, the recursion is
+ implemented by the introduction of a new data type, e.g., a stack or
+ counter. Some progress is made towards the development of a general
+ strategy for producing the equivalence proofs of recursive and
+ iterative function schemata by machine. Chapter 3 is concerned with
+ the machine generated proof of the correctness of a compiling
+ algorithm. The formulation, borrowed from Russell, includes a simple
+ imperative language with a while and conditional construct, and a low
+ level language of labelled statements, including jumps. We have, in
+ LCF, formalised his denotational descriptions of the two languages and
+ performed a proof of the preservation of the semantics under
+ compilation. In Chapter 4, we express and informally prove the
+ correctness of a compiling algorithm for a language containing
+ declarations and calls of recursive procedures. We present a low level
+ language whose semantics model a standard activation stack
+ implementation. Certain theoretical difficulties (connected with
+ recursively defined relations) are discussed, and a proposed proof in
+ LCF is outlined. The emphasis in this work is less on proving original
+ theorems, or even automatically finding proofs of known theorems, than
+ on (i) exhibiting and analysing the underlying structure of proofs,
+ and of machine proof attempts, and (ii) investigating the nature of
+ the interaction (between a user and a computer system) required to
+ generate proofs mechanically; that is, the transition from informal
+ proof plans to behaviours which cause formal proofs to be performed.",
+ paper = "Cohn79.pdf"
+}
+
+\end{chunk}
+
\index{Comon, Hubert}
\begin{chunk}{axiom.bib}
@incollection{Como91a,
@@ -13023,6 +13654,47 @@ when shown in factored form.
\end{chunk}
+\index{Constable, Robert L.}
+\begin{chunk}{axiom.bib}
+@article{Cons92,
+ author = "Constable, Robert L.",
+ title = {{Formal Theories and Software Systems: Fundamental
+ Connections between Computer Science and Logic}},
+ journal = "LNCS",
+ volume = "653",
+ pages = "105-127",
+ year = "1992",
+ abstract =
+ "A formal Theory of Logics is sketched using concepts from a
+ modern proof development system (like Nuprl, Coq or other such
+ software systems). The Theory can be applied to understanding
+ these software systems, and the application suggests a design
+ principle called the {\sl theories-as-systems notion}.
+ Applications of the Theory to automated reasoning have led to
+ an empirical study of the notion of {\sl obvious inference}.
+ Experimental results are cited to explain key constants of a
+ scientific theory of {\sl obvious inference}. The constants appear
+ in what is called here the {\sl deBruijn equation}.",
+ paper = "Cons92.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Constable, Robert L.}
+\begin{chunk}{axiom.bib}
+@incollection{Cons03,
+ author = "Constable, Robert L.",
+ title = {{Naive Computational Type Theory}},
+ booktitle = "Proof and System Reliability",
+ publisher = "unknown",
+ link = "\url{http://www.nuprl.org/documents/Constable/naive.pdf}",
+ year = "2003",
+ paper = "Cons03.pdf"
+}
+
+\end{chunk}
+
\index{Coquuand, Thierry}
\index{Huet, Gerard}
\begin{chunk}{axiom.bib}
@@ -13176,6 +13848,17 @@ when shown in factored form.
\end{chunk}
+\index{Daly, Timothy}
+\begin{chunk}{axiom.bib}
+@misc{Daly18,
+ author = "Daly, Timothy",
+ title = {{Proving Axiom Sane: Survey of CAS and Proof Cooperation}},
+ year = "2018",
+ comment = "In Preparation"
+}
+
+\end{chunk}
+
\index{Dargaye, Zaynah}
\index{Leroy, Xavier}
\begin{chunk}{axiom.bib}
@@ -13466,7 +14149,8 @@ when shown in factored form.
a practically-oriented core calculus based on the sequent
calculus, and used it to re-implement a substantial chunk of the
Glasgow Haskell Compiler.",
- paper = "Down16.pdf"
+ paper = "Down16.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ -13583,10 +14267,57 @@ when shown in factored form.
\subsection{E} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Esparza, Javier}
+\begin{chunk}{axiom.bib}
+@book{Espa17,
+ author = "Esparza, Javier",
+ title = {{Automata Theory: An Algorithmic Approach}},
+ publisher = "Esparza, Javier",
+ year = "2017",
+ link = "\url{https://www7.in.tum.de/~esparza/autoskript.pdf}",
+ paper = "Espa17.pdf"
+}
+
+\end{chunk}
+
\subsection{F} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Farmer, William M.}
\begin{chunk}{axiom.bib}
+@article{Farm95b,
+ author = "Farmer, William M.",
+ title = {{Reasoning about Partial Functions with the Aid of a
+ Computer}},
+ journal = "Erkenntnis",
+ volume = "43",
+ number = "3",
+ pages = "279-294",
+ year = "1995",
+ abstract =
+ "Partial functions are ubiquitous in both mathematics and computer
+ science. Therefore, it is imperative that the underlying logical
+ formalism for a general-purpose mechanized mathematics system
+ provide strong support for reasoning about partial
+ functions. Unfortunately, the common logical formalisms --
+ first-order logic, type theory, and set theory -- are usually only
+ adequate for reasoning about partial functions {\sl in
+ theory}. However, the approach to partial functions traditionally
+ employed by mathematicians is quite adequate {\sl in
+ practice}. This paper shows how the traditional approach to
+ partial functions can be formalized in a range of formalisms that
+ includes first-order logic, simple type theory, and von Neumann-
+ Bernays-Godel set theory. It argues that these new formalisms
+ allow one to directly reason about partial functions; are based on
+ natural, well-understood, familiar principles; and can be
+ effectively implemented in mechanized mathematics systems.",
+ paper = "Farm95b.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
@inproceedings{Farm00,
author = "Farmer, William M.",
title = {{A Proposal for the Development of an Interactive
@@ -13779,6 +14510,70 @@ when shown in factored form.
\end{chunk}
+\index{Feferman, Solomon}
+\begin{chunk}{axiom.bib}
+@misc{Fefe95,
+ author = "Feferman, Solomon",
+ title = {{Definedness}},
+ year = "1995",
+ link = "\url{https://math.stanford.edu/~feferman/papers/definedness.pdf}",
+ abstract =
+ "Questions of definedness are ubiquitous in mathematics. Informally,
+ these involve reasoning about expressions which may or may not have a
+ value. This paper surveys work on logics in which such reasoning can
+ be carried out directly, especially in computational contexts. It
+ begins with a general logic of 'partial terms', continues with partial
+ combinatory and lambda calculi, and concludes with an expressively
+ rich theory of partial functions and polymorphic types, where
+ termination of functional programs can be established in a natural way.",
+ paper = "Fefe95.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Felty, Amy}
+\index{Miller, Dale}
+\begin{chunk}{axiom.bib}
+@techreport{Felt90,
+ author = "Felty, Amy and Miller, Dale",
+ title = {{Encoding a Dependent-Type Lambda Calculus in a Logic
+ Programming Language}},
+ type = "technical report",
+ number = "MS-CIS-90-18",
+ institution = "University of Pennsylvania",
+ year = "1990",
+ abstract =
+ "Various forms of typed $\lambda$-calculi have been proposed as
+ specification languages for representing wide varieties of object
+ logics. The logical framework, LF, is an example of such a
+ dependent-type $\lambda$-calculus. A small subset of intuitionistic
+ logic with quantification over simply typed $\lambda$-calculus has
+ also been proposed as a framework for specifying general logics. The
+ logic of {\sl hereditary Harrop formulas} with quantification at all
+ non-predicate types, denoted here as $hh^\omega$ , is such a
+ meta-logic that has been implemented in both the Isabelle theorem
+ prover and the $\lambda$Prolog logic programming language. Both
+ frameworks provide for specifications of logics in which details
+ involved with free and bound variable occurrences, substitutions,
+ eigenvariables, and the scope of assumptions within object logics are
+ handled correctly and elegantly at the 'meta' level. In this paper, we
+ show how LF can be encoded into $hh^\omega$ in a direct and natural
+ way by mapping the typing judgments in LF into propositions in the
+ logic of $hh^\omega$. This translation establishes a very strong
+ connection between these two languages: the order of quantification in
+ an LF signature is exactly the order of a set of $hh^\omega$ clauses,
+ and the proofs in one system correspond directly to proofs in the
+ other system. Relating these two languages makes it possible to
+ provide implementations of proof checkers and theorem provers for
+ logics specified in LF by using standard logic programming techniques
+ which can be used to implement $hh^\omega$.",
+ paper = "Felt90.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Filliatre, Jean-Christophe}
\begin{chunk}{axiom.bib}
@article{Fill03,
@@ -13921,6 +14716,20 @@ when shown in factored form.
\end{chunk}
+\index{Fong, Brendan}
+\index{Spivak, David I.}
+\begin{chunk}{axiom.bib}
+@misc{Fong18,
+ author = "Fong, Brendan and Spivak, David I.",
+ title = {{Seven Sketches in Compositionality: An Invitation to
+ Applied Category Theory}},
+ link = "\url{http://math.mit.edu/~dspivak/teaching/sp18/7Sketches.pdf}",
+ year = "2018",
+ paper = "Fong18.pdf"
+}
+
+\end{chunk}
+
\index{Freeman, Tim}
\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@@ -14261,6 +15070,91 @@ when shown in factored form.
\end{chunk}
+\index{Graham, Paul}
+\begin{chunk}{axiom.bib}
+@book{Grah93,
+ author = "Graham, Paul",
+ title = {{On Lisp}},
+ publisher = "Prentice Hall",
+ year = "1993",
+ link = "\url{http://ep.yimg.com/ty/cdn/paulgraham/onlisp.pdf}",
+ comment = "code in papers/onlisp.lisp",
+ isbn = "0130305529",
+ abstract =
+ "On Lisp is a comprehensive study of advanced Lisp techniques, with
+ bottom-up programming as the unifying theme. It gives the first
+ complete description of macros and macro applications. The book also
+ covers important subjects related to bottom-up programming, including
+ functional programming, rapid prototyping, interactive development,
+ and embedded languages. The final chapter takes a deeper look at
+ object-oriented programming than previous Lisp books, showing the
+ step-by-step construction of a working model of the Common Lisp Object
+ System (CLOS).",
+ paper = "Grah93.pdf"
+}
+
+\end{chunk}
+
+\index{Graillat, Stef}
+\index{Menissier-Morain, Valerie}
+\begin{chunk}{axiom.bib}
+@inproceedings{Grai07,
+ author = "Graillat, Stef and Menissier-Morain, Valerie",
+ title = {{Error-free Transformations in Real and Complex
+ Floating-point Arithmetic}},
+ booktitle = "Int. Symp. on Nonlinear Theory and Applications",
+ year = "2007",
+ abstract =
+ "Error-free transformation is a concept that makes it possible to
+ compute accurate results within a floating point arithmetic. Up to
+ now, it has only be studied for real floating point arithmetic. In
+ this short note, we recall the known error-free transformations for
+ real arithmetic and we propose some new error-free transformations for
+ complex floating point arithmetic. This will make it possible to
+ design some new accurate algorithms for summation, dot product and
+ polynomial evaluation with complex entries.",
+ paper = "Grai07.pdf"
+}
+
+\end{chunk}
+
+\index{Greve, David A.}
+\index{Kaufmann, Matt}
+\index{Manolios, Panagiotis}
+\index{Moore, J Strother}
+\index{Ray, Sandip}
+\index{Ruiz-Reina, Jose Luis}
+\index{Sumners, Rob}
+\index{Vroon, Daron}
+\index{Wilding, Matthew}
+\begin{chunk}{axiom.bib}
+@article{Grev08,
+ author = "Greve, David A. and Kaufmann, Matt and Manolios, Panagiotis
+ and Moore, J Strother and Ray, Sandip and Ruiz-Reina, Jose Luis
+ and Sumners, Rob and Vroon, Daron and Wilding, Matthew",
+ title = {{Efficient Execution in an Automated Reasoning Environment}},
+ journal = "Journal of Functional Programming",
+ volume = "18",
+ number = "1",
+ pages = "15-46",
+ year = "2008",
+ abstract =
+ "We describe a method that permits the user of a mechanized
+ mathematical logic to write elegant logical definitions while allowing
+ sound and efficient execution. In particular, the features supporting
+ this method allow the user to install, in a logically sound way,
+ alternative executable counterparts for logically defined
+ functions. These alternatives are often much more efficient than the
+ logically equivalent terms they replace. These features have been
+ implemented in the ACL2 theorem prover, and we discuss several
+ applications of the features in ACL2.",
+ paper = "Grev08.pdf",
+ keywords = "printed"
+
+}
+
+\end{chunk}
+
\index{Gross, Jason}
\index{Chlipala, Adam}
\begin{chunk}{axiom.bib}
@@ -14287,6 +15181,30 @@ when shown in factored form.
\end{chunk}
+\index{Guallart, Nino}
+\begin{chunk}{axiom.bib}
+@misc{Gual14,
+ author = "Guallart, Nino",
+ title = {{An Overview of Type Theories}},
+ link = "\url{https://arxiv.org/pdf/1411.1029.pdf}",
+ year = "2014",
+ abstract =
+ "Pure type systems arise as a generalisation of simply typed lambda
+ calculus. The contemporary development of mathematics has renewed
+ the interest in type theories, as they are not just the object of
+ mere historical research, but have an active role in the development
+ of computational science and core mathematics. It is worth exploring
+ some of them in depth, particularly predicative Martin-Löf’s
+ intuitionistic type theory and impredicative Coquand’s calculus of
+ constructions. The logical and philosophical differences and
+ similarities between them will be studied, showing the relationship
+ between these type theories and other fields of logic.",
+ paper = "Gual14.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Gunter, Elsa L.}
\begin{chunk}{axiom.bib}
@techreport{Gunt89,
@@ -14461,6 +15379,18 @@ when shown in factored form.
\end{chunk}
+\index{Harrison, John}
+\begin{chunk}{axiom.bib}
+@book{Harr09,
+ author = "Harrison, John",
+ title = {{Handbook of Practical Logic and Automated Reasoning}},
+ publisher = "Cambridge University Press",
+ year = "2009",
+ isbn = "978-0-521-89957-4"
+}
+
+\end{chunk}
+
\index{Hatton, Les}
\begin{chunk}{axiom.bib}
@article{Hatt97,
@@ -14474,6 +15404,22 @@ when shown in factored form.
\end{chunk}
+\index{Hellman, Martin E.}
+\begin{chunk}{axiom.bib}
+@article{Hell17,
+ author = "Hellman, Martin E.",
+ title = {{Cybersecurity, Nuclear Security, Alan Turing, and
+ Illogical Logic}},
+ journal = "Communications of the ACM",
+ volume = "60",
+ number = "12",
+ pages = "52-59",
+ year = "2017",
+ link = "\url{https://cacm.acm.org/magazines/2017/12/223042-cybersecurity-nuclear-security-alan-turing-and-illogical-logic/fulltext}"
+}
+
+\end{chunk}
+
\index{Hoare, C.A.R}
\begin{chunk}{axiom.bib}
@incollection{Hoar72,
@@ -14605,7 +15551,7 @@ when shown in factored form.
\end{chunk}
-\index{Jones, Simon > Peyton}
+\index{Jones, Simon Peyton}
\begin{chunk}{axiom.bib}
@inproceedings{Jone96,
author = "Jones, Simon > Peyton",
@@ -14630,6 +15576,36 @@ when shown in factored form.
\end{chunk}
+\index{Jones, Simon Peyton}
+\index{Meijer, Erik}
+\begin{chunk}{axiom.bib}
+@misc{Jone97,
+ author = "Jones, Simon Peyton and Meijer, Erik",
+ title = {{Henk: A Typed Intermediate Language}},
+ year = "1997",
+ link =
+ "\url{https://www.microsoft.com/en-us/research/wp-content/uploads/1997/01/henk.pdf}",
+ abstract =
+ "There is a growing interest in the use of richly-typed
+ intermediate languages in sophisticated compilers for
+ higher-order, typed source languages. These intermediate languages
+ are typically stratified, involving terms, types, and kinds. As
+ the sophistication of the type system increases, these three
+ levels begin to look more and more similar, so an attractive
+ approach is to use a single syntax, and a single data type in the
+ compiler, to represent all three.
+
+ The theory of so-called {\sl pure type systems} makes precisely
+ such an identification. This paper describes Henk, a new typed
+ intermediate language based closely on a particular pure type
+ system, {\sl the lambda cube}. On the way we give a tutorial
+ introduction to the lambda cube.",
+ paper = "Jone97.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{K} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Kaes, Stefan}
@@ -14935,9 +15911,56 @@ when shown in factored form.
\end{chunk}
+\index{Krohnfeldt, Jed}
+\index{Steury, Craig}
+\begin{chunk}{axiom.bib}
+@techreport{Kroh93,
+ author = "Krohnfeldt, Jed and Steury, Craig",
+ title = {{Frolic: Logic Programming with Frobs}},
+ type = "technical report",
+ number = "86-08",
+ institution = "University of Utah",
+ comment = "Utah PASS Project OpNote",
+ year = "1993",
+ code = "PAS/frolic.tgz",
+ abstract =
+ "This paper describes Frolic, a logic programming system written
+ in Common Lisp. It provides logic programming capabilities for
+ Lisp programmers. In addition to being a Prolog in Lisp, Frolic
+ has capabilities for reasoning about {\sl frobs}. A frob is a data
+ structure that gives the power and flexibility of both frame and
+ object data types. This paper gives a brief introduction to the
+ use and implementation of Frolic.",
+ paper = "Kroh93.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{L} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Lamport, Leslie}
+\begin{chunk}{axiom.bib}
+@article{Lamp78,
+ author = "Lamport, Leslie",
+ title = {{The Specification and Proof of Correctness of Interactive
+ Programs}},
+ journal = "LNCS",
+ volume = "75",
+ year = "1978",
+ abstract =
+ "A method of specifying interactive programs and of proving that
+ an implementation correctly implements its specification is
+ described. The Floyd-Hoare assertional method is modified to
+ permit such proofs, and the method is illustrated with a TECO
+ program that accepts typed input and formats programs.",
+ paper = "Lamp78.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Lamport, Leslie}
\index{Owicki, Susan}
\begin{chunk}{axiom.bib}
@article{Lamp81,
@@ -14955,6 +15978,51 @@ when shown in factored form.
\index{Leroy, Xavier}
\begin{chunk}{axiom.bib}
+@techreport{Lero90,
+ author = "Leroy, Xavier",
+ title = {{The ZINC Experiment: An Economical Implementation of the
+ ML Language}},
+ type = "technical report",
+ institution = "Institut National de Recherche en Informatique et en
+ Automatique",
+ number = "117",
+ year = "1990",
+ abstract =
+ "This report details the design and implementation of the ZINC
+ system. This is an implementation of the ML language, intended to
+ serve as a test field for various extensions of the language, and
+ for new implementation techniques as well. This system is strongly
+ oriented toward separate compilation and the production of small,
+ standalone programs; type safety is ensured by a Modula-2-like
+ module system. ZINC uses simple, portable techniques, such as
+ bytecode interpretation; a sophisticated execution model helps
+ counterbalance the interpretation overhead. Other highlights
+ include an efficient implementation of records with inclusion
+ (subtyping).",
+ paper = "Lero90.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Leroy, Xavier}
+\begin{chunk}{axiom.bib}
+@article{Lero98,
+ author = "Leroy, Xavier",
+ title = {{An Overview of Types in Compilation}},
+ journal = "LNCS",
+ volume = "1473",
+ year = "1998",
+ publisher = "Springer-Verlag",
+ pages = "1-8",
+ paper = "Lero98.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Leroy, Xavier}
+\begin{chunk}{axiom.bib}
@article{Lero09,
author = "Leroy, Xavier",
title = {{A Formally Verified Compiler Back-end}},
@@ -15003,8 +16071,111 @@ when shown in factored form.
\end{chunk}
+\index{Loh, Andres}
+\index{McBride, Conor}
+\index{Swierstra, Wouter}
+\begin{chunk}{axiom.bib}
+@article{Lohx01,
+ author = "Loh, Andres and McBride, Conor and Swierstra, Wouter",
+ title = {{A Tutorial Implementation of a Dependently Typed Lambda
+ Calculus}},
+ journal = "Fundamenta Informaticae",
+ volume = "XXI",
+ year = "2001",
+ pages = "1001-1031",
+ abstract =
+ "We present the type rules for a dependently typed core calculus
+ together with a straight-forward implementation in Haskell. We
+ explicitly highlight the changes necessary to shift from a
+ simply-typed lambda calculus to the dependently typed lambda
+ calculus. We also describe how to extend our core language with data
+ types and write several small example programs. The article is
+ accompanied by an executable interpreter and example code that allows
+ immediate experimentation with the system we describe.",
+ paper = "Lohx01.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Loh, Andres}
+\index{McBride, Conor}
+\index{Swierstra, Wouter}
+\begin{chunk}{axiom.bib}
+@misc{Lohx18,
+ author = "Loh, Andres and McBride, Conor and Swierstra, Wouter",
+ title = {{Simply Easy! An Implementation of a Dependently Typed
+ Lambda Calculus}},
+ link = "\url{http://strictlypositive.org/Easy.pdf}",
+ year = "2018",
+ abstract =
+ "We present an implementation in Haskell of a dependently-typed
+ lambda calculus that can be used as the core of a programming
+ language. We show that a dependently-typed lambda calculus is no
+ more difficult to implement than other typed lambda calculi. In fact,
+ our implementation is almost as easy as an implementation of the
+ simply typed lambda calculus, which we emphasize by discussing
+ the modifications necessary to go from one to the other. We explain
+ how to add data types and write simple programs in the core
+ language, and discuss the steps necessary to build a full-fledged
+ programming language on top of our simple core.",
+ paper = "Lohx18.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Lu, Eric}
+\begin{chunk}{axiom.bib}
+@misc{Luxx16,
+ author = "Lu, Eric",
+ title = {{Barendregt's Cube and Programming with Dependent Types}},
+ link = "\url{https://www.seas.harvard.edu/courses/cs252/2016fa/15.pdf}",
+ paper = "Luxx16.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{M} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Manna, Zohar}
+\index{Vuillemin, Jean}
+\begin{chunk}{axiom.bib}
+@article{Mann72,
+ author = "Manna, Zohar and Vuillemin, Jean",
+ title = {{Fixpoint Approach to the Theory of Computation}},
+ journal = "Communications of the ACM",
+ volume = "15",
+ number = "7",
+ year = "1972",
+ pages = "828-836",
+ abstract =
+ "Following the fixpoint theory of Scott, the semantics of computer
+ programs are defined in terms of the least fixpoints of recursive
+ programs. This allows not only the justification of all existing
+ verification techniques, but also their extension to the handling, in
+ a uniform manner of various properties of computer programs, including
+ correctness, termination, and equivalence.",
+ paper = "Mann72.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Manna, Zohar}
+\index{Waldinger, Richard}
+\begin{chunk}{axiom.bib}
+@book{Mann85,
+ author = "Manna, Zohar and Waldinger, Richard",
+ title = {{The Logical Basis for Computer Programming (Vol 1)}},
+ publisher = "Addison-Wesley",
+ year = "1985",
+ isbn = "0-201-15260-2"
+}
+
+\end{chunk}
+
\index{Manolios, Panagiotis}
\index{Moore, J Strother}
\begin{chunk}{axiom.bib}
@@ -15084,7 +16255,36 @@ when shown in factored form.
title = {{A Basis for a Mathematical Theory of Computation}},
booktitle = "Computer Programming and Formal Systems",
year = "1963",
- paper = "Mcca63.pdf"
+ paper = "Mcca63.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{McCarthy, John}
+\index{Painter, James}
+\begin{chunk}{axiom.bib}
+@inproceedings{Mcca67,
+ author = "McCarthy, John and Painter, James",
+ title = {{Correctness of a Compiler for Arithmetic Expressions}},
+ booktitle = "Proc. Symp. in Applied Mathematics",
+ publisher = "American Mathematical Society",
+ year = "1967",
+ paper = "Mcca67.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{McCarthy, J.}
+\begin{chunk}{axiom.bib}
+@misc{Mcca96,
+ author = "McCarthy, J.",
+ title = {{Towards a Mathematical Science of Computation}},
+ link = "\url{http://www-formal.stanford.edu/jmc/towards.pdf}",
+ year = "1996",
+ paper = "Mcca96.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ -15331,6 +16531,61 @@ when shown in factored form.
\end{chunk}
\index{Morrisett, Greg}
+\begin{chunk}{axiom.bib}
+@phdthesis{Morr95,
+ author = "Morrisett, Greg",
+ title = {{Compiling with Types}},
+ school = "Carnegie Mellon University",
+ year = "1995",
+ comment = "CMU-CS-95-226",
+ link = "\url{https://www.cs.cmu.edu/~rwh/theses/morrisett.pdf}",
+ abstract =
+ "Compilers for monomorphic languages, such as C and Pascal, take
+ advantage of types to determine data representations, alignment,
+ calling conventions, and register selection. However, these languages
+ lack important features including polymorphism, abstract datatypes,
+ and garbage collection. In contrast, modern programming languages
+ such as Standard ML (SML), provide all of these features, but existing
+ implementations fail to take full advantage of types. The result is
+ that performance of SML code is quite bad when compared to C.
+
+ In this thesis, I provide a general framework, called
+ {\sl type-directed compilation}, that allows compiler writers to
+ take advantage of types at all stages in compilation. In the
+ framework, types are used not only to determine efficient
+ representations and calling conventions, but also to prove the
+ correctness of the compiler. A key property of type-directed
+ compilation is that all but the lowest levels of the compiler use
+ {\sl typed intermediate languages}. An advantage of this approach
+ is that it provides a means for automatically checking the
+ integrity of the resulting code.
+
+ An important contribution of this work is the development of a new,
+ statically-typed intermediate language, called $\lambda^{ML}_i$.
+ This language supports {\sl dynamic type dispatch}, providing a
+ means to select operations based on types at run time. I show how
+ to use dynamic type dispatch to support polymorphism, ad-hoc
+ operators, and garbage collection without having to box or tag
+ values. This allows compilers for SML to take advantage of
+ techniques used in C compilers, without sacrificing language
+ features or separate compilation.
+
+ To demonstrate the applicability of my approach, I, along with
+ others, have constructed a new compiler for SML called TIL that
+ eliminates most restrictions on the representations of values. The
+ code produced by TIL is roughly twice as fast as code produced by
+ the SML/NJ compiler. This is due at least partially to the use
+ of natural representations, but primarily to the conventional
+ optimizer which manipulates typed, $\lambda^{ML}_i$ code. TIL
+ demonstrates that combining type-directed compilation with dynamic
+ type dispatch yields a superior architecture for compilers of
+ modern languages.",
+ paper = "Morr95.pdf"
+}
+
+\end{chunk}
+
+\index{Morrisett, Greg}
\index{Walker, David}
\index{Crary, Karl}
\index{Glew, Neil}
@@ -15635,6 +16890,40 @@ when shown in factored form.
\end{chunk}
+\index{Noble, James}
+\index{Black, Andrew P.}
+\index{Bruce, Kim B.}
+\index{Homer, Michael}
+\index{Miller, Mark S.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Nobl16,
+ author = "Noble, James and Black, Andrew P. and Bruce, Kim B. and
+ Homer, Michael and Miller, Mark S.",
+ title = {{The Left Hand of Equals}},
+ booktitle = "Int. Symp. of New Ideas, New Paradigms, and Reflections
+ on Programming and Software",
+ publisher = "ACM",
+ pages = "224-237",
+ year = "2016",
+ abstract =
+ "When is one object equal to another object? While object identity is
+ fundamental to object-oriented systems, object equality, although
+ tightly intertwined with identity, is harder to pin down. The
+ distinction between identity and equality is reflected in
+ object-oriented languages, almost all of which provide two variants of
+ 'equality', while some provide many more. Programmers can usually
+ override at least one of these forms of equality, and can always
+ define their own methods to distinguish their own objects.
+
+ This essay takes a reflexive journey through fifty years of identity
+ and equality in object-oriented languages, and ends somewhere we did
+ not expect: a 'left-handed' equality relying on trust and grace.",
+ paper = "Nobl16.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{O} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{O'Donnell, Michael J.}
@@ -15721,6 +17010,29 @@ when shown in factored form.
\end{chunk}
+\index{Oury, Nicolas}
+\index{Swierstra, Wouter}
+\begin{chunk}{axiom.bib}
+@inproceedings{Oury08,
+ author = "Oury, Nicolas and Swierstra, Wouter",
+ title = {{The Power of Pi}},
+ link = "\url{https://cs.ru.nl/~wouters/Publications/ThePowerOfPi.pdf}",
+ booktitle = "Int. Conf. on Functional Programming",
+ year = "2008",
+ abstract =
+ "This paper exhibits the power of programming with dependent types by
+ dint of embedding three domain-specific languages: Cryptol, a
+ language for cryptographic protocols; a small data description
+ language; and relational algebra. Each example demonstrates
+ particular design patterns inherent to dependently-typed programming.
+ Documenting these techniques paves the way for further research in
+ domain-specific embedded type systems.",
+ paper = "Oury08.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{P} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Parnas, David L.}
@@ -15787,6 +17099,50 @@ when shown in factored form.
\index{Paulson, Lawrence C.}
\begin{chunk}{axiom.bib}
+@article{Paul83,
+ author = "Paulson, Lawrence C.",
+ title = {{A Higher-Order Implementation of Rewriting}},
+ journal = "Science of Computer Programming",
+ volume = "3",
+ pages = "119-149",
+ year = "1983",
+ abstract =
+ "Many automatic theorem-provers rely on rewriting. Using theorems as
+ rewrite rules helps to simplify the subgoals that arise during a
+ proof. LCF is an interactive theorem-prover intended for reasoning
+ about computation. Its implementation of rewriting is presented in
+ detail. LCF provides a family of rewriting functions, and operators to
+ combine them. A succession of functions is described, from pattern
+ matching primitives to the rewriting tool that performs most
+ inferences in LCF proofs. The design is highly modular. Each function
+ performs a basic, specific task, such as recognizing a certain form of
+ tautology. Each operator implements one method of building a rewriting
+ function from simpler ones. These pieces can be put together in
+ numerous ways, yielding a variety of rewriting strategies. The
+ approach involves programming with higher-order functions. Rewriting
+ functions are data values, produced by computation on other rewriting
+ functions. The code is in daily use at Cambridge, demonstrating the
+ practical use of functional programming.",
+ paper = "Paul83.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Paulson, Lawrence C.}
+\begin{chunk}{axiom.bib}
+@book{Paul87,
+ author = "Paulson, Lawrence C.",
+ title = {{Logic and Computation}},
+ publisher = "Press Syndicate of the University of Cambridge",
+ year = "1987",
+ isbn = "0-521-34632-0"
+}
+
+\end{chunk}
+
+\index{Paulson, Lawrence C.}
+\begin{chunk}{axiom.bib}
@incollection{Paul90a,
author = "Paulson, Lawrence C.",
title = {{Isabelle: The Next 700 Theorem Provers}},
@@ -15956,6 +17312,18 @@ when shown in factored form.
\end{chunk}
+\index{Pierce, Benjamin C.}
+\begin{chunk}{axiom.bib}
+@book{Pier02,
+ author = "Pierce, Benjamin C.",
+ title = {{Types and Programming Languages}},
+ publisher = "MIT Press",
+ year = "2002",
+ isbn = "978-0-262-16209-8"
+}
+
+\end{chunk}
+
\index{Piroi, Florina}
\index{Buchberger, Bruno}
\index{Rosenkranz, Camelia}
@@ -16091,6 +17459,42 @@ when shown in factored form.
\end{chunk}
+\index{Popov, Nikolaj}
+\begin{chunk}{axiom.bib}
+@misc{Popo03,
+ author = "Popov, Nikolaj",
+ title = {{Verification Using Weakest Precondition Strategy}},
+ comment = "Talk at Comp. Aided Verification of Information Systems",
+ year = "2003",
+ location = "Timisoara, Romania",
+ abstract =
+ "We describe the weakest precondition strategy for verifying
+ programs. This is a method which takes a specification and an
+ annotated program and generates so-called verification conditions:
+ mathematical lemmata which have to be proved in order to obtain a
+ formal correctness proof for the program with respect to its
+ specification. There are rules for generating the intermediate pre
+ and post conditions algorithmically. Based on these rules, we have
+ developed a package for generating verification conditions.",
+ paper = "Popo03.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Pratt, Vaughan}
+\begin{chunk}{axiom.bib}
+@article{Prat79,
+ author = "Pratt, Vaughan",
+ title = {{A Mathematician's View of Lisp}},
+ journal = "Byte Magazine",
+ year = "1979",
+ pages = "162-169",
+ paper = "Byte79.pdf"
+}
+
+\end{chunk}
+
\subsection{Q} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\subsection{R} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -16147,6 +17551,82 @@ when shown in factored form.
\end{chunk}
+\index{Roorda, Jan-Willem}
+\begin{chunk}{axiom.bib}
+@phdthesis{Roor00,
+ author = "Roorda, Jan-Willem",
+ title = {{Pure Type Systems for Functional Programming}},
+ year = "2000",
+ school = "University of Utrecht",
+ abstract =
+ "We present a functional programming language based on Pure Type
+ Systems (PTSs). We show how we can define such a language by
+ extending the PTS framework with algebraic data types, case
+ expressions and definitions. To be able to experiment with our
+ language we present an implementation of a type checker and an
+ interpreter for our language. PTSs are well suited as a basis for a
+ functional programming language because they are at the top of a
+ hierarchy of increasingly stronger type systems. The concepts of
+ `existential types', `rank-n polymorphism' and `dependent types' arise
+ naturally in functional programming languages based on the systems in
+ this hierarchy. There is no need for ad-hoc extensions to incorporate
+ these features. The type system of our language is more powerful than
+ the Hindley-Milner system. We illustrate this fact by giving a number
+ of meaningful programs that cannot be typed in Haskell but are typable
+ in our language. A `real world' example of such a program is the
+ mapping of a specialisation of a Generic Haskell function to a Haskell
+ function. Unlike the description of the Henk language by Simon Peyton
+ Jones and Erik Meijer we give a complete formal definition of the type
+ system and the operational semantics of our language. Another
+ difference between Henk and our language is that our language is
+ defined for a large class of Pure Type Systems instead of only for the
+ systems of the $\lambda$-cube.",
+ paper = "Roor00.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Van Roy, Peter}
+\index{Haridi, Seif}
+\begin{chunk}{axiom.bib}
+@book{Royx03,
+ author = "Van Roy, Peter and Haridi, Seif",
+ title = {{Concepts, Techniques, and Models of Computer Programming}},
+ year = "2003",
+ link =
+ "\url{http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.102.7366&rep=rep1&type=pdf}",
+ publisher = "MIT",
+ isbn = "978-0262220699",
+ paper = "Royx03.pdf"
+}
+
+\end{chunk}
+
+\index{Russell, Bruce D.}
+\begin{chunk}{axiom.bib}
+@article{Russ77,
+ author = "Russell, Bruce D.",
+ title = {{Implementation Correctness Involving a Language with GOTO
+ Statements}},
+ journal = "SIAM Journal of Computing",
+ volume = "6",
+ number = "3",
+ year = "1977",
+ abstract =
+ "Two languages, one a simple structured programming language, the
+ other a simple goto language, are defined. A denotational semantics is
+ given for each language. An interpreter for the goto language is given
+ and is proved correct with respect to the denotational semantics. A
+ compiler from the structured to the goto language is defined and
+ proved to be a semantically invariant translation of programs. The
+ proofs are by computational induction.",
+ paper = "Russ77.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{S} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Sabry, Amr}
@@ -16268,6 +17748,35 @@ when shown in factored form.
\end{chunk}
+\index{Shieber, Stuart M.}
+\index{Schabes, Yves}
+\index{Pereira, Fernando C.N.}
+\begin{chunk}{axiom.bib}
+@article{Shie95,
+ author = "Shieber, Stuart M. and Schabes, Yves and Pereira, Fernando C.N.",
+ title = {{Principles and Implementation of Deductive Parsing}},
+ journal = "J. Logic Programming",
+ volume = "24",
+ number = "1-2",
+ pages = "3-36",
+ year = "1995",
+ abstract =
+ "We present a system for generating parsers based directly on the
+ metaphor of parsing as deduction. Parsing algorithms can be
+ represented directly as deduction systems, and a single deduction
+ engine can interpret such deduction systems so as to implement the
+ corresponding parser. The method generalizes easily to parsers for
+ augmented phrase structure formalisms, such as definite-clause
+ grammars and other logic grammar formalisms, and has been used for
+ rapid prototyping of parsing algorithms for a variety of formalisms
+ including variants of tree-adjoining grammars, categorial grammars,
+ and lexicalized context-free grammars.",
+ paper = "Shie95.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Shields, Mark}
\index{Jones, Simon Peyton}
\begin{chunk}{axiom.bib}
@@ -16307,6 +17816,30 @@ when shown in factored form.
\end{chunk}
+\index{Shankar, N.}
+\begin{chunk}{axiom.bib}
+@book{Shan94,
+ author = "Shankar, N.",
+ title = {{Metamathematics, Machines, and G{\"o}del's Proof}},
+ publisher = "Cambridge University Press",
+ year = "1994",
+ isbn = "0-521-58533-3"
+}
+
+\end{chunk}
+
+\index{Shoenfield, Joseph R.}
+\begin{chunk}{axiom.bib}
+@book{Shoe67,
+ author = "Shoenfield, Joseph R.",
+ title = {{Mathematical Logic}},
+ publisher = "Association for Symbolic Logic",
+ year = "1967",
+ isbn = "1-56881-135-7"
+}
+
+\end{chunk}
+
\index{Smith, Peter}
\begin{chunk}{axiom.bib}
@misc{Smit15,
@@ -16400,6 +17933,89 @@ when shown in factored form.
\end{chunk}
+\index{Steele, Guy Lewis}
+\index{Sussman, Gerald Jay}
+\begin{chunk}{axiom.bib}
+@techreport{Stee78a,
+ author = "Steele, Guy Lewis and Sussman, Gerald Jay",
+ title = {{The Art of the Interpreter}},
+ institution = "MIT",
+ type = "technical report",
+ number = "AI Memo No. 453",
+ year = "1978",
+ abstract =
+ "We examine the effects of various language design decisions on the
+ programming styles available to a user of the language, with
+ particular emphasis on the ability to incrementally construct
+ modular systems. At each step we exhibit an interactive
+ meta-circular interpreter for the language under
+ consideration. Each new interpreter is the result of an
+ incremental change to the previous interpreter.
+
+ We explore the consequences of various variable binding
+ disciplines and the introduction of side effects. We find that
+ dynamic scoping is unsuitable for constructing procedural
+ abstractions, but has another role as an agent of modularity,
+ being a structured form of side effect. More general side effects
+ are also found to be necessary to promote modular style. We find
+ that the notion of side effect and the notion of equality (object
+ identity) are mutually constraining; to define one is to define
+ the other.
+
+ The interpreters we exhibit are all written in a simple dialect of
+ LISP, and all implement LISP-like languages. A subset of these
+ interpreters constitute a partial historical reconstruction of the
+ actual evolution of LISP."
+}
+
+\end{chunk}
+
+\index{Stoutemyer, David R.}
+\begin{chunk}{axiom.bib}
+@article{Stou12,
+ author = "Stoutemyer, David R.",
+ title = {{Series Crimes}},
+ journal = "ACM Comm. in Computer Algebra",
+ volume = "46",
+ number = "2",
+ year = "2012",
+ abstract =
+ "Puiseux series are power series in which the exponents can be
+ fractional and/or negative rational numbers. Several computer algebra
+ systems have one or more built-in or loadable functions for computing
+ truncated Puiseux series. Some are generalized to allow coefficients
+ containing functions of the series variable that are dominated by any
+ power of that variable, such as logarithms and nested logarithms of
+ the series variable. Some computer algebra systems also have built-in
+ or loadable functions that compute infinite Puiseux series.
+ Unfortunately, there are some little-known pitfalls in
+ computing Puiseux series. The most serious of these is expansions
+ within branch cuts or at branch points that are incorrect for some
+ directions in the complex plane. For example with each series
+ implementation accessible to you:
+
+ Compare the value of $(z^2 + z^3)^{3/2}$ with that of its
+ truncated series expansion about $z = 0$, approximated at
+ $z = -0.01$. Does the series converge to a value that is
+ the negative of the correct value?
+
+ Compare the value of $\ln(z^2 + z^3)$ with its truncated series
+ expansion about $z = 0$, approximated at $z= -0.01 + 0.1i$. Does
+ the series converge to a value that is incorrect by $2\pi i$?
+
+ Compare $\mathrm{arctanh}(-2 + \ln(z)z)$ with its truncated series
+ expansion about $z = 0$, approximated at $z = -0.01$. Does the series
+ converge to a value that is incorrect by about $\pi i$?
+
+ At the time of this writing, most implementations that accommodate
+ such series exhibit such errors. This article describes how to avoid
+ these errors both for manual derivation of series and when
+ implementing series packages.",
+ paper = "Stou12.pdf"
+}
+
+\end{chunk}
+
\index{Stuckey, Peter J.}
\index{Sulzmann, Martin}
\begin{chunk}{axiom.bib}
@@ -16430,6 +18046,21 @@ when shown in factored form.
\end{chunk}
+\index{Su, Jessica}
+\begin{chunk}{axiom.bib}
+@misc{Suxx68,
+ author = "Su, Jessica",
+ title = {{Solving Recurrences}},
+ link =
+ "\url{https://web.stanford.edu/class/archive/cs/cs161/cs161.1168/lecture3.pdf}",
+ year = "1968",
+ comment = "Stanford CS161 Lecture 3 class notes",
+ paper = "Suxx68.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{T} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Talpin, Jean-Pierre}
@@ -16495,6 +18126,70 @@ when shown in factored form.
\end{chunk}
+\index{Tarski, Alfred}
+\begin{chunk}{axiom.bib}
+@book{Tars46,
+ author = "Tarski, Alfred",
+ title = {{Introduction to Logic}},
+ publisher = "Dover",
+ year = "1946",
+ isbn = "978-0-486-28462-0"
+}
+
+\end{chunk}
+
+\index{Tobin-Hochstadt, Sam}
+\index{Felleisen, Matthias}
+\begin{chunk}{axiom.bib}
+@misc{Tobi11,
+ author = "Tobin-Hochstadt, Sam and Felleisen, Matthias",
+ title = {{The Design and Implementation of Typed Scheme: From
+ Scripts to Programs}},
+ link = "\url{https://arxiv.org/pdf/1106.2575.pdf}",
+ year = "2011",
+ abstract =
+ "When scripts in untyped languages grow into large programs,
+ maintaining them becomes difficult. A lack of explicit type
+ annotations in typical scripting languages forces programmers to
+ (re)discover critical pieces of design information every time they
+ wish to change a program. This analysis step both slows down the
+ maintenance process and may even introduce mistakes due to the
+ violation of undiscovered invariants.
+
+ This paper presents Typed Scheme, an explicitly typed extension of PLT
+ Scheme, an untyped scripting language. Its type system is based on
+ the novel notion of occurrence typing, which we formalize and
+ mechanically prove sound. The implementation of Typed Scheme
+ additionally borrows elements from a range of approaches, including
+ recursive types, true unions and subtyping, plus polymorphism combined
+ with a modicum of local inference.
+
+ The formulation of occurrence typing naturally leads to a simple and
+ expressive version of predicates to describe refinement types. A
+ Typed Scheme program can use these refinement types to keep track of
+ arbitrary classes of values via the type system. Further, we show how
+ the Typed Scheme type system, in conjunction with simple recursive
+ types, is able to encode refinements of existing datatypes, thus
+ expressing both proposed variations of refinement types.",
+ paper = "Tobi11.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Tofte, Mads}
+\begin{chunk}{axiom.bib}
+@misc{Toft09,
+ author = "Tofte, Mads",
+ title = {{Tips for Computer Scientists on Standard ML (Revised)}},
+ link = "\url{http://www.itu.dk/people/tofte/publ/tips.pdf}",
+ year = "2009",
+ paper = "Toft09.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Tuhola, Henri}
\begin{chunk}{axiom.bib}
@misc{Tuho18,
@@ -16533,8 +18228,69 @@ when shown in factored form.
\end{chunk}
+\index{Vuillemin, Jean}
+\begin{chunk}{axiom.bib}
+@phdthesis{Vuil73,
+ author = "Vuillemin, Jean",
+ title = {{Proof Techniques for Recursive Programs}},
+ school = "Stanford University",
+ year = "1973",
+ abstract =
+ "The concept of least fixed-point of a continuous function can be
+ considered as the unifying thread of this dissertation.
+
+ The connections between fixed-points and recursive programs are
+ detailed in chapter 2, providing some insights on practical
+ implementations of recursion. There are two usual characterizations
+ of the least fixed-point of a continuous function. To the first
+ characterization, due to Knaster and Tarski, corresponds a class
+ of proof techniques for programs, as described in Chapter 3. The
+ other characterization of least fixed points, better known as
+ Kleene's first recursion theorem, is discussed in Chapter 4. It
+ has the advantage of being effective and it leads to a wider class
+ of proof techniques.",
+ paper = "Vuil73.pdf"
+}
+
+\end{chunk}
+
\subsection{W} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Waaldijk, Frank}
+\begin{chunk}{axiom.bib}
+@misc{Waal03,
+ author = "Waaldijk, Frank",
+ title = {{On the Foundations of Constructive Mathematics}},
+ link = "\url{}",
+ year = "2003",
+ abstract =
+ "We discuss the foundations of constructive mathematics, including
+ recursive mathematics and intuitionism, in relation to classical
+ mathematics. There are connections with the foundations of physics,
+ due to the way in which the different branches of mathematics reflect
+ reality. Many different axioms and their interrelationship are
+ discussed. We show that there is a fundamental problem in bish
+ (Bishop's school of constructive mathematics) with regard to its
+ current definition of `continuous function'. This problem is closely
+ related to the definition in bish of `locally compact'.
+
+ Possible approaches to this problem are discussed. Topology seems to
+ be a key to understanding many issues. We offer several new
+ simplifying axioms, which can form bridges between the various
+ branches of constructive mathematics and classical mathematics
+ (`reuniting the antipodes'). We give a simplification of basic
+ intuitionistic theory, especially with regard to so-called `bar
+ induction'. We then plead for a limited number of axiomatic systems,
+ which differentiate between the various branches of mathematics.
+ Finally, in the appendix we offer bish an elegant topological
+ definition of `locally compact', which unlike the current definition
+ is equivalent to the usual classical and/or intuitionistic definition
+ in classical and intuitionistic mathematics respectively.",
+ paper = "Waal03.pdf"
+}
+
+\end{chunk}
+
\index{Wadler, Philip}
\begin{chunk}{axiom.bib}
@article{Wadl95,
@@ -16718,6 +18474,46 @@ when shown in factored form.
\end{chunk}
\index{Wiedijk, Freek}
+\index{Zwanenburg, J.}
+\begin{chunk}{axiom.bib}
+@article{Wied03a,
+ author = "Wiedijk, Freek and Zwanenburg, J.",
+ title = {{First Order Logic with Domain Conditions}},
+ journal = "LNCS",
+ volume = "2758",
+ pages = "221-237",
+ year = "2003",
+ abstract =
+ "This paper addresses the crucial issue in the design of a proof
+ development system of how to deal with partial functions and the
+ related question of how to treat undefined terms. Often the problem is
+ avoided by artificially making all functions total. However, that does
+ not correspond to the practice of everyday mathematics.
+
+ In type theory partial functions are modeled by giving functions extra
+ arguments which are proof objects. In that case it will not be
+ possible to apply functions outside their domain. However, having
+ proofs as first class objects has the disadvantage that it will be
+ unfamiliar to most mathematicians. Also many proof tools (like the
+ theorem prover Otter) will not be usable for such a logic. Finally
+ expressions get difficult to read because of these proof objects.
+
+ The PVS system solves the problem of partial functions
+ differently. PVS generates type-correctness conditions (TCCs) for
+ statements in its language. These are proof obligations that have to
+ be satisfied `on the side' to show that statements are well-formed.
+
+ We propose a TCC-like approach for the treatment of partial functions
+ in type theory. We add domain conditions (DCs) to classical
+ first-order logic and show the equivalence with a first order system
+ that treats partial functions in the style of type theory.",
+ paper = "Wied03a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Wiedijk, Freek}
\begin{chunk}{axiom.bib}
@misc{Wied18,
author = "Wiedijk, Freek",
@@ -16740,8 +18536,127 @@ when shown in factored form.
\end{chunk}
+\index{Winston, Patrick Henry}
+\begin{chunk}{axiom.bib}
+@inproceedings{Wins12,
+ author = "Winston, Patrick Henry",
+ title = {{The Next 50 Years: A Personal View}},
+ booktitle = "Biologically Inspired Cognitive Architectures 1",
+ year = "2012",
+ pages = "92-99",
+ publisher = "Elsevier B.V.",
+ abstract =
+ "I review history, starting with Turing's seminal paper, reaching
+ back ultimately to when our species started to outperform other
+ primates, searching for the questions that will help us develop a
+ computational account of human intelligence. I answer that the
+ right questions are: What's different between us and the other
+ primates and what's the same. I answer the {\sl what's different}
+ question by saying that we became symbolic in a way that enabled
+ story understanding, directed perception, and easy communication,
+ and other species did not. I argue against Turing's
+ reasoning-centered suggestions, offering that reasoning is just a
+ special case of story understanding. I answer the {\sl what's the
+ same} question by noting that our brains are largely engineered in
+ the same exotic way, with information flowing in all directions at
+ once. By way of example, I illustrate how these answers can
+ influence a research program, describing the Genesis system, a
+ system that works with short summaries of stories, provided in
+ English, together with low-level {\sl common-sense rules} and
+ higher-level {\sl concept patterns}, likewise expressed in
+ English. Genesis answers questions, notes abstract concepts such
+ as {\sl revenge}, tells stories in a listener-aware way, and fills
+ in story gaps using precedents. I conclude by suggesting,
+ optimistically, that a genuine computational theory of human
+ intelligence will emerge in the next 50 years if we stick to the
+ right, biologically inspired questions, and work toward
+ biologically informed models.",
+ paper = "Wins12.pdf"
+}
+
+\end{chunk}
+
\subsection{X} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Xi, Hongwei}
+\index{Pfenning, Frank}
+\begin{chunk}{axiom.bib}
+@inproceedings{Xixx99,
+ author = "Xi, Hongwei and Pfenning, Frank",
+ title = {{Dependent Types in Practical Programming}},
+ booktitle = "SIGPLAN-SIGACT Symp. on Principles of Programming
+ Languages",
+ year = "1999",
+ link = "\url{https://www.cs.cmu.edu/~fp/papers/popl99.pdf}",
+ publisher = "ACM",
+ abstract =
+ "We present an approach to enriching the type system of ML with a
+ restricted form of dependent types, where type index objects are drawn
+ from a constraint domain C, leading to the DML(C) language schema.
+ This allows specification and inference of significantly more precise
+ type information, facilitating program error detection and compiler
+ optimization. A major complication resulting from introducing
+ dependent types is that pure type inference for the enriched system is
+ no longer possible, but we show that type-checking a sufficiently
+ annotated program in DML(C) can be reduced to constraint
+ satisfaction in the constraint domain C. We exhibit the
+ unobtrusiveness of our approach through practical examples and prove
+ that DML(C) is conservative over ML. The main contribution of the
+ paper lies in our language design, including the formulation of
+ type-checking rules which makes the approach practical. To our
+ knowledge, no previous type system for a general purpose programming
+ language such as ML has combined dependent types with features including
+ datatype declarations, higher-order functions, general recursions,
+ let-polymorphism, mutable references, and exceptions. In addition,
+ we have finished a prototype implementation of DML(C) for an integer
+ constraint domain C, where constraints are linear inequalities (Xi
+ and Pfenning 1998).",
+ paper = "Xixx99.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Xue, Tao}
+\begin{chunk}{axiom.bib}
+@phdthesis{Xuex13,
+ author = "Xue, Tao",
+ title = {{Theory and Implementation of Coercive Subtyping}},
+ school = "University of London",
+ year = "2013",
+ abstract =
+ "Coercive subtyping is a useful and powerful framework of subtyping
+ for type theories. In this thesis, we point out the problem in the
+ old formulation of coercive subtyping in [Luo99], give a new and
+ adequate formulation $T[C]$, the system that extends a type theory $T$
+ with coercive subtyping based on a set $C$ of basic subtyping
+ judgements, and show that coercive subtyping is a conservative
+ extension and, in a more general sense, a definitional extension.
+
+ We introduce an intermediate system, the star-calculus $T[C]^*$, in
+ which the positions that require coercion insertions are marked, and
+ show that $T[C]^*$ is a conservative extension of $T$ and that $T[C]^*$
+ is equivalent to $T[C]$. Furthermore, in order to capture all the
+ properties of the coercive subtyping framework, we introduce another
+ intermediate system $T[C]_{0K}$ which does not contain the coercion
+ application rules. We show that $T[C]^*$ is actually a definitional
+ extension of $T[C]_{0K}$, which is a conservative extension of $T$.
+ This makes clear what we mean by coercive subtyping being a
+ conservative extension and amends a technical problem that has led to
+ a gap in the earlier conservativity proof.
+
+ Another part of the work in this thesis concerns the implementation of
+ coercive subtyping in the proof assistant Plastic. Coercive subtyping
+ was implemented in Plastic by Paul Callaghan [CL01]. We have done
+ some improvement based on that work, fixed some problems of Plastic,
+ and implemented a new kind of data type called {\sl dot-types}, which are
+ special data types useful in formal semantics to describe interesting
+ linguistic phenomena such as copredication, in Plastic.",
+ paper = "Xuex13.pdf"
+}
+
+\end{chunk}
+
\subsection{Y} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\subsection{Z} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -17441,6 +19356,32 @@ when shown in factored form.
\end{chunk}
+\index{Avigad, Jeremy}
+\index{Holzl, Johannes}
+\index{Serafin, Luke}
+\begin{chunk}{axiom.bib}
+@misc{Avig17c,
+ author = "Avigad, Jeremy and Holzl, Johannes and Serafin, Luke",
+ title = {{A Formally Verified Proof of the Central Limit Theorem}},
+ link = "\url{}",
+ year = "2017",
+ abstract =
+ "We describe a proof of the Central Limit Theorem that has been
+ formally verified in the Isabelle proof assistant. Our formalization
+ builds upon and extends Isabelle’s libraries for analysis and
+ measure-theoretic probability. The proof of the theorem uses
+ characteristic functions, which are a kind of Fourier transform, to
+ demonstrate that, under suitable hypotheses, sums of random
+ variables converge weakly to the standard normal distribution. We
+ also discuss the libraries and infrastructure that supported the
+ formalization, and reflect on some of the lessons we have learned
+ from the effort.",
+ paper = "Avig17c.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{B} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Backus, John}
@@ -23400,7 +25341,7 @@ when shown in factored form.
author = "Paulson, Lawrence C.",
title = {{ISABELLE: A Generic Theorem Prover}},
year = "1994",
- publisher = "Springer",
+ publisher = "Springer-Verlag",
isbn = "978-3-540-58244-1",
}
@@ -40991,7 +42932,7 @@ December 1992.
with this in the Axiom computer algebra system. As a result of this
experience, we suggest certain strengthenings of the algorithm.",
paper = "Dave92.pdf",
- keywords = "axiomref",
+ keywords = "axiomref, printed",
beebe = "Davenport:1993:PTR"
}
@@ -41121,6 +43062,20 @@ May 1984
\end{chunk}
\index{Davenport, James H.}
+\begin{chunk}{axiom.bib}
+@article{Dave12b,
+ author = "Davenport, James H.",
+ title = {{Small Algorithms for Small Systems}},
+ journal = "ACM Comm. in Computer Algebra",
+ volume = "46",
+ number = "1",
+ year = "2012",
+ paper = "Dave12b.pdf"
+}
+
+\end{chunk}
+
+\index{Davenport, James H.}
\begin{chunk}{ignore}
\bibitem[Davenport 12]{Dav12} Davenport, J.H.
title = {{Computer Algebra}},
@@ -42106,7 +44061,9 @@ Grant citation GR/L48256 Nov 1, 1997-Feb 28, 2001
This paper considers the link to the NAG Fortran Library from version
2.0 of Axiom and shows how we can build on this to extend and simplify
the interface using an expert system for choosing and using the
- numerical routines."
+ numerical routines.",
+ paper = "Dupe05.pdf",
+ ksyaodxz = "axiomref"
}
\end{chunk}
@@ -55017,7 +56974,8 @@ Oxford University Press (2000) ISBN0-19-512516-9
author = "Zippel, Richard",
title = {{The Weyl Computer Algebra Substrate}},
year = "1989",
- link = "\url{https://ecommons.cornell.edu/bitstream/handle/1813/6917/90-1077.pdf}",
+ link =
+ "\url{https://ecommons.cornell.edu/bitstream/handle/1813/6917/90-1077.pdf}",
paper = "Zipp89.pdf",
keywords = "axiomref, printed"
}
@@ -55072,8 +57030,8 @@ Oxford University Press (2000) ISBN0-19-512516-9
computer algebra systems (polynomial, rational functions, matrices,
etc.), domains (e.g., Z, Q[x, y, z]) are also first class objects in
Weyl.",
- paper = "Zipp93.pdf, printed",
- keywords = "axiomref"
+ paper = "Zipp93.pdf",
+ keywords = "axiomref, printed"
}
\end{chunk}
@@ -55190,6 +57148,19 @@ Dover Publications. (1968)
\end{chunk}
+\index{Alford, W.R.}
+\index{Granville, A.}
+\index{Pomerance, C.}
+\begin{chunk}{axiom.bib}
+@misc{Alfo92,
+ author = "Alford, W.R. and Granville, A. and Pomerance, C.",
+ title = {{There are Infinitely Many Carmichael Numbers}},
+ year = "1992",
+ comment = "Preprint"
+}
+
+\end{chunk}
+
\index{Altmann, Simon L.}
\begin{chunk}{axiom.bib}
@book{Altm05,
@@ -55270,6 +57241,19 @@ National Physical Laboratory. (1982)
\end{chunk}
+\index{Arnault, F.}
+\begin{chunk}{axiom.bib}
+@misc{Arna91,
+ author = "Arnault, F.",
+ title = {{Le Test de Primalite de Rabin-Miller: Un Nombre compose
+ qui le ``passe''}},
+ comment = "Report 61",
+ university = "Universite de Poitiers Departement de Mathematiques",
+ year = "1991"
+}
+
+\end{chunk}
+
\index{Astesiano, Egidio}
\index{Bidoit, Michel}
\index{Kirchner, Helene}
@@ -56000,6 +57984,23 @@ Ginn \& Co., Boston and New York. (1962)
\end{chunk}
+\index{Blair, Fred W.}
+\index{Griesmer, James H.}
+\index{Harry, Joseph}
+\index{Pivovonsky, Mark}
+\begin{chunk}{axiom.bib}
+@misc{Blai71,
+ author = "Blair, Fred W. and Griesmer, James H. and Harry, Joseph and
+ Pivovonsky, Mark",
+ title = {{Design and Development Document for Lisp on Several S/360
+ Operating Systems}},
+ year = "1971",
+ publisher = "IBM Research",
+ paper = "Blai71.pdf"
+}
+
+\end{chunk}
+
\index{Bogen, Richard}
\begin{chunk}{axiom.bib}
@book{Boge77,
@@ -57335,6 +59336,18 @@ Projet SAFIR, INRIA Sophia Antipolis Nov 25, 1998
\end{chunk}
+\index{Damgard, I.}
+\index{Landrock, P.}
+\begin{chunk}{axiom.bib}
+@misc{Damg91,
+ author = "Damgard, I. and Landrock, P.",
+ title = {{Improved Bounds for the Rabin Primality Test}},
+ booktitle = "Proc. 3rd IMA Conf. on Coding and Cryptography",
+ year = "1991"
+}
+
+\end{chunk}
+
\index{Dantzig, G. B.}
\begin{chunk}{ignore}
\bibitem[Dantzig 63]{Dan63} Dantzig G B
@@ -57383,6 +59396,20 @@ Princeton University Press. (1963)
\end{chunk}
\index{Davenport, James H.}
+\index{Smith, G.C.}
+\begin{chunk}{axiom.bib}
+@misc{Dave87,
+ author = "Davenport, James H. and Smith, G.C.",
+ title = {{Rabin's Primality Testing Algorithm -- a Group Theory View}},
+ school = "University of Bath",
+ type = "technical report",
+ number = "87-04",
+ year = "1987"
+}
+
+\end{chunk}
+
+\index{Davenport, James H.}
\begin{chunk}{ignore}
\bibitem[Davenport]{Dav} Davenport, James
title = {{On Brillhart Irreducibility.}},
@@ -58942,7 +60969,7 @@ Grove, IL, USA and Oxford, UK, 1992
computation, distributed problem-solving environments, and the
coupling of tools for specific applications.",
paper = "Gray98.pdf",
- keywords = "printed"
+ keywords = "printed, axiomref"
}
\end{chunk}
@@ -59407,6 +61434,19 @@ January 1956, 10-15
\end{chunk}
+\index{Hereman, Willy}
+\begin{chunk}{axiom.bib}
+@misc{Here96,
+ author = "Hereman, Willy",
+ title = {{The Incredible World of Symbolic Mathematics.
+ A Review of Computer Algebra Systems}},
+ year = "1996",
+ link = "\url{https://inside.mines.edu/~whereman/papers/Hereman-PhysicsWorld-9-March1996.pdf}",
+ paper = "Here96.pdf"
+}
+
+\end{chunk}
+
\index{Hock, W.}
\index{Schittkowski, K.}
\begin{chunk}{ignore}
@@ -59706,6 +61746,17 @@ Dept. G68, P.O. Box 1900, Boulder, Colorado, USA 80301-9191.
\end{chunk}
+\index{Jaeschke, G.}
+\begin{chunk}{axiom.bib}
+@misc{Jaes91,
+ author = "Jaeschke, G.",
+ title = {{Private Communication}},
+ comment = "to James Davenport",
+ year = "1991"
+}
+
+\end{chunk}
+
\index{James, Gordon}
\index{Kerber, Adalbert}
\begin{chunk}{ignore}
@@ -60213,6 +62264,17 @@ J. Symbolic Computations 8, 545-552 (1989)
\end{chunk}
+\index{Koblitz, N.}
+\begin{chunk}{axiom.bib}
+@book{Kobl87,
+ author = "Koblitz, N.",
+ title = {{A Course in Number Theory and Cryptography}},
+ publisher = "Springer-Verlag",
+ year = "1987"
+}
+
+\end{chunk}
+
\index{Kohlhase, Michael}
\index{Rabe, Florian}
\begin{chunk}{axiom.bib}
@@ -60596,6 +62658,17 @@ Prentice-Hall. (1974)
\end{chunk}
+\index{Leech, J.}
+\begin{chunk}{axiom.bib}
+@misc{Leec92,
+ author = "Leech, J.",
+ title = {{Private Communication}},
+ comment = "to James Davenport",
+ year = "1992"
+}
+
+\end{chunk}
+
\index{Legendre, George L.}
\index{Grazini, Stefano}
\begin{chunk}{axiom.bib}
@@ -60609,6 +62682,20 @@ Prentice-Hall. (1974)
\end{chunk}
+\index{Lenstra Jr., H.W.}
+\begin{chunk}{axiom.bib}
+@article{Lens81,
+ author = "Lenstra Jr., H.W.",
+ title = {{Primality Testing Algorithms}},
+ journal = "Lecture Notes in Mathematics",
+ volume = "901",
+ publisher = "Springer-Verlag",
+ pages = "243-257",
+ year = "1981"
+}
+
+\end{chunk}
+
\index{Lescanne, Pierre}
\begin{chunk}{axiom.bib}
@article{Lesc87,
@@ -62210,6 +64297,21 @@ Acta Math. 68 (1937) 145-254.
\end{chunk}
+\index{Pomerance, C.}
+\index{Selfridge, J.L.}
+\index{Wagstaff Jr., S.S.}
+\begin{chunk}{axiom.bib}
+@article{Pome80,
+ author = "Pomerance, C. and Selfridge, J.L. and Wagstaff Jr., S.S.",
+ title = {{The Pseudoprimes up to $25\cdot 10^9$}},
+ journal = "Math. Comp.",
+ volume = "35",
+ pages = "1003-1026",
+ year = "1980"
+}
+
+\end{chunk}
+
\index{Popov, Nikolaj}
\index{Jebelean, Tudor}
\begin{chunk}{axiom.bib}
@@ -62510,6 +64612,19 @@ ACM Transactions on Mathematical Software, 32(2):180-194, June 2006.
\end{chunk}
+\index{Rabin, M.O.}
+\begin{chunk}{axiom.bib}
+@article{Rabi80,
+ author = "Rabin, M.O.",
+ title = {{Probabilistic Algorithm for Testing Primality}},
+ journal = "J. Number Theory",
+ volume = "12",
+ pages = "128-138",
+ year = "1980"
+}
+
+\end{chunk}
+
\index{Rabinowitz, P.}
\begin{chunk}{ignore}
\bibitem[Rabinowitz 70]{Rab70} Rabinowitz P.
diff --git a/changelog b/changelog
index 44b857a..ec25526 100644
--- a/changelog
+++ b/changelog
@@ -1,3 +1,5 @@
+20180628 tpd src/axiom-website/patches.html 20180628.01.tpd.patch
+20180628 tpd books/bookvolbib add references
20180606 tpd src/axiom-website/patches.html 20180606.03.tpd.patch
20180606 tpd books/bookvol0 fix minor typo to citation
20180606 tpd src/axiom-website/patches.html 20180606.02.tpd.patch
diff --git a/patch b/patch
index 44b0830..f8cda65 100644
--- a/patch
+++ b/patch
@@ -1,3 +1,1705 @@
-books/bookvol0 fix minor typo to citation
+books/bookvolbib add references
-Goal: Axiom Maintenance
+Goal: Proving Axiom Sane
+
+\index{Davenport, James H.}
+\begin{chunk}{axiom.bib}
+@article{Dave12b,
+ author = "Davenport, James H.",
+ title = {{Small Algorithms for Small Systems}},
+ journal = "ACM Comm. in Computer Algebra",
+ volume = "46",
+ number = "1",
+ year = "2012",
+ paper = "Dave12b.pdf"
+}
+
+\end{chunk}
+
+\index{Stoutemyer, David R.}
+\begin{chunk}{axiom.bib}
+@article{Stou12,
+ author = "Stoutemyer, David R.",
+ title = {{Series Crimes}},
+ journal = "ACM Comm. in Computer Algebra",
+ volume = "46",
+ number = "2",
+ year = "2012",
+ abstract =
+ "Puiseux series are power series in which the exponents can be
+ fractional and/or negative rational numbers. Several computer algebra
+ systems have one or more built-in or loadable functions for computing
+ truncated Puiseux series. Some are generalized to allow coefficients
+ containing functions of the series variable that are dominated by any
+ power of that variable, such as logarithms and nested logarithms of
+ the series variable. Some computer algebra systems also have built-in
+ or loadable functions that compute infinite Puiseux series.
+ Unfortunately, there are some little-known pitfalls in
+ computing Puiseux series. The most serious of these is expansions
+ within branch cuts or at branch points that are incorrect for some
+ directions in the complex plane. For example with each series
+ implementation accessible to you:
+
+ Compare the value of $(z^2 + z^3)^{3/2}$ with that of its
+ truncated series expansion about $z = 0$, approximated at
+ $z = −0.01$. Does the series converge to a value that is
+ the negative of the correct value?
+
+ Compare the value of $ln(z^2 + z^3)$ with its truncated series
+ expansion about $z = 0$, approximated at $z= −0.01 + 0.1i$. Does
+ the series converge to a value that is incorrect by $2\pi i$?
+
+ Compare $arctanh(−2 + ln(z)z)$ with its truncated series
+ expansion about $z = 0$, approximated at $z = −0.01$. Does the series
+ converge to a value that is incorrect by about $\pi i$?
+
+ At the time of this writing, most implementations that accommodate
+ such series exhibit such errors. This article describes how to avoid
+ these errors both for manual derivation of series and when
+ implementing series packages.",
+ paper = "Stou12.pdf"
+}
+
+\end{chunk}
+
+\index{Blair, Fred W.}
+\index{Griesmer, James H.}
+\index{Harry, Joseph}
+\index{Pivovonsky, Mark}
+\begin{chunk}{axiom.bib}
+@misc{Blai71,
+ author = "Blair, Fred W. and Griesmer, James H. and Harry, Joseph and
+ Pivovonsky, Mark",
+ title = {{Design and Development Document for Lisp on Several S/360
+ Operating Systems}},
+ year = "1971",
+ publisher = "IBM Research",
+ paper = "Blai71.pdf"
+}
+
+\end{chunk}
+
+\index{Alford, W.R.}
+\index{Granville, A.}
+\index{Pomerance, C.}
+\begin{chunk}{axiom.bib}
+@misc{Alfo92,
+ author = "Alford, W.R. and Granville, A. and Pomerance, C.",
+ title = {{There are Infinitely Many Carmichael Numbers}},
+ year = "1992",
+ comment = "Preprint"
+}
+
+\end{chunk}
+
+\index{Arnault, F.}
+\begin{chunk}{axiom.bib}
+@misc{Arna91,
+ author = "Arnault, F.",
+ title = {{Le Test de Primalite de Rabin-Miller: Un Nombre compose
+ qui le ``passe''}},
+ comment = "Report 61",
+ university = "Universite de Poitiers Departement de Mathematiques",
+ year = "1991"
+}
+
+\end{chunk}
+
+\index{Damgard, I.}
+\index{Landrock, P.}
+\begin{chunk}{axiom.bib}
+@misc{Damg91,
+ author = "Damgard, I. and Landrock, P.",
+ title = {{Improved Bounds for the Rabin Primality Test}},
+ booktitle = "Proc. 3rd IMA Conf. on Coding and Cryptography",
+ year = "1991"
+}
+
+\end{chunk}
+
+\index{Davenport, James H.}
+\index{Smith, G.C.}
+\begin{chunk}{axiom.bib}
+@misc{Dave87,
+ author = "Davenport, James H. and Smith, G.C.",
+ title = {{Rabin's Primality Testing Algorithm -- a Group Theory View}},
+ school = "University of Bath",
+ type = "technical report",
+ number = "87-04",
+ year = "1987"
+}
+
+\end{chunk}
+
+\index{Jaeschke, G.}
+\begin{chunk}{axiom.bib}
+@misc{Jaes91,
+ author = "Jaeschke, G.",
+ title = {{Private Communication}},
+ comment = "to James Davenport",
+ year = "1991"
+}
+
+\end{chunk}
+
+\index{Leech, J.}
+\begin{chunk}{axiom.bib}
+@misc{Leec92,
+ author = "Leech, J.",
+ title = {{Private Communication}},
+ comment = "to James Davenport",
+ year = "1992"
+}
+
+\end{chunk}
+
+\index{Koblitz, N.}
+\begin{chunk}{axiom.bib}
+@book{Kobl87,
+ author = "Koblitz, N.",
+ title = {{A Course in Number Theory and Cryptography}},
+ publisher = "Springer-Verlag",
+ year = "1987"
+}
+
+\end{chunk}
+
+\index{Lenstra Jr., H.W.}
+\begin{chunk}{axiom.bib}
+@article{Lens81,
+ author = "Lenstra Jr., H.W.",
+ title = {{Primality Testing Algorithms}},
+ journal = "Lecture Notes in Mathematics",
+ volume = "901",
+ publisher = "Springer-Verlag",
+ pages = "243-257",
+ year = "1981"
+}
+
+\end{chunk}
+
+\index{Pomerance, C.}
+\index{Selfridge, J.L.}
+\index{Wagstaff Jr., S.S.}
+\begin{chunk}{axiom.bib}
+@article{Pome80,
+ author = "Pomerance, C. and Selfridge, J.L. and Wagstaff Jr., S.S.",
+ title = {{The Pseudoprimes up to $25\cdot 10^9$}},
+ journal = "Math. Comp.",
+ volume = "35",
+ pages = "1003-1026",
+ year = "1980"
+}
+
+\end{chunk}
+
+\index{Rabin, M.O.}
+\begin{chunk}{axiom.bib}
+@article{Rabi80,
+ author = "Rabin, M.O.",
+ title = {{Probabilistic Algorithm for Testing Primality}},
+ journal = "J. Number Theory",
+ volume = "12",
+ pages = "128-138",
+ year = "1980"
+}
+
+\end{chunk}
+
+\index{Steele, Guy Lewis}
+\index{Sussman, Gerald Jay}
+\begin{chunk}{axiom.bib}
+@techreport{Stee78a,
+ author = "Steele, Guy Lewis and Sussman, Gerald Jay",
+ title = {{The Art of the Interpreter}},
+ institution = "MIT",
+ type = "technical report",
+ number = "AI Memo No. 453",
+ year = "1978",
+ abstract =
+ "We examine the effects of various language design decisions on the
+ programming styles available to a user of the language, with
+ particular emphasis on the ability to incrementally construct
+ modular systems. At each step we exhibit an interactive
+ meta-circular interpreter for the language under
+ consideration. Each new interpreter is the result of an
+ incremental change to the previous interpreter.
+
+ We explore the consequences of various variable binding
+ disciplines and the introduction of side effects. We find that
+ dynamic scoping is unsuitable for constructing procedural
+ abstractions, but has another role as an agent of modularity,
+ being a structured form of side effect. More general side effects
+ are also found to be necessary to promote modular style. We find
+ that the notion of side effect and the notion of equality (object
+ identity) are mutually constraining; to define one is to define
+ the other.
+
+ The interpreters we exhibit are all written in a simple dialect of
+ LISP, and all implement LISP-like languages. A subset of these
+ interpreters constitute a partial historical reconstruction of the
+ actual evolution of LISP."
+}
+
+\end{chunk}
+
+\index{Constable, Robert L.}
+\begin{chunk}{axiom.bib}
+@incollection{Cons03,
+ author = "Constable, Robert L.",
+ title = {{Naive Computational Type Theory}},
+ booktitle = "Proof and System Reliability",
+ publisher = "unknown",
+ link = "\url{http://www.nuprl.org/documents/Constable/naive.pdf}",
+ year = "2003",
+ paper = "Cons03.pdf"
+}
+
+\end{chunk}
+
+\index{Waaldijk, Frank}
+\begin{chunk}{axiom.bib}
+@misc{Waal03,
+ author = "Waaldijk, Frank",
+ title = {{On the Foundations of Constructive Mathematics}},
+ link = "\url{}",
+ year = "2003",
+ abstract =
+ "We discuss the foundations of constructive mathematics, including
+ recursive mathematics and intuitionism, in relation to classical
+ mathematics. There are connections with the foundations of physics,
+ due to the way in which the different branches of mathematics reflect
+ reality. Many different axioms and their interrelationship are
+ discussed. We show that there is a fundamental problem in bish
+ (Bishop’s school of constructive mathematics) with regard to its
+ current definition of ‘continuous function’. This problem is closely
+ related to the definition in bish of ‘locally compact’.
+
+ Possible approaches to this problem are discussed. Topology seems to
+ be a key to understanding many issues. We offer several new
+ simplifying axioms, which can form bridges between the various
+ branches of constructive mathematics and classical mathematics
+ (‘reuniting the antipodes’). We give a simplification of basic
+ intuitionistic theory, especially with regard to so-called ‘bar
+ induction’. We then plead for a limited number of axiomatic systems,
+ which differentiate between the various branches of mathematics.
+ Finally, in the appendix we offer bish an elegant topological
+ definition of ‘locally compact’, which unlike the current definition
+ is equivalent to the usual classical and/or intuitionistic definition
+ in classical and intuitionistic mathematics respectively.",
+ paper = "Waal03.pdf"
+}
+
+\end{chunk}
+
+\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
+@article{Farm95b,
+ author = "Farmer, William M.",
+ title = {{Reasoning about Partial Functions with the Aid of a
+ Computer}},
+ journal = "Erkenntnis",
+ volume = "43",
+ number = "3",
+ pages = "279-294",
+ year = "1995",
+ abstract =
+ "Partial functions are ubiquitous in both mathematics and computer
+ science. Therefore, it is imperative that the underlying logical
+ formalism for a general-purpose mechanized mathematics system
+ provide strong support for reasoning about partial
+ functions. Unfortunately, the common logical formalisms --
+ first-order logic, type theory, and set theory -- are usually only
+ adequate for reasoning about partial functions {\sl in
+ theory}. However, the approach to partial functions traditionally
+ employed by mathematicians is quite adequate {\sl in
+ practice}. This paper shows how the traditional approach to
+ partial functions can be formalized in a range of formalisms that
+ includes first-order logic, simple type theory, and Von-Neuman,
+ Bernays, Godel set theory. It argues that these new formalisms
+ allow one to directly reason about partial functions; are based on
+ natural, well-understood, familiar principles; and can be
+ effectively implemented in mechanized mathematics systems.",
+ paper = "Farm95b.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Fong, Brendan}
+\index{Spivak, David I.}
+\begin{chunk}{axiom.bib}
+@misc{Fong18,
+ author = "Fong, Brendan and Spivak, David I.",
+ title = {{Seven Sketches in Compositionality: An Invitation to
+ Applied Category Theory}},
+ link = "\url{http://math.mit.edu/~dspivak/teaching/sp18/7Sketches.pdf}",
+ year = "2018",
+ paper = "Fong18.pdf"
+}
+
+\end{chunk}
+
+\index{Popov, Nikolaj}
+\begin{chunk}{axiom.bib}
+@misc{Popo03,
+ author = "Popov, Nikolaj",
+ title = {{Verification Using Weakest Precondition Strategy}},
+ comment = "Talk at Comp. Aided Verification of Information Systems",
+ year = "2003",
+ location = "Timisoara, Romania",
+ abstract =
+ "We describe the weakest precondition strategy for verifying
+ programs. This is a method which takes a specification and an
+ annotated program and generates so-called verification conditions:
+ mathematical lemmata which have to be proved in order to obtain a
+ formal correctness proof for the program with respect to its
+ specification. There are rules for generating the intermediate pre
+ and post conditions algorithmically. Based on these rules, we have
+ developed a package for generating verification conditions.",
+ paper = "Popo03.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kovacs, Laura}
+\index{Jebelean, Tudor}
+\begin{chunk}{axiom.bib}
+@article{Kova04,
+ author = "Kovacs, Laura and Jebelean, Tudor",
+ title = {{Practical Aspects of Imperative Program Verification in
+ Theorema}},
+ journal = "Analele Universitatii din Timisoara",
+ volume = "XXXXIX",
+ number = "2",
+ year = "2004",
+ abstract =
+ "Approaching the problem of imperative program verification from a
+ practical point of view has certain implications concerning: the style
+ of specifications, the programming language which is used, the help
+ provided to the user for finding appropriate loop invariants, the
+ theoretical frame used for formal verification, the language used
+ for expressing generated verification theorems as well as the database
+ of necessary mathematical knowledge, and finally the proving power,
+ style and language. The Theorema system (www.theorema.org) has
+ certain capabilities which make it appropriate for such a practical
+ approach. Our approach to imperative program verification in Theorema
+ is based on Hoare– Logic and the Weakest Precondition Strategy. In
+ this paper we present some practical aspect of our work, as well as a
+ novel method for verification of pro- grams that contain loops, namely
+ a method based on recurrence equation solvers for generating the
+ necessary loop invariants and termination terms automatically. We
+ have tested our system with numerous examples, some of these example
+ are presented at the end of the paper.",
+ paper = "Kova04.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kovacs, Laura}
+\index{Jebelean, Tudor}
+\begin{chunk}{axiom.bib}
+@article{Kova04a,
+ author = "Kovacs, Laura and Jebelean, Tudor",
+ title = {{Automated Generation of Loop Invariants by Recurrence
+ Solving in Theorema}},
+ year = "2004",
+ abstract =
+ "Most of the properties established during program verification are
+ either invariants or depend crucially on invariants. The effectiveness
+ of automated verification of (imperative) programs is therefore
+ sensitive to the ease with which invariants, even trivial ones, can be
+ automatically deduced. We present a method for invariant generation
+ that relies on combinatorial techniques, namely on recurrence solving
+ and variable elimination. The effectiveness of the method is
+ demonstrated on examples.",
+ paper = "Kova04a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kovacs, Laura}
+\begin{chunk}{axiom.bib}
+@phdthesis{Kova07,
+ author = "Kovacs, Laura",
+ title = {{Automated Invariant Generation by Algebraic Techniques for
+ Imperative Program Verification in Theorema}},
+ school = "Johannes-Kepler University Linz",
+ year = "2007",
+ abstract =
+
+ "This thesis presents algebraic and combinatorial approaches for
+ reasoning about imperative loops with assignments, sequencing and
+ conditionals.
+
+ A certain family of loops, called P-solvable , is defined for which
+ the value of each program variable can be expressed as a polynomial of
+ the initial values of variables, the loop counter, and some new
+ variables where there are algebraic dependencies among the new
+ variables. For such loops, a systematic method is developed for
+ generating polynomial invariants. Further, if the bodies of these
+ loops consist only of assignments and conditional branches, and test
+ conditions in the loop and conditionals are ignored, the method is
+ shown to be complete for some special cases. By completeness we mean
+ that it generates a set of polynomials from which, under additional
+ assumptions for loops with conditional branches, any polynomial
+ invariant can be derived. Many non-trivial algorithms working on
+ numbers can be naturally implemented using P-solvable loops.
+
+ By combining advanced techniques from algorithmic combinatorics,
+ symbolic summation, computer algebra and computational logic, a
+ framework is developed for generating polynomial invariants for
+ imperative programs operating on numbers.
+
+ Exploiting the symbolic manipulation capabilities of the computer
+ algebra system Mathematica , these techniques are implemented in a new
+ software package called Aligator . By using several combinatorial
+ packages developed at RISC, Aligator includes algorithms for solving
+ special classes of recurrence relations (those that are either
+ Gosper-summable or C-finite) and generating polynomial dependencies
+ among algebraic exponential sequences. Using Aligator , a complete set
+ of polynomial invariants is successfully generated for numerous
+ imperative programs working on numbers.
+
+ The automatically obtained invariant assertions are subsequently used
+ for proving the partial correctness of programs by generating
+ appropriate verification conditions as first-order logical formulas.
+ Based on Hoare logic and the weakest precondition strategy, this
+ verification process is supported in an imperative verification
+ environment implemented in the Theorema system. Theorema is
+ convenient for such an integration given that it is built on top of
+ the computer algebra system Mathematica and includes automated methods
+ for theorem proving in predicate logic, domain specific reasoning and
+ proving by induction.",
+ paper = "Kova07.pdf"
+}
+
+\end{chunk}
+
+\index{Kovacs, Laura}
+\begin{chunk}{axiom.bib}
+@inproceedings{Kova08,
+ author = "Kovacs, Laura",
+ title = {{Reasoning Algebraically About P-Solvable Loops}},
+ booktitle = "Int. Conf. on Tools and Algorithms for the Construction
+ and Analysis of Systems",
+ pages = "249-264",
+ year = "2008",
+ abstract =
+ "We present a method for generating polynomial invariants for a
+ subfamily of imperative loops operating on numbers, called the
+ P-solvable loops. The method uses algorithmic combinatorics and
+ algebraic techniques. The approach is shown to be complete for some
+ special cases. By completeness we mean that it generates a set of
+ polynomial invariants from which, under additional assumptions, any
+ polynomial invariant can be derived. These techniques are implemented
+ in a new software package Aligator written in Mathematica and
+ successfully tried on many programs implementing interesting
+ algorithms working on numbers.",
+ paper = "Kova08.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Oury, Nicolas}
+\index{Swierstra, Wouter}
+\begin{chunk}{axiom.bib}
+@inproceedings{Oury08,
+ author = "Oury, Nicolas and Swierstra, Wouter",
+ title = {{The Power of Pi}},
+ link = "\url{https://cs.ru.nl/~wouters/Publications/ThePowerOfPi.pdf}",
+ booktitle = "Int. Conf. on Functional Programming",
+ year = "2008",
+ abstract =
+ "This paper exhibits the power of programming with dependent types by
+ dint of embedding three domain-specific languages: Cryptol, a
+ language for cryptographic protocols; a small data description
+ language; and relational algebra. Each example demonstrates
+ particular design patterns inherent to dependently-typed programming.
+ Documenting these techniques paves the way for further research in
+ domain-specific embedded type systems.",
+ paper = "Oury08.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Graillat, Stef}
+\index{Menissier-Morain, Valerie}
+\begin{chunk}{axiom.bib}
+@inproceedings{Grai07,
+ author = "Graillat, Stef and Menissier-Morain, Valerie",
+ title = {{Error-free Transformations in Real and Complex
+ Floating-point Arithmetic}},
+ booktitle = "Int. Symp. on Nonlinear Theory and Applications",
+ year = "2007",
+ abstract =
+ "Error-free transformation is a concept that makes it possible to
+ compute accurate results within a floating point arithmetic. Up to
+ now, it has only been studied for real floating point arithmetic. In
+ this short note, we recall the known error-free transformations for
+ real arithmetic and we propose some new error-free transformations for
+ complex floating point arithmetic. This will make it possible to
+ design some new accurate algorithms for summation, dot product and
+ polynomial evaluation with complex entries.",
+ paper = "Grai07.pdf"
+}
+
+\end{chunk}
+
+\index{Noble, James}
+\index{Black, Andrew P.}
+\index{Bruce, Kim B.}
+\index{Homer, Michael}
+\index{Miller, Mark S.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Nobl16,
+ author = "Noble, James and Black, Andrew P. and Bruce, Kim B. and
+ Homer, Michael and Miller, Mark S.",
+ title = {{The Left Hand of Equals}},
+ booktitle = "Int. Symp. of New Ideas, New Paradigms, and Reflections
+ on Programming and Software",
+ publisher = "ACM",
+ pages = "224-237",
+ year = "2016",
+ abstract =
+ "When is one object equal to another object? While object identity is
+ fundamental to object-oriented systems, object equality, although
+ tightly intertwined with identity, is harder to pin down. The
+ distinction between identity and equality is reflected in
+ object-oriented languages, almost all of which provide two variants of
+ 'equality', while some provide many more. Programmers can usually
+ override at least one of these forms of equality, and can always
+ define their own methods to distinguish their own objects.
+
+ This essay takes a reflexive journey through fifty years of identity
+ and equality in object-oriented languages, and ends somewhere we did
+ not expect: a 'left-handed' equality relying on trust and grace.",
+ paper = "Nobl16.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Avigad, Jeremy}
+\begin{chunk}{axiom.bib}
+@article{Avig18a,
+ author = "Avigad, Jeremy",
+ title = {{The Mechanization of Mathematics}},
+ journal = "Notices of the AMS",
+ volume = "65",
+ number = "6",
+ year = "2018",
+ pages = "681-690",
+ paper = "Avig18a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Leroy, Xavier}
+\begin{chunk}{axiom.bib}
+@techreport{Lero90,
+ author = "Leroy, Xavier",
+ title = {{The ZINC experiment: An Economical Implementation of the
+ ML Language.}},
+ type = "technical report",
+ institution = "Institut National de Recherche en Informatique et en
+ Automatique",
+ number = "117",
+ year = "1990",
+ abstract =
+ "This report details the design and implementation of the ZINC
+ system. This is an implementation of the ML language, intended to
+ serve as a test field for various extensions of the language, and
+ for new implementation techniques as well. This system is strongly
+ oriented toward separate compilation and the production of small,
+ standalone programs; type safety is ensured by a Modula-2-like
+ module system. ZINC uses simple, portable techniques, such as
+ bytecode interpretation; a sophisticated execution model helps
+ counterbalance the interpretation overhead. Other highlights
+ include an efficient implementation of records with inclusion
+ (subtyping).",
+ paper = "Lero90.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Cohn, Avra Jean}
+\begin{chunk}{axiom.bib}
+@phdthesis{Cohn79,
+ author = "Cohn, Avra Jean",
+ title = {{Machine Assisted Proofs of Recursion Implementation}},
+ school = "University of Edinburgh",
+ year = "1979",
+ link = "\url{https://www.era.lib.ed.ac.uk/handle/1842/6643}",
+ abstract =
+ "Three studies in the machine assisted proof of recursion
+ implementation are described. The verification system used is
+ Edinburgh LCF (Logic for Computable Functions). Proofs are generated,
+ in LCF, in a goal-oriented fashion by the application of strategies
+ reflecting informal proof plans. LCF is introduced in Chapter 1. We
+ present three case studies in which proof strategies are developed and
+ (except in the third) tested in LCF. Chapter 2 contains an account of
+ the machine generated proofs of three program transformations (from
+ recursive to iterative function schemata). Two of the examples are
+ taken from Manna and Waldinger. In each case, the recursion is
+ implemented by the introduction of a new data type, e.g., a stack or
+ counter. Some progress is made towards the development of a general
+ strategy for producing the equivalence proofs of recursive and
+ iterative function schemata by machine. Chapter 3 is concerned with
+ the machine generated proof of the correctness of a compiling
+ algorithm. The formulation, borrowed from Russell, includes a simple
+ imperative language with a while and conditional construct, and a low
+ level language of labelled statements, including jumps. We have, in
+ LCF, formalised his denotational descriptions of the two languages and
+ performed a proof of the preservation of the semantics under
+ compilation. In Chapter 4, we express and informally prove the
+ correctness of a compiling algorithm for a language containing
+ declarations and calls of recursive procedures. We present a low level
+ language whose semantics model a standard activation stack
+ implementation. Certain theoretical difficulties (connected with
+ recursively defined relations) are discussed, and a proposed proof in
+ LCF is outlined. The emphasis in this work is less on proving original
+ theorems, or even automatically finding proofs of known theorems, than
+ on (i) exhibiting and analysing the underlying structure of proofs,
+ and of machine proof attempts, and (ii) investigating the nature of
+ the interaction (between a user and a computer system) required to
+ generate proofs mechanically; that is, the transition from informal
+ proof plans to behaviours which cause formal proofs to be performed.",
+ paper = "Cohn79.pdf"
+}
+
+\end{chunk}
+
+\index{Russell, Bruce D.}
+\begin{chunk}{axiom.bib}
+@article{Russ77,
+ author = "Russell, Bruce D.",
+ title = {{Implementation Correctness Involving a Language with GOTO
+ Statements}},
+ journal = "SIAM Journal of Computing",
+ volume = "6",
+ number = "3",
+ year = "1977",
+ abstract =
+ "Two languages, one a simple structured programming language, the
+ other a simple goto language, are defined. A denotational semantics is
+ given for each language. An interpreter for the goto language is given
+ and is proved correct with respect to the denotational semantics. A
+ compiler from the structured to the goto language is defined and
+ proved to be a semantically invariant translation of programs. The
+ proofs are by computational induction.",
+ paper = "Russ77.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Paulson, Lawrence C.}
+\begin{chunk}{axiom.bib}
+@book{Paul87,
+ author = "Paulson, Lawrence C.",
+ title = {{Logic and Computation}},
+ publisher = "Press Syndicate of the University of Cambridge",
+ year = "1987",
+ isbn = "0-521-34632-0"
+}
+
+\end{chunk}
+
+\index{Shoenfield, Joseph R.}
+\begin{chunk}{axiom.bib}
+@book{Shoe67,
+ author = "Shoenfield, Joseph R.",
+ title = {{Mathematical Logic}},
+ publisher = "Association for Symbolic Logic",
+ year = "1967",
+ isbn = "1-56881-135-7"
+}
+
+\end{chunk}
+
+\index{Abramsky, S.}
+\index{Gabbay, Dov M.}
+\index{Maibaum, T.S.E}
+\begin{chunk}{axiom.bib}
+@book{Abra92,
+ author = "Abramsky, S. and Gabbay, Dov M. and Maibaum, T.S.E",
+ title = {{Handbook of Logic in Computer Science (Volume 2)}},
+ publisher = "Oxford Science Publications",
+ year = "1992",
+ isbn = "0-19-853761-1"
+}
+
+\end{chunk}
+
+\index{Basu, Saugata}
+\index{Pollack, Richard}
+\index{Roy, Marie-Francoise}
+\begin{chunk}{axiom.bib}
+@book{Basu10,
+ author = "Basu, Saugata and Pollack, Richard and Roy, Marie-Francoise",
+ title = {{Algorithms in Real Algebraic Geometry}},
+ publisher = "Springer-Verlag",
+ year = "2010",
+ isbn = "978-3-642-06964-2"
+}
+
+\end{chunk}
+
+\index{Harrison, John}
+\begin{chunk}{axiom.bib}
+@book{Harr09,
+ author = "Harrison, John",
+ title = {{Handbook of Practical Logic and Automated Reasoning}},
+ publisher = "Cambridge University Press",
+ year = "2009",
+ isbn = "978-0-521-89957-4"
+}
+
+\end{chunk}
+
+\index{Barendregt, Henk}
+\index{Dekkers, Wil}
+\index{Statman, Richard}
+\begin{chunk}{axiom.bib}
+@book{Bare13,
+ author = "Barendregt, Henk and Dekkers, Wil and Statman, Richard",
+ title = {{Lambda Calculus with Types}},
+ publisher = "Cambridge University Press",
+ year = "2013",
+ isbn = "978-0-521-76614-2"
+}
+
+\end{chunk}
+
+\index{Manna, Zohar}
+\index{Waldinger, Richard}
+\begin{chunk}{axiom.bib}
+@book{Mann85,
+ author = "Manna, Zohar and Waldinger, Richard",
+ title = {{The Logical Basis for Computer Programming (Vol 1)}},
+ publisher = "Addison-Wesley",
+ year = "1985",
+ isbn = "0-201-15260-2"
+}
+
+\end{chunk}
+
+\index{Pierce, Benjamin C.}
+\begin{chunk}{axiom.bib}
+@book{Pier02,
+ author = "Pierce, Benjamin C.",
+ title = {{Types and Programming Languages}},
+ publisher = "MIT Press",
+ year = "2002",
+ isbn = "978-0-262-16209-8"
+}
+
+\end{chunk}
+
+\index{Tarski, Alfred}
+\begin{chunk}{axiom.bib}
+@book{Tars46,
+ author = "Tarski, Alfred",
+ title = {{Introduction to Logic}},
+ publisher = "Dover",
+ year = "1946",
+ isbn = "978-0-486-28462-0"
+}
+
+\end{chunk}
+
+\index{Shankar, N.}
+\begin{chunk}{axiom.bib}
+@book{Shan94,
+ author = "Shankar, N.",
+ title = {{Metamathematics, Machines, and Godel's Proof}},
+ publisher = "Cambridge University Press",
+ year = "1994",
+ isbn = "0-521-58533-3"
+}
+
+\end{chunk}
+
+\index{Beeson, Michael J.}
+\begin{chunk}{axiom.bib}
+@book{Bees80,
+ author = "Beeson, Michael J.",
+ title = {{Foundations of Constructive Mathematics}},
+ publisher = "Springer-Verlag",
+ year = "1980",
+ isbn = "3-540-12173-0"
+}
+
+\end{chunk}
+
+\index{Shieber, Stuart M.}
+\index{Schabes, Yves}
+\index{Pereira, Fernando C.N.}
+\begin{chunk}{axiom.bib}
+@article{Shie95,
+ author = "Shieber, Stuart M. and Schabes, Yves and Pereira, Fernando C.N.",
+ title = {{Principles and Implementation of Deductive Parsing}},
+ journal = "J. Logic Programming",
+ volume = "24",
+ number = "1-2",
+ pages = "3-36",
+ year = "1995",
+ abstract =
+ "We present a system for generating parsers based directly on the
+ metaphor of parsing as deduction. Parsing algorithms can be
+ represented directly as deduction systems, and a single deduction
+ engine can interpret such deduction systems so as to implement the
+ corresponding parser. The method generalizes easily to parsers for
+ augmented phrase structure formalisms, such as definite-clause
+ grammars and other logic grammar formalisms, and has been used for
+ rapid prototyping of parsing algorithms for a variety of formalisms
+ including variants of tree-adjoining grammars, categorial grammars,
+ and lexicalized context-free grammars.",
+ paper = "Shie95.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Greve, David A.}
+\index{Kaufmann, Matt}
+\index{Manolios, Panagiotis}
+\index{Moore, J Strother}
+\index{Ray, Sandip}
+\index{Ruiz-Reina, Jose Luis}
+\index{Sumners, Rob}
+\index{Vroon, Daron}
+\index{Wilding, Matthew}
+\begin{chunk}{axiom.bib}
+@article{Grev08,
+ author = "Greve, David A. and Kaufmann, Matt and Manolios, Panagiotis
+ and Moore, J Strother and Ray, Sandip and Ruiz-Reina, Jose Luis
+ and Sumners, Rob and Vroon, Daron and Wilding, Matthew",
+ title = {{Efficient Execution in an Automated Reasoning Environment}},
+ journal = "Journal of Functional Programming",
+ volume = "18",
+ number = "1",
+ pages = "15-46",
+ year = "2008",
+ abstract =
+ "We describe a method that permits the user of a mechanized
+ mathematical logic to write elegant logical definitions while allowing
+ sound and efficient execution. In particular, the features supporting
+ this method allow the user to install, in a logically sound way,
+ alternative executable counterparts for logically defined
+ functions. These alternatives are often much more efficient than the
+ logically equivalent terms they replace. These features have been
+ implemented in the ACL2 theorem prover, and we discuss several
+ applications of the features in ACL2.",
+ paper = "Grev08.pdf",
+ keywords = "printed"
+
+}
+
+\end{chunk}
+
+\index{Appel, Andrew W.}
+\index{MacQueen, David B.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Appe91,
+ author = "Appel, Andrew W. and MacQueen, David B.",
+ title = {{Standard ML of New Jersey}},
+ booktitle = "Int. Symp. on Prog. Lang. Implementations and Logic
+ Programming",
+ publisher = "Springer-Verlag",
+ pages = "1-13",
+ year = "1991",
+ abstract =
+ "The Standard ML of New Jersey compiler has been under development for
+ five years now. We have developed a robust and complete environment
+ for Standard ML that supports the implementation of large software
+ systems and generates efficient code. The compiler has also served as
+ a laboratory for developing novel implementation techniques for a
+ sophisticated type and module system, continuation based code
+ generation, efficient pattern matching, and concurrent programming
+ features.",
+ paper = "Appe91.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Tofte, Mads}
+\begin{chunk}{axiom.bib}
+@misc{Toft09,
+ author = "Tofte, Mads",
+ title = {{Tips for Computer Scientists on Standard ML (Revised)}},
+ link = "\url{http://www.itu.dk/people/tofte/publ/tips.pdf}",
+ year = "2009",
+ paper = "Toft09.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Graham, Paul}
+\begin{chunk}{axiom.bib}
+@book{Grah93,
+ author = "Graham, Paul",
+ title = {{On Lisp}},
+ publisher = "Prentice Hall",
+ year = "1993",
+ link = "\url{http://ep.yimg.com/ty/cdn/paulgraham/onlisp.pdf}",
+ comment = "code in papers/onlisp.lisp",
+ isbn = "0130305529",
+ abstract =
+ "On Lisp is a comprehensive study of advanced Lisp techniques, with
+ bottom-up programming as the unifying theme. It gives the first
+ complete description of macros and macro applications. The book also
+ covers important subjects related to bottom-up programming, including
+ functional programming, rapid prototyping, interactive development,
+ and embedded languages. The final chapter takes a deeper look at
+ object-oriented programming than previous Lisp books, showing the
+ step-by-step construction of a working model of the Common Lisp Object
+ System (CLOS).",
+ paper = "Grah93.pdf"
+}
+
+\end{chunk}
+
+\index{Loh, Andres}
+\index{McBride, Conor}
+\index{Swierstra, Wouter}
+\begin{chunk}{axiom.bib}
+@misc{Lohx18,
+ author = "Loh, Andres and McBride, Conor and Swierstra, Wouter",
+ title = {{Simply Easy! An Implementation of a Dependently Typed
+ Lambda Calculus}},
+ link = "\url{http://strictlypositive.org/Easy.pdf}",
+ year = "2018",
+ abstract =
+ "We present an implementation in Haskell of a dependently-typed
+ lambda calculus that can be used as the core of a programming
+ language. We show that a dependently-typed lambda calculus is no
+ more difficult to implement than other typed lambda calculi. In fact,
+ our implementation is almost as easy as an implementation of the
+ simply typed lambda calculus, which we emphasize by discussing
+ the modifications necessary to go from one to the other. We explain
+ how to add data types and write simple programs in the core
+ language, and discuss the steps necessary to build a full-fledged
+ programming language on top of our simple core.",
+ paper = "Lohx18.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Felty, Amy}
+\index{Miller, Dale}
+\begin{chunk}{axiom.bib}
+@techreport{Felt90,
+ author = "Felty, Amy and Miller, Dale",
+ title = {{Encoding a Dependent-Type Lambda Calculus in a Logic
+ Programming Language}},
+ type = "technical report",
+ number = "MS-CIS-90-18",
+ institution = "University of Pennsylvania",
+ year = "1990",
+ abstract =
+ "Various forms of typed $\lambda$-calculi have been proposed as
+ specification languages for representing wide varieties of object
+ logics. The logical framework, LF, is an example of such a
+ dependent-type $\lambda$-calculus. A small subset of intuitionistic
+ logic with quantification over simply typed $\lambda$-calculus has
+ also been proposed as a framework for specifying general logics. The
+ logic of {\sl hereditary Harrop formulas} with quantification at all
+ non-predicate types, denoted here as $hh^\omega$ , is such a
+ meta-logic that has been implemented in both the Isabelle theorem
+ prover and the $\lambda$Prolog logic programming language. Both
+ frameworks provide for specifications of logics in which details
+ involved with free and bound variable occurrences, substitutions,
+ eigenvariables, and the scope of assumptions within object logics are
+ handled correctly and elegantly at the 'meta' level. In this paper, we
+ show how LF can be encoded into $hh^\omega$ in a direct and natural
+ way by mapping the typing judgments in LF into propositions in the
+ logic of $hh^\omega$. This translation establishes a very strong
+ connection between these two languages: the order of quantification in
+ an LF signature is exactly the order of a set of $hh^\omega$ clauses,
+ and the proofs in one system correspond directly to proofs in the
+ other system. Relating these two languages makes it possible to
+ provide implementations of proof checkers and theorem provers for
+ logics specified in LF by using standard logic programming techniques
+ which can be used to implement $hh^\omega$.",
+ paper = "Felt90.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Altenkirch, Thorsten}
+\index{McBride, Conor}
+\index{McKinna, James}
+\begin{chunk}{axiom.bib}
+@misc{Alte05,
+ author = "Altenkirch, Thorsten and McBride, Conor and McKinna, James",
+ title = {{Why Dependent Types Matter}},
+ link = "\url{http://www.cs.nott.ac.uk/~psztxa/publ/ydtm.pdf}",
+ year = "2005",
+ abstract =
+ "We exhibit the rationale behind the design of Epigram, a dependently
+ typed programming language and interactive program development system,
+ using refinements of a well known program -- merge sort -- as a running
+ example. We discuss its relationship with other proposals to introduce
+ aspects of dependent types into functional programming languages and
+ sketch some topics for further work in this area.",
+ paper = "Alte05.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Xi, Hongwei}
+\index{Pfenning, Frank}
+\begin{chunk}{axiom.bib}
+@inproceedings{Xixx99,
+ author = "Xi, Hongwei and Pfenning, Frank",
+ title = {{Dependent Types in Practical Programming}},
+ booktitle = "SIGPLAN-SIGACT Symp. on Principles of Programming
+ Languages",
+ year = "1999",
+ link = "\url{https://www.cs.cmu.edu/~fp/papers/popl99.pdf}",
+ publisher = "ACM",
+ abstract =
+ "We present an approach to enriching the type system of ML with a
+ restricted form of dependent types, where type index objects are drawn
+ from a constraint domain C , leading to the DML( C ) language schema.
+ This allows specification and inference of significantly more precise
+ type information, facilitating program error detection and compiler
+ optimization. A major complication resulting from introducing
+ dependent types is that pure type inference for the enriched system is
+ no longer possible, but we show that type-checking a sufficiently
+ annotated program in DML( C ) can be reduced to constraint
+ satisfaction in the constraint domain C . We exhibit the
+ unobtrusiveness of our approach through practical examples and prove
+ that DML( C ) is conservative over ML. The main contribution of the
+ paper lies in our language design, including the formulation of
+ type-checking rules which makes the approach practical. To our
+ knowledge, no previous type system for a general purpose programming
+ language such as ML has combined dependent types with features including
+ datatype declarations, higher-order functions, general recursions,
+ let-polymorphism, mutable references, and exceptions. In addition,
+ we have finished a prototype implementation of DML( C ) for an integer
+ constraint domain C , where constraints are linear inequalities (Xi
+ and Pfenning 1998).",
+ paper = "Xixx99.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Altenkirch, Thorsten}
+\begin{chunk}{axiom.bib}
+@misc{Alte04,
+ author = "Altenkirch, Thorsten",
+ title = {{$\lambda$-calculus and types}},
+ comment = "Lecture Notes; APPSEM Spring School 2004",
+ link = "\url{http://www.cs.nott.ac.uk/~psztxa/publ/mgs04.pdf}",
+ year = "2004",
+ paper = "Alte04.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Loh, Andres}
+\index{McBride, Conor}
+\index{Swierstra, Wouter}
+\begin{chunk}{axiom.bib}
+@article{Lohx01,
+ author = "Loh, Andres and McBride, Conor and Swierstra, Wouter",
+ title = {{A Tutorial Implementation of a Dependently Typed Lambda
+ Calculus}},
+ journal = "Fundamenta Informaticae",
+ volume = "102",
+ number = "2",
+ year = "2010",
+ pages = "177-207",
+ abstract =
+ "We present the type rules for a dependently typed core calculus
+ together with a straight-forward implementation in Haskell. We
+ explicitly highlight the changes necessary to shift from a
+ simply-typed lambda calculus to the dependently typed lambda
+ calculus. We also describe how to extend our core language with data
+ types and write several small example programs. The article is
+ accompanied by an executable interpreter and example code that allows
+ immediate experimentation with the system we describe.",
+ paper = "Lohx01.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Roorda, Jan-Willem}
+\begin{chunk}{axiom.bib}
+@phdthesis{Roor00,
+ author = "Roorda, Jan-Willem",
+ title = {{Pure Type Systems for Functional Programming}},
+ year = "2000",
+ school = "University of Utrecht",
+ abstract =
+ "We present a functional programming language based on Pure Type
+ Systems (PTSs). We show how we can define such a language by
+ extending the PTS framework with algebraic data types, case
+ expressions and definitions. To be able to experiment with our
+ language we present an implementation of a type checker and an
+ interpreter for our language. PTSs are well suited as a basis for a
+ functional programming language because they are at the top of a
+ hierarchy of increasingly stronger type systems. The concepts of
+ `existential types', `rank-n polymorphism' and `dependent types' arise
+ naturally in functional programming languages based on the systems in
+ this hierarchy. There is no need for ad-hoc extensions to incorporate
+ these features. The type system of our language is more powerful than
+ the Hindley-Milner system. We illustrate this fact by giving a number
+ of meaningful programs that cannot be typed in Haskell but are typable
+ in our language. A `real world' example of such a program is the
+ mapping of a specialisation of a Generic Haskell function to a Haskell
+ function. Unlike the description of the Henk language by Simon Peyton
+ Jones and Erik Meijer we give a complete formal definition of the type
+ system and the operational semantics of our language. Another
+ difference between Henk and our language is that our language is
+ defined for a large class of Pure Type Systems instead of only for the
+ systems of the $\lambda$-cube.",
+ paper = "Roor00.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Jones, Simon Peyton}
+\index{Meijer, Erik}
+\begin{chunk}{axiom.bib}
+@misc{Jone97,
+ author = "Jones, Simon Peyton and Meijer, Erik",
+ title = {{Henk: A Typed Intermediate Language}},
+ year = "1997",
+ link =
+ "\url{https://www.microsoft.com/en-us/research/wp-content/uploads/1997/01/henk.pdf}",
+ abstract =
+ "There is a growing interest in the use of richly-typed
+ intermediate languages in sophisticated compilers for
+ higher-order, typed source languages. These intermediate languages
+ are typically stratified, involving terms, types, and kinds. As
+ the sophistication of the type system increases, these three
+ levels begin to look more and more similar, so an attractive
+ approach is to use a single syntax, and a single data type in the
+ compiler, to represent all three.
+
+ The theory of so-called {\sl pure type systems} makes precisely
+ such an identification. This paper describes Henk, a new typed
+ intermediate language based closely on a particular pure type
+ system, {\sl the lambda cube}. On the way we give a tutorial
+ introduction to the lambda cube.",
+ paper = "Jone97.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Morrisett, Greg}
+\begin{chunk}{axiom.bib}
+@phdthesis{Morr95,
+ author = "Morrisett, Greg",
+ title = {{Compiling with Types}},
+ school = "Carnegie Mellon University",
+ year = "1995",
+ comment = "CMU-CS-95-226",
+ link = "\url{https://www.cs.cmu.edu/~rwh/theses/morrisett.pdf}",
+ abstract =
+ "Compilers for monomorphic languages, such as C and Pascal, take
+ advantage of types to determine data representations, alignment,
+ calling conventions, and register selection. However, these languages
+ lack important features including polymorphism, abstract datatypes,
+ and garbage collection. In contrast, modern programming languages
+ such as Standard ML (SML), provide all of these features, but existing
+ implementations fail to take full advantage of types. The result is
+ that performance of SML code is quite bad when compared to C.
+
+ In this thesis, I provide a general framework, called
+ {\sl type-directed compilation}, that allows compiler writers to
+ take advantage of types at all stages in compilation. In the
+ framework, types are used not only to determine efficient
+ representations and calling conventions, but also to prove the
+ correctness of the compiler. A key property of type-directed
+ compilation is that all but the lowest levels of the compiler use
+ {\sl typed intermediate languages}. An advantage of this approach
+ is that it provides a means for automatically checking the
+ integrity of the resulting code.
+
+ An important contribution of this work is the development of a new,
+ statically-typed intermediate language, called $\lambda^{ML}_i$.
+ This language supports {\sl dynamic type dispatch}, providing a
+ means to select operations based on types at run time. I show how
+ to use dynamic type dispatch to support polymorphism, ad-hoc
+ operators, and garbage collection without having to box or tag
+ values. This allows compilers for SML to take advantage of
+ techniques used in C compilers, without sacrificing language
+ features or separate compilation.
+
+ To demonstrate the applicability of my approach, I, along with
+ others, have constructed a new compiler for SML called TIL that
+ eliminates most restrictions on the representations of values. The
+ code produced by TIL is roughly twice as fast as code produced by
+ the SML/NJ compiler. This is due at least partially to the use
+ of natural representations, but primarily to the conventional
+ optimizer which manipulates typed, $\lambda^{ML}_i$ code. TIL
+ demonstrates that combining type-directed compilation with dynamic
+ type dispatch yields a superior architecture for compilers of
+ modern languages.",
+ paper = "Morr95.pdf"
+}
+
+\end{chunk}
+
+\index{Leroy, Xavier}
+\begin{chunk}{axiom.bib}
+@article{Lero98,
+ author = "Leroy, Xavier",
+ title = {{An Overview of Types in Compilation}},
+ journal = "LNCS",
+ volume = "1473",
+ year = "1998",
+ publisher = "Springer-Verlag",
+ pages = "1-8",
+ paper = "Lero98.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Lu, Eric}
+\begin{chunk}{axiom.bib}
+@misc{Luxx16,
+ author = "Lu, Eric",
+ title = {{Barendregt's Cube and Programming with Dependent Types}},
+ link = "\url{https://www.seas.harvard.edu/courses/cs252/2016fa/15.pdf}",
+ paper = "Luxx16.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Guallart, Nino}
+\begin{chunk}{axiom.bib}
+@misc{Gual14,
+ author = "Guallart, Nino",
+ title = {{An Overview of Type Theories}},
+ link = "\url{https://arxiv.org/pdf/1411.1029.pdf}",
+ year = "2014",
+ abstract =
+ "Pure type systems arise as a generalisation of simply typed lambda
+ calculus. The contemporary development of mathematics has renewed
+ the interest in type theories, as they are not just the object of
+ mere historical research, but have an active role in the development
+ of computational science and core mathematics. It is worth exploring
+ some of them in depth, particularly predicative Martin-Löf’s
+ intuitionistic type theory and impredicative Coquand’s calculus of
+ constructions. The logical and philosophical differences and
+ similarities between them will be studied, showing the relationship
+ between these type theories and other fields of logic.",
+ paper = "Gual14.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Benini, Marco}
+\begin{chunk}{axiom.bib}
+@misc{Beni95,
+ author = "Benini, Marco",
+ title = {{Barendregt's $\lambda$-Cube in Isabelle}},
+ link = "\url{}",
+ year = "1995",
+ abstract =
+ "We present an implementation of Barendregt’s $\lambda$-Cube in the
+ Isabelle proof system. Isabelle provides many facilities for
+ developing a useful specification and proving environment from the
+ basic formulation of formal systems. We used those facilities to
+ provide an environment where the user can describe problems and derive
+ proofs interactively.",
+ paper = "Beni95.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Hellman, Martin E.}
+\begin{chunk}{axiom.bib}
+@article{Hell17,
+ author = "Hellman, Martin E.",
+ title = {{Cybersecurity, Nuclear Security, Alan Turing, and
+ Illogical Logic}},
+ journal = "Communications of the ACM",
+ volume = "60",
+ number = "12",
+ pages = "52-59",
+ year = "2017",
+ link = "\url{https://cacm.acm.org/magazines/2017/12/223042-cybersecurity-nuclear-security-alan-turing-and-illogical-logic/fulltext}"
+}
+
+\end{chunk}
+
+\index{Winston, Patrick Henry}
+\begin{chunk}{axiom.bib}
+@inproceedings{Wins12,
+ author = "Winston, Patrick Henry",
+ title = {{The Next 50 Years: A Personal View}},
+ booktitle = "Biologically Inspired Cognitive Architectures 1",
+ year = "2012",
+ pages = "92-99",
+ publisher = "Elsevier B.V.",
+ abstract =
+ "I review history, starting with Turing’s seminal paper, reaching
+ back ultimately to when our species started to outperform other
+ primates, searching for the questions that will help us develop a
+ computational account of human intelligence. I answer that the
+ right questions are: What’s different between us and the other
+ primates and what’s the same. I answer the {\sl what’s different}
+ question by saying that we became symbolic in a way that enabled
+ story understanding, directed perception, and easy communication,
+ and other species did not. I argue against Turing’s
+ reasoning-centered suggestions, offering that reasoning is just a
+ special case of story understanding. I answer the {\sl what’s the
+ same} question by noting that our brains are largely engineered in
+ the same exotic way, with information flowing in all directions at
+ once. By way of example, I illustrate how these answers can
+ influence a research program, describing the Genesis system, a
+ system that works with short summaries of stories, provided in
+ English, together with low-level {\sl common-sense rules} and
+ higher-level {\sl concept patterns}, likewise expressed in
+ English. Genesis answers questions, notes abstract concepts such
+ as {\sl revenge}, tells stories in a listener-aware way, and fills
+ in story gaps using precedents. I conclude by suggesting,
+ optimistically, that a genuine computational theory of human
+ intelligence will emerge in the next 50 years if we stick to the
+ right, biologically inspired questions, and work toward
+ biologically informed models.",
+ paper = "Wins12.pdf"
+}
+
+\end{chunk}
+
+\index{Brooks, Rodney A.}
+\begin{chunk}{axiom.bib}
+@article{Broo87,
+ author = "Brooks, Rodney A.",
+ title = {{Intelligence Without Representation}},
+ journal = "Artificial Intelligence",
+ volume = "47",
+ year = "1991",
+ pages = "139-159",
+ abstract =
+ "Artificial intelligence research has foundered on the issue of
+ representation. When intelligence is approached in an incremental
+ manner, with strict reliance on interfacing to the real world through
+ perception and action, reliance on representation disappears. In this
+ paper we outline our approach to incrementally building complete
+ intelligent Creatures. The fundamental decomposition of the
+ intelligent system is not into independent information processing
+ units which must interface with each other via representations.
+ Instead, the intelligent system is decomposed into independent and
+ parallel activity producers which all interface directly to the world
+ through perception and action, rather than interface to each other
+ particularly much. The notions of central and peripheral systems
+ evaporate---everything is both central and peripheral. Based on these
+ principles we have built a very successful series of mobile robots
+ which operate without supervision as Creatures in standard office
+ environments.",
+ paper = "Broo87.pdf"
+}
+
+\end{chunk}
+
+\index{Roy, Peter Van}
+\index{Haridi, Seif}
+\begin{chunk}{axiom.bib}
+@book{Royx03,
+ author = "Roy, Peter Van and Haridi, Seif",
+ title = {{Concepts, Techniques, and Models of Computer Programming}},
+ year = "2003",
+ link =
+ "\url{http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.102.7366&rep=rep1&type=pdf}",
+ publisher = "MIT",
+ isbn = "978-0262220699",
+ paper = "Royx03.pdf"
+}
+
+\end{chunk}
+
+\index{Boyer, Robert S.}
+\index{Moore, J Strother}
+\begin{chunk}{axiom.bib}
+@article{Boye84,
+ author = "Boyer, Robert S. and Moore, J Strother",
+ title = {{Proof-Checking, Theorem-Proving, and Program Verification}},
+ journal = "Contemporary Mathematics",
+ volume = "29",
+ year = "1984",
+ abstract =
+ "This article consists of three parts: a tutorial introduction to a
+ computer program that proves theorems by induction; a brief
+ description of recent applications of that theorem-prover; and a
+ discussion of several nontechnical aspects of the problem of building
+ automatic theorem-provers. The theorem-prover described has proved
+ theorems such as the uniqueness of prime factorizations, Fermat's
+ theorem, and the recursive unsolvability of the halting problem.
+
+ The article is addressed to those who know nothing about automatic
+ theorem-proving but would like a glimpse of one such system. This
+ article definitely does not provide a balanced view of all automatic
+ theorem-proving, the literature of which is already rather large and
+ technical. Nor do we describe the details of our theorem-proving
+ system, but they can be found in the books, articles, and technical
+ reports that we reference.
+
+ In our opinion, progress in automatic theorem-proving is largely a
+ function of the mathematical ability of those attempting to build such
+ systems. We encourage good mathematicians to work in the field.",
+ paper = "Boye84.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Lamport, Leslie}
+\begin{chunk}{axiom.bib}
+@article{Lamp78,
+ author = "Lamport, Leslie",
+ title = {{The Specification and Proof of Correctness of Interactive
+ Programs}},
+ journal = "LNCS",
+ volume = "75",
+ year = "1978",
+ abstract =
+ "A method of specifying interactive programs is described, and the
+ Floyd-Hoare assertional method is modified to permit one to prove
+ that a program correctly implements its specification. The format of
+ typed input to be accepted by a program is formally specified, and
+ the method is illustrated with a TECO program.",
+ paper = "Lamp78.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Anderson, E.R.}
+\index{Belz, F.C.}
+\index{Blum, E.K.}
+\begin{chunk}{axiom.bib}
+@article{Ande78,
+ author = "Anderson, E.R. and Belz, F.C. and Blum, E.K.",
+ title = {{Extending an Implementation Language to a Specification Language}},
+ journal = "LNCS",
+ volume = "75",
+ year = "1978",
+ paper = "Ande78.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Tobin-Hochstadt, Sam}
+\index{Felleisen, Matthias}
+\begin{chunk}{axiom.bib}
+@misc{Tobi11,
+ author = "Tobin-Hochstadt, Sam and Felleisen, Matthias",
+ title = {{The Design and Implementation of Typed Scheme: From
+ Scripts to Programs}},
+ link = "\url{https://arxiv.org/pdf/1106.2575.pdf}",
+ year = "2011",
+ abstract =
+ "When scripts in untyped languages grow into large programs,
+ maintaining them becomes difficult. A lack of explicit type
+ annotations in typical scripting languages forces programmers to
+ (re)discover critical pieces of design information every time they
+ wish to change a program. This analysis step both slows down the
+ maintenance process and may even introduce mistakes due to the
+ violation of undiscovered invariants.
+
+ This paper presents Typed Scheme, an explicitly typed extension of PLT
+ Scheme, an untyped scripting language. Its type system is based on
+ the novel notion of occurrence typing, which we formalize and
+ mechanically prove sound. The implementation of Typed Scheme
+ additionally borrows elements from a range of approaches, including
+ recursive types, true unions and subtyping, plus polymorphism combined
+ with a modicum of local inference.
+
+ The formulation of occurrence typing naturally leads to a simple and
+ expressive version of predicates to describe refinement types. A
+ Typed Scheme program can use these refinement types to keep track of
+ arbitrary classes of values via the type system. Further, we show how
+ the Typed Scheme type system, in conjunction with simple recursive
+ types, is able to encode refinements of existing datatypes, thus
+ expressing both proposed variations of refinement types.",
+ paper = "Tobi11.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{McCarthy, John}
+\index{Painter, James}
+\begin{chunk}{axiom.bib}
+@inproceedings{Mcca67,
+ author = "McCarthy, John and Painter, James",
+ title = {{Correctness of a Compiler for Arithmetic Expressions}},
+ booktitle = "Proc. Symp. in Applied Mathematics",
+ publisher = "American Mathematical Society",
+ year = "1967",
+ paper = "Mcca67.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Vuillemin, Jean}
+\begin{chunk}{axiom.bib}
+@phdthesis{Vuil73,
+ author = "Vuillemin, Jean",
+ title = {{Proof Techniques for Recursive Programs}},
+ school = "Stanford University",
+ year = "1973",
+ abstract =
+ "The concept of least fixed-point of a continuous function can be
+ considered as the unifying thread of this dissertation.
+
+ The connections between fixed-points and recursive programs are
+ detailed in chapter 2, providing some insights on practical
+ implementations of recursion. There are two usual characterizations
+ of the least fixed-point of a continuous function. To the first
+ characterization, due to Knaster and Tarski, corresponds a class
+ of proof techniques for programs, as described in Chapter 3. The
+ other characterization of least fixed points, better known as
+ Kleene's first recursion theorem, is discussed in Chapter 4. It
+ has the advantage of being effective and it leads to a wider class
+ of proof techniques.",
+ paper = "Vuil73.pdf"
+}
+
+\end{chunk}
+
+\begin{chunk}{axiom.bib}
+@misc{Byte79,
+ author = "Byte Publications",
+ title = {{LISP}},
+ link = "\url{https://ia802603.us.archive.org/30/items/byte-magazine-1979-08/1979_08_BYTE_04-08_LISP.pdf}",
+ publisher = "McGraw-Hill",
+ year = "1979",
+ paper = "Byte79.pdf"
+}
+
+\end{chunk}
+
+\index{Manna, Zohar}
+\index{Vuillemin, Jean}
+\begin{chunk}{axiom.bib}
+@article{Mann72,
+ author = "Manna, Zohar and Vuillemin, Jean",
+ title = {{Fixpoint Approach to the Theory of Computation}},
+ journal = "Communications of the ACM",
+ volume = "15",
+ number = "7",
+ year = "1972",
+ pages = "828-836",
+ abstract =
+ "Following the fixpoint theory of Scott, the semantics of computer
+ programs are defined in terms of the least fixpoints of recursive
+ programs. This allows not only the justification of all existing
+ verification techniques, but also their extension to the handling, in
+ a uniform manner of various properties of computer programs, including
+ correctness, termination, and equivalence.",
+ paper = "Mann72.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Paulson, Lawrence C.}
+\begin{chunk}{axiom.bib}
+@article{Paul83,
+ author = "Paulson, Lawrence C.",
+ title = {{A Higher-Order Implementation of Rewriting}},
+ journal = "Science of Computer Programming",
+ volume = "3",
+ pages = "119-149",
+ year = "1983",
+ abstract =
+ "Many automatic theorem-provers rely on rewriting. Using theorems as
+ rewrite rules helps to simplify the subgoals that arise during a
+ proof. LCF is an interactive theorem-prover intended for reasoning
+ about computation. Its implementation of rewriting is presented in
+ detail. LCF provides a family of rewriting functions, and operators to
+ combine them. A succession of functions is described, from pattern
+ matching primitives to the rewriting tool that performs most
+ inferences in LCF proofs. The design is highly modular. Each function
+ performs a basic, specific task, such as recognizing a certain form of
+ tautology. Each operator implements one method of building a rewriting
+ function from simpler ones. These pieces can be put together in
+ numerous ways, yielding a variety of rewriting strategies. The
+ approach involves programming with higher-order functions. Rewriting
+ functions are data values, produced by computation on other rewriting
+ functions. The code is in daily use at Cambridge, demonstrating the
+ practical use of functional programming.",
+ paper = "Paul83.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Avigad, Jeremy}
+\index{Holzl, Johannes}
+\index{Serafin, Luke}
+\begin{chunk}{axiom.bib}
+@misc{Avig17c,
+ author = "Avigad, Jeremy and Holzl, Johannes and Serafin, Luke",
+ title = {{A Formally Verified Proof of the Central Limit Theorem}},
+ link = "\url{https://arxiv.org/abs/1405.7012}",
+ year = "2017",
+ abstract =
+ "We describe a proof of the Central Limit Theorem that has been
+ formally verified in the Isabelle proof assistant. Our formalization
+ builds upon and extends Isabelle’s libraries for analysis and
+ measure-theoretic probability. The proof of the theorem uses
+ characteristic functions, which are a kind of Fourier transform, to
+ demonstrate that, under suitable hypotheses, sums of random
+ variables converge weakly to the standard normal distribution. We
+ also discuss the libraries and infrastructure that supported the
+ formalization, and reflect on some of the lessons we have learned
+ from the effort.",
+ paper = "Avig17c.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Feferman, Solomon}
+\begin{chunk}{axiom.bib}
+@misc{Fefe95,
+ author = "Feferman, Solomon",
+ title = {{Definedness}},
+ year = "1995",
+ link = "\url{https://math.stanford.edu/~feferman/papers/definedness.pdf}",
+ abstract =
+ "Questions of definedness are ubiquitous in mathematics. Informally,
+ these involve reasoning about expressions which may or may not have a
+ value. This paper surveys work on logics in which such reasoning can
+ be carried out directly, especially in computational contexts. It
+ begins with a general logic of 'partial terms', continues with partial
+ combinatory and lambda calculi, and concludes with an expressively
+ rich theory of partial functions and polymorphic types, where
+ termination of functional programs can be established in a natural way.",
+ paper = "Fefe95.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
diff --git a/src/axiom-website/patches.html b/src/axiom-website/patches.html
index e4c4aac..6103bf3 100644
--- a/src/axiom-website/patches.html
+++ b/src/axiom-website/patches.html
@@ -5936,6 +5936,8 @@ books/bookvolbib add references

books/bookvol0 add Griesmer Obituary

20180606.03.tpd.patch
books/bookvol0 fix minor typo to citation

+20180628.01.tpd.patch
+books/bookvolbib add references

--
1.9.1