From 3729acfd66bcb686622965120b6ebe4d93a85025 Mon Sep 17 00:00:00 2001
From: Tim Daly
Date: Thu, 31 Jan 2019 12:48:46 -0500
Subject: [PATCH] books/bookvolbib add references
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Goal: Proving Axiom Sane
\index{Castagna, Giuseppe}
\index{Lanvin, Victor}
\index{Petrucciani, Tommaso}
\index{Siek, Jeremy G.}
\begin{chunk}{axiom.bib}
@misc{Cast19,
author = "Castagna, Giuseppe and Lanvin, Victor and Petrucciani, Tommaso
and Siek, Jeremy G.",
title = {{Gradual Typing: A New Perspective}},
link = "\url{https://www.irif.fr/~gc//papers/popl19.pdf}",
year = "2019",
abstract =
"We define a new, more semantic interpretation of gradual types and use
it to ``gradualize'' two forms of polymorphism: subtyping polymorphism
and implicit parametric polymorphism. In particular, we use the new
interpretation to define three gradual type systems ---Hindley-Milner,
with subtyping, and with union and intersection types--- in terms of two
preorders, subtyping and materialization. These systems are defined
both declaratively and algorithmically. The declarative presentation
consists in adding two subsumption-like rules, one for each preorder,
to the standard rules of each type system. This yields more
intelligible and streamlined definitions and shows a direct
correlation between cast insertion and materialization. For the
algorithmic presentation, we show how it can be defined by reusing
existing techniques such as unification and tallying.",
paper = "Cast19.pdf",
keywords = "printed"
}
\end{chunk}
\index{Siek, Jeremy G.}
\index{Taha, Walid}
\begin{chunk}{axiom.bib}
@inproceedings{Siek06,
author = "Siek, Jeremy G. and Taha, Walid",
title = {{Gradual Typing for Functional Languages}},
booktitle = "Proc. of Scheme and Functional Programming Workshop",
year = "2006",
publisher = "ACM",
pages = "81--92",
abstract =
"Static and dynamic type systems have well-known strengths and
weaknesses, and each is better suited for different programming
tasks. There have been many efforts to integrate static and dynamic
typing and thereby combine the benefits of both typing disciplines in
the same language. The flexibility of static typing can be improved
by adding a type Dynamic and a typecase form. The safety and
performance of dynamic typing can be improved by adding optional type
annotations or by performing type inference (as in soft
typing). However, there has been little formal work on type systems
that allow a programmer-controlled migration between dynamic and
static typing. Thatte proposed Quasi-Static Typing, but it does not
statically catch all type errors in completely annotated
programs. Anderson and Drossopoulou defined a nominal type system
for an object-oriented language with optional type annotations.
However, developing a sound, gradual type system for functional
languages with structural types is an open problem.
In this paper we present a solution based on the intuition that the
structure of a type may be partially known/unknown at compile time
and the job of the type system is to catch incompatibilities between
the known parts of types. We define the static and dynamic semantics
of a $\lambda$-calculus with optional type annotations and we prove that
its type system is sound with respect to the simply-typed
$\lambda$-calculus for fully-annotated terms. We prove that this
calculus is type safe and that the cost of dynamism is
'pay-as-you-go'.",
paper = "Siek06.pdf",
keywords = "printed"
}
\end{chunk}
\index{Bauer, Andrej}
\begin{chunk}{axiom.bib}
@misc{Baue19,
author = "Bauer, Andrej",
title = {{How to Implement Type Theory in an Hour}},
year = "2019",
link = "\url{https://vimeo.com/286652934}",
comment = "\url{https://github.com/andrejbauer/spartan-type-theory}"
}
\end{chunk}
\index{Coquand, Thierry}
\index{Kinoshita, Yoshiki}
\index{Nordstrom, Bengt}
\index{Takeyama, Makoto}
\begin{chunk}{axiom.bib}
@misc{Coqu19,
author = "Coquand, Thierry and Kinoshita, Yoshiki and Nordstrom, Bengt
and Takeyama, Makoto",
title = {{A Simple Type-Theoretic Language: Mini-TT}},
year = "2019",
link = "\url{http://www.cse.chalmers.se/~bengt/papers/GKminiTT.pdf}",
abstract =
"This paper presents a formal description of a small functional
language with dependent types. The language contains data types,
mutual recursive/inductive definitions and a universe of small
types. The syntax, semantics and type system is specified in such a
way that the implementation of a parser, interpreter and type
checker is straightforward. The main difficulty is to design the
conversion algorithm in such a way that it works for open expressions.
The paper ends with a complete implementation in Haskell (around 400
lines of code).",
paper = "Coqu19.pdf",
keywords = "printed"
}
\end{chunk}
\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@misc{Pfen06,
author = "Pfenning, Frank",
title = {{Constraint Logic Programming}},
year = "2006",
link = "\url{http://www.cs.cmu.edu/~fp/courses/lp/lectures/27-clp.pdf}",
paper = "Pfen06.pdf",
keywords = "printed"
}
\end{chunk}
\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@misc{Pfen07,
author = "Pfenning, Frank",
title = {{Logic Programming}},
year = "2007",
link = "\url{http://www.cs.cmu.edu/~fp/courses/lp/lectures/lp-all.pdf}",
paper = "Pfen07.pdf"
}
\end{chunk}
\index{Tennant, Neil}
\begin{chunk}{axiom.bib}
@book{Tenn90,
  author    = "Tennant, Neil",
  title     = {{Natural Logic}},
  publisher = "Edinburgh University Press",
  isbn      = "0852245793",
  year      = "1990",
  paper     = "Tenn90.pdf"
}
\end{chunk}
\index{Velleman, Daniel J.}
\begin{chunk}{axiom.bib}
@book{Vell06,
author = "Velleman, Daniel J.",
title = {{How To Prove It}},
publisher = "Cambridge University Press",
year = "2006",
isbn = "9780511161162",
paper = "Vell06.pdf"
}
\end{chunk}
\index{Chiswell, Ian}
\index{Hodges, Wilfrid}
\begin{chunk}{axiom.bib}
@book{Chis07,
  author    = "Chiswell, Ian and Hodges, Wilfrid",
  title     = {{Mathematical Logic}},
  publisher = "Oxford University Press",
  year      = "2007",
  isbn      = "9780198571001",
  paper     = "Chis07.pdf"
}
\end{chunk}
\index{Bostock, David}
\begin{chunk}{axiom.bib}
@book{Bost97,
  author    = "Bostock, David",
  title     = {{Intermediate Logic}},
  publisher = "Clarendon Press",
  year      = "1997",
  isbn      = "0198751419",
  paper     = "Bost97.pdf"
}
\end{chunk}
\index{Enderton, Herbert B.}
\begin{chunk}{axiom.bib}
@book{Ende01,
  author    = "Enderton, Herbert B.",
  title     = {{A Mathematical Introduction to Logic}},
  publisher = "Harcourt Academic Press",
  year      = "2001",
  isbn      = "0122384520",
  paper     = "Ende01.pdf"
}
\end{chunk}
\index{van Dalen, Dirk}
\begin{chunk}{axiom.bib}
@book{Dale08,
  author    = "van Dalen, Dirk",
  title     = {{Logic and Structure}},
  publisher = "Springer",
  year      = "2008",
  isbn      = "9783540208792",
  paper     = "Dale08.pdf"
}
\end{chunk}
\index{Smith, Peter}
\begin{chunk}{axiom.bib}
@book{Smit13,
author = "Smith, Peter",
title = {{An Introduction to G{\"o}del's Theorems}},
publisher = "Cambridge University Press",
year = "2013",
isbn = "9781107022843",
paper = "Smit13.pdf"
}
\end{chunk}
\index{Leary, Christopher C.}
\index{Kristiansen, Lars}
\begin{chunk}{axiom.bib}
@book{Lear15,
  author    = "Leary, Christopher C. and Kristiansen, Lars",
  title     = {{A Friendly Introduction to Mathematical Logic}},
  publisher = "Milne Library, Geneseo",
  year      = "2015",
  isbn      = "9781942341321",
  paper     = "Lear15.pdf"
}
\end{chunk}
\index{Cutland, Nigel}
\begin{chunk}{axiom.bib}
@book{Cutl80,
author = "Cutland, Nigel",
title = {{Computability: An Introduction to Recursive Function Theory}},
publisher = "Cambridge University Press",
year = "1980",
isbn = "0521223849",
paper = "Cutl80.pdf"
}
\end{chunk}
\index{Hrbacek, Karel}
\index{Jech, Thomas}
\begin{chunk}{axiom.bib}
@book{Hrba99,
  author    = "Hrbacek, Karel and Jech, Thomas",
  title     = {{Introduction to Set Theory}},
  publisher = "Marcel Dekker, Inc",
  year      = "1999",
  isbn      = "0824779150",
  paper     = "Hrba99.pdf"
}
\end{chunk}
\index{Roitman, Judith}
\begin{chunk}{axiom.bib}
@misc{Roit13,
  author = "Roitman, Judith",
  title  = {{Introduction to Modern Set Theory}},
  link   = "\url{http://www.math.ku.edu/~roitman/SetTheory.pdf}",
  year   = "2013",
  paper  = "Roit13.pdf"
}
\end{chunk}
\index{Fitting, Melvin}
\begin{chunk}{axiom.bib}
@book{Fitt69,
  author    = "Fitting, Melvin",
  title     = {{Intuitionistic Logic Model Theory and Forcing}},
  publisher = "North Holland",
  year      = "1969",
  paper     = "Fitt69.pdf"
}
\end{chunk}
\index{Popkorn, Sally}
\index{Simmons, Harold}
\begin{chunk}{axiom.bib}
@book{Popk94,
  author    = "Popkorn, Sally",
  title     = {{First Steps in Modal Logic}},
  publisher = "Cambridge University Press",
  year      = "1994",
  isbn      = "9780521464826",
  link      =
  "\url{https://cs.famaf.unc.edu.ar/~careces/ml18/downloads/popkorn.pdf}",
  paper     = "Popk94.pdf"
}
\end{chunk}
\index{Sipser, Michael}
\begin{chunk}{axiom.bib}
@book{Sips13,
  author    = "Sipser, Michael",
  title     = {{Introduction to the Theory of Computation}},
  publisher = "Cengage Learning",
  year      = "2013",
  isbn      = "9781133187790",
  link      = "\url{https://theswissbay.ch/pdf/Book/Introduction%20to%20the%20theory%20of%20computation_third%20edition%20%20Michael%20Sipser.pdf}",
  paper     = "Sips13.pdf"
}
\end{chunk}
\index{Smullyan, Raymond M.}
\begin{chunk}{axiom.bib}
@book{Smul92,
author = "Smullyan, Raymond M.",
title = {{G{\"o}del's Incompleteness Theorems}},
publisher = "Oxford University Press",
year = "1992",
isbn = "0195046722",
paper = "Smul92.pdf"
}
\end{chunk}
\index{Smith, Peter}
\begin{chunk}{axiom.bib}
@misc{Smit17,
  author = "Smith, Peter",
  title  = {{Teach Yourself Logic 2017: A Study Guide}},
  link   =
  "\url{https://www.logicmatters.net/resources/pdfs/TeachYourselfLogic2017.pdf}",
  year   = "2017",
  paper  = "Smit17.pdf"
}
\end{chunk}
\index{Murphy, Robin R.}
\begin{chunk}{axiom.bib}
@book{Murp18,
author = "Murphy, Robin R.",
title = {{Robotics Through Science Fiction}},
publisher = "MIT Press",
year = "2018",
link =
"\url{https://mitpress.mit.edu/books/robotics-through-science-fiction}",
isbn = "9780262536264",
comment = "verification, validation, and trust"
}
\end{chunk}
\index{Kaminski, Paul}
\begin{chunk}{axiom.bib}
@techreport{Kami12,
  author      = "Kaminski, Paul",
  title       = {{The Role of Autonomy in DoD Systems}},
  type        = "Task Force Report",
  institution = "Defense Science Board, Dept. of Defense",
  year        = "2012",
  link        = "\url{https://fas.org/irp/agency/dod/dsb/autonomy.pdf}",
  comment     = "verification, validation, and trust"
}
\end{chunk}
\index{Siciliano, Bruno}
\index{Khatib, Oussama}
\begin{chunk}{axiom.bib}
@book{Sici16,
author = "Siciliano, Bruno and Khatib, Oussama",
title = {{Springer Handbook of Robotics}},
publisher = "Springer",
year = "2016",
abstract =
"Reaching for the human frontier, robotics is vigorously engaged in
the growing challenges of new emerging domains. Interacting,
exploring, and working with humans, the new generation of robots will
increasingly touch people and their lives. The credible prospect of
practical robots among humans is the result of the scientific
endeavour of a half a century of robotic developments that established
robotics as a modern scientific discipline. The Springer Handbook of
Robotics brings a widespread and well-structured compilation of
classic and emerging application areas of robotics. From the
foundations to the social and ethical implications of robotics, the
handbook provides a comprehensive collection of the accomplishments in
the field, and constitutes a premise of further advances towards new
challenges in robotics. This handbook, edited by two internationally
renowned scientists with the support of an outstanding team of seven
part editors and one-hundred sixty-four authors, is an authoritative
reference for robotics researchers, newcomers to the field, and
scholars from related disciplines such as biomechanics, neurosciences,
virtual simulation, animation, surgery, and sensor networks among
others.",
paper = "Sici16.pdf"
}
\end{chunk}
\index{Martin-L\"of, P.}
\begin{chunk}{axiom.bib}
@article{Mart84,
author = "Martin-L{\"o}f, P.",
title = {{Constructive Mathematics and Computer Programming}},
journal = "Phil. Trans. Royal Society London",
volume = "312",
year = "1984",
pages = "501--518",
link =
"\url{https://royalsocietypublishing.org/doi/pdf/10.1098/rsta.1984.0073}",
abstract =
"If programming is understood not as the writing of instructions for
this or that computing machine but as the design of methods of
computation that it is the computer's duty to execute (a difference
that Dijkstra has referred to as the difference between computer
science and computing science), then it no longer seems possible to
distinguish the discipline of programming from constructive
mathematics. This explains why the intuitionistic theory of types
(Martin-L{\"o}f 1975 In Logic Colloquium 1973 (ed. H. E. Rose and
J. C. Shepherdson), pp. 73--118. Amsterdam: North Holland), which
was originally developed as a symbolism for the precise codification
of constructive mathematics, may equally well be viewed as a
programming language. As such it provides a precise notation not
only, like other programming languages, for the programs themselves
but also for the tasks that the programs are supposed to perform.
Moreover, the inference rules of the theory of types, which are again
completely formal, appear as rules of correct program synthesis. Thus
the correctness of a program written in the theory of types is proved
formally at the same time as it is being synthesized.",
paper = "Mart84.pdf",
keywords = "printed"
}
\end{chunk}
\index{Oberhoff, Sebastian}
\begin{chunk}{axiom.bib}
@misc{Ober18,
author = "Oberhoff, Sebastian",
title = {{Incompleteness Ex Machina}},
year = "2018",
abstract =
"In this essay we'll prove G{\"o}del's incompleteness theorems
twice. First, we'll prove them the good old-fashioned way. Then
we'll repeat the feat in the setting of computation. In the
process we'll discover that G{\"o}del's work, rightly viewed, needs to
be split into two parts: the transport of computation into the
arena of arithmetic on the one hand and the actual incompleteness
theorems on the other. After we're done there will be cake.",
paper = "Ober18.pdf",
keywords = "printed"
}
\end{chunk}
\index{Deutsch, David}
\begin{chunk}{axiom.bib}
@article{Deut85,
author = "Deutsch, David",
title = {{Quantum Theory, the Church-Turing Principle and the
Universal Quantum Computer}},
journal = "Proc. Royal Society of London",
volume = "400",
pages = "97--117",
year = "1985",
abstract =
"It is argued that underlying the Church-Turing hypothesis there
is an implicit physical assertion. Here, this assertion is
presented explicitly as a physical principle: 'every finitely
realizable physical system can be perfectly simulated by a
universal model computing machine operating by finite
means'. Classical physics and the universal Turing machine,
because the former is continuous and the latter discrete, do not
obey the principle, at least in the strong form above. A class of
model computing machines that is the quantum generalization of the
class of Turing machines is described, and it is shown that
quantum theory and the 'universal quantum computer' are compatible
with the principle. Computing machines resembling the universal
quantum computer could, in principle, be built and would have many
remarkable properties not reproducible by any Turing
machine. These do not include the computation of non-recursive
functions, but they do include 'quantum parallelism', a method by
which certain probabilistic tasks can be performed faster by a
universal quantum computer than by any classical restriction of
it. The intuitive explanation of these properties places an
intolerable strain on all interpretations of quantum theory other
than Everett's. Some of the numerous connections between the
quantum theory of computation and the rest of physics are
explored. Quantum complexity theory allows a physically more
reasonable definition of the 'complexity' or 'knowledge' in a
physical system than does classical complexity theory.",
paper = "Deut85.pdf",
keywords = "printed"
}
\end{chunk}
\index{Antoy, Sergio}
\index{Peters, Arthur}
\begin{chunk}{axiom.bib}
@inproceedings{Anto12,
author = "Antoy, Sergio and Peters, Arthur",
title = {{Compiling a Functional Logic Language}},
booktitle = "11th Int. Symp. on Functional and Logic Programming",
series = "LNCS",
volume = "7294",
pages = "17--31",
year = "2012",
abstract =
"We present the design of a compiler for a functional logic
programming language and discuss the compiler's implementation.
The source program is abstracted by a constructor-based graph
rewriting system obtained from a functional logic program after
syntax desugaring, lambda lifting and similar transformations
provided by a compiler's front-end. This system is
non-deterministic and requires a specialized normalization
strategy. The target program consists of 3 procedures that execute
graph replacements originating from either rewrite or pull-tab
steps. These procedures are deterministic and easy to encode in an
ordinary programming language. We describe the generation of the 3
procedures, discuss the correctness of our approach, highlight
some key elements of an implementation, and benchmark the
performance of a proof of concept. Our compilation scheme is
elegant and simple enough to be presented in one page.",
paper = "Anto12.pdf",
keywords = "printed"
}
\end{chunk}
\index{MacQueen, David B.}
\begin{chunk}{axiom.bib}
@misc{Macqxx,
author = "MacQueen, David B.",
title = {{Reflections on Standard ML}},
year = "unknown",
abstract =
"Standard ML is one of a number of new programming languages
developed in the 1980s that are seen as suitable vehicles for
serious systems and applications programming. It offers an
excellent ratio of expressiveness to language complexity, and
provides competitive efficiency. Because of its type and module
system, Standard ML manages to combine safety, security, and
robustness with much of the flexibility of dynamically typed
languages like Lisp. It also has the most well-developed
scientific foundation of any major language. Here I review the
strengths and weaknesses of Standard ML and describe some of what
we have learned through the design, implementation, and use of the
language.",
paper = "Macqxx.pdf",
keywords = "printed"
}
\end{chunk}
\index{Tarver, Mark}
\begin{chunk}{axiom.bib}
@misc{Tarvxx,
author = "Tarver, Mark",
title = {{A Language for Implementing Arbitrary Logics}},
year = "unknown",
abstract =
"SEQUEL is a new-generation functional programming language, which
allows the specification of types in a notation based on the
sequent calculus. The sequent calculus notation suffices for the
construction of types for type checking and for the specification
of arbitrary logics. Compilation techniques derived from both
functional and logic programming are used to derive
high-performance ATPs from these specifications.",
paper = "Tarvxx.pdf",
keywords = "printed"
}
\end{chunk}
\index{Collofello, James S.}
\begin{chunk}{axiom.bib}
@techreport{Coll88,
author = "Collofello, James S.",
title = {{Introduction to Software Verification and Validation}},
type = "technical report",
institution = "Carnegie Mellon University",
number = "SEI-CM-13-1.1",
year = "1988",
abstract =
"SEI curriculum modules document and explicate software
engineering topics. They are intended to be useful in a variety of
situations---in the preparation of courses, in the planning of
individual lectures, in the design of curricula, and in
professional self-study. Topics do not exist in isolation,
however, and the question inevitably arises how one module is
related to another. Because modules are written by different
authors at different times, the answer could easily be 'not at
all'. In a young field struggling to define itself, this would be
an unfortunate situation.
The SEI deliberately employs a number of mechanisms to achieve
compatible points of view across curriculum modules and to fill
the content gaps between them. Modules such as {\sl Introduction
to Software Verification and Validation} is one of the most
important devices. In this latest revision, Professor Collofello
has more modules to integrate into a coherent picture than when we
released his first draft more than a year ago---modules on
quality assurance, unit testing, technical reviews, and formal
verification, as well as less directly related modules on
specification, requirements definition, and design. We believe you
will find this curriculum module interesting and useful, both in
its own right and by virtue of the understanding of other modules
that it facilitates.",
paper = "Coll88.pdf",
keywords = "printed"
}
\end{chunk}
\index{Gravel, Katherine}
\index{Jananthan, Hayden}
\index{Kepner, Jeremy}
\begin{chunk}{axiom.bib}
@misc{Grav18,
author = "Gravel, Katherine and Jananthan, Hayden and Kepner, Jeremy",
title = {{Visually Representing the Landscape of Mathematical Structures}},
link = "\url{https://arxiv.org/abs/1809.05930}",
year = "2018",
abstract =
"The information technology explosion has dramatically increased
the application of new mathematical ideas and has led to an
increasing use of mathematics across a wide range of fields that
have been traditionally labeled 'pure' or 'theoretical'. There is
a critical need for tools to make these concepts readily
accessible to a broader community. This paper details the creation
of visual representations of mathematical structures commonly
found in pure mathematics to enable both students and
professionals to rapidly understand the relationships among and
between mathematical structures. Ten broad mathematical structures
were identified and used to make eleven maps, with 187 unique
structures in total. The paper presents a method and
implementation for categorizing mathematical structures and for
drawing relationships between mathematical structures that
provides insight for a wide audience. The most in-depth map is
available online for public use.",
comment = "\url{http://www.mit.edu/~kepner/GravelMathMap.pdf}",
paper = "Grav18.pdf"
}
\end{chunk}
\index{Hoare, C.A.R.}
\begin{chunk}{axiom.bib}
@article{Hoar74,
author = "Hoare, C.A.R.",
title = {{Hints on Programming Language Design}},
journal = "Computer Systems Reliability",
volume = "20",
pages = "505--534",
year = "1974",
link = "\url{http://flint.cs.yale.edu/cs428/doc/HintsPL.pdf}",
abstract =
"This paper presents the view that a programming language
is a tool that should assist the programmer in the most difficult
aspects of his art, namely program design, documentation, and
debugging. It discusses the objective criteria for evaluating a
language design and illustrates them by application to language
features of both high-level languages and machine-code
programming. It concludes with an annotated reading list,
recommended for all intending language designers.",
paper = "Hoar74.pdf",
keywords = "printed"
}
\end{chunk}
\index{Hemann, Jason}
\index{Friedman, Daniel P.}
\begin{chunk}{axiom.bib}
@misc{Hema13,
author = "Hemann, Jason and Friedman, Daniel P.",
title = {{$\mu$Kanren: A Minimal Functional Core for Relational Programming}},
year = "2013",
link = "\url{http://webyrd.net/scheme2013/papers/HemannMuKanren2013.pdf}",
abstract =
"This paper presents $\mu$Kanren, a minimalist language in the
miniKanren family of relational (logic) programming languages. Its
implementation comprises fewer than 40 lines of Scheme. We
motivate the need for a minimalist miniKanren language, and
iteratively develop a complete search strategy. Finally, we
demonstrate that through sufficient user-level features one
regains much of the expressiveness of other miniKanren
languages. In our opinion its brevity and simple semantics make
$\mu$Kanren uniquely elegant.",
paper = "Hema13.pdf",
keywords = "printed"
}
\end{chunk}
\index{Cardelli, Luca}
\begin{chunk}{axiom.bib}
@techreport{Card93,
author = "Cardelli, Luca",
title = {{Typeful Programming}},
type = "research report",
year = "1993",
institution = "Digital Equipment Corporation",
number = "SRC Research Report 45",
abstract =
"There exists an identifiable programming style based on the
widespread use of type information handled through mechanical
typechecking techniques.
This typeful programming style is in a sense independent of the
language it is embedded in; it adapts equally well to functional,
imperative, object-oriented, and algebraic programming, and it is
not incompatible with relational and concurrent programming.
The main purpose of this paper is to show how typeful programming
is best supported by sophisticated type systems, and how these
systems can help in clarifying programming issues and in adding
power and regularity to languages.
We start with an introduction to the notions of types, subtypes
and polymorphism. Then we introduce a general framework, derived
in part from constructive logic, into which most of the known type
systems can be accommodated and extended. The main part of the
paper shows how this framework can be adapted systematically to
cope with actual programming constructs. For concreteness we
describe a particular programming language with advanced features;
the emphasis here is on the combination of subtyping and
polymorphism. We then discuss how typing concepts apply to large
programs. We also sketch how typing applies to system programming;
an area which by nature escapes rigid typing. In summary, we
compare the most common programming styles, suggesting that many
of them are compatible with, and benefit from, a typeful discipline.",
paper = "Card93.pdf",
keywords = "printed"
}
\end{chunk}
\index{Lamport, Leslie}
\index{Paulson, Lawrence C.}
\begin{chunk}{axiom.bib}
@article{Lamp99,
author = "Lamport, Leslie and Paulson, Lawrence C.",
title = {{Should Your Specification Language Be Typed?}},
journal = "ACM Transactions on Programming Languages and Systems",
volume = "21",
number = "3",
year = "1999",
pages = "502--526",
abstract =
"Most specification languages have a type system. Type systems are
hard to get right, and getting them wrong can lead to
inconsistencies. Set theory can serve as the basis for a
specification language without types. This possibility, which has
been widely overlooked, offers many advantages. Untyped set theory
is simple and is more flexible than any simple typed
formalism. Polymorphism, overloading, and subtyping can make a
type system more powerful, but at the cost of increased
complexity, and such refinements can never attain the flexibility
of having no types at all. Typed formalisms have advantages too,
stemming from the power of mechanical type checking. While types
serve little purpose in hand proofs, they do help with mechanized
proofs. In the absence of verification, type checking can catch
errors in specifications. It may be possible to have the best of
both worlds by adding typing annotations to an untyped
specification language. We consider only specification languages,
not programming languages",
paper = "Lamp99.pdf",
keywords = "printed"
}
\end{chunk}
\index{Appel, Andrew W.}
\begin{chunk}{axiom.bib}
@article{Appe98a,
author = "Appel, Andrew W.",
title = {{SSA is Functional Programming}},
journal = "SIGPLAN Notices",
volume = "33",
number = "4",
pages = "17--20",
year = "1998",
abstract =
"Static Single-Assignment (SSA) form is an intermediate language
designed to make optimization clean and efficient for imperative
language (Fortran, C) compilers. Lambda-Calculus is an
intermediate language that makes optimization clean and efficient
for functional language (Scheme, ML, Haskell) compilers. The SSA
community draws pictures of graphs with basic blocks and flow
edges, and the functional language community writes lexically
nested functions, but (as Richard Kelsey recently pointed out)
they're both doing exactly the same thing in different notation.",
paper = "Appe98a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Cloostermans, Bouke}
\begin{chunk}{axiom.bib}
@misc{Cloo12,
author = "Cloostermans, Bouke",
title = {{Quasi-Linear GCD Computation and Factoring RSA Moduli}},
school = "Eindhoven Univ. of Technology",
comment = "Bachelor Project",
year = "2012",
abstract =
"This bachelor project consists of two parts. First, we describe
two subquadratic GCD algorithms and test our implementation of
these algorithms. Both algorithms work in a similar recursive way
and have a running time of $O(n \log^2 n \log\log n)$ where $n$ is
the amount of digits of the input. Second, we describe and compare
three algorithms which compute GCD's of more than two numbers. Two
algorithms use a tree-like structure to compute these GCD's and the
other algorithm builds a coprime base from the input set. The
algorithms which uses a tree-like structure to compute these GCD's
is faster than building a coprime base although all algorithms are
quasi-linear. The downside of these algorithm is that they only run
well if few common factors exist in the input set. These
algorithms have been used to factorize roughly 0.4\% of a 11.1
million RSA keys dataset. However, this is only possible if the
keys are badly generated.",
paper = "Cloo12.pdf",
keywords = "printed"
}
\end{chunk}
\index{Roessle, Ian}
\index{Verbeek, Freek}
\index{Ravindran, Binoy}
\begin{chunk}{axiom.bib}
@inproceedings{Roes19,
author = "Roessle, Ian and Verbeek, Freek and Ravindran, Binoy",
title = {{Formally Verified Big Step Semantics out of x86-64 Binaries}},
booktitle = "CPP'19",
year = "2019",
publisher = "ACM",
abstract =
"This paper presents a methodology for generating formally proven
equivalence theorems between decompiled x86-64 machine code and
big step semantics. These proofs are built on top of two
additional contributions. First, a robust and tested formal x86-64
machine model containing small step semantics for 1625
instructions. Second, a decompilation-into-logic methodology
supporting both x86-64 assembly and machine code at large
scale. This work enables black-box binary verification, i.e.,
formal verification of a binary where source code is
unavailable. As such, it can be applied to safety-critical systems
that consist of legacy components, or components whose source code
is unavailable due to proprietary reasons. The methodology
minimizes the trusted code base by leveraging machine-learned
semantics to build a formal machine model. We apply the
methodology to several case studies, including binaries that
heavily rely on the SSE2 floating-point instruction set, and
binaries that are obtained by compiling code that is obtained by
inlining assembly into C code.",
paper = "Roes19.pdf",
keywords = "printed"
}
\end{chunk}
\index{Kaliszyk, Cezary}
\index{Urban, Josef}
\begin{chunk}{axiom.bib}
@misc{Kali14,
author = "Kaliszyk, Cezary and Urban, Josef",
title = {{Learning-Assisted Automated Reasoning with Flyspeck}},
comment = "arXiv:1211.7012v3",
year = "2014",
abstract =
"The considerable mathematical knowledge encoded by the Flyspeck
project is combined with external automated theorem provers (ATPs)
and machine-learning premise selection methods trained on the
Flyspeck proofs, producing an AI system capable of proving a wide
range of mathematical conjectures automatically. The performance
of this architecture is evaluated in a bootstrapping scenario
emulating the development of Flyspeck from axioms to the last
theorem, each time using only the previous theorems and proofs. It
is shown that 39\% of the 14185 theorems could be proved in a
push-button mode (without any high-level advice and user
interaction) in 30 seconds of real time on a fourteen-CPU
workstation.
The necessary work involves: (i) an implementation of sound
translations of the HOL Light logic to ATP formalisms: untyped
first-order, polymorphic typed first-order, and typed higher-order,
(ii) export of the dependency information from HOL Light and ATP
proofs for the machine learners, and (iii) choice of suitable
representations and methods for learning from previous proofs, and
their integration as advisors with HOL Light. This work is
described and discussed here, and an initial analysis of the body
of proofs that were found fully automatically is provided.",
paper = "Kali14.pdf",
keywords = "printed"
}
\end{chunk}
\index{Mueller, Robert A.}
\index{Page, L. Rex}
\begin{chunk}{axiom.bib}
@book{Muel88,
author = "Mueller, Robert A. and Page, L. Rex",
title = {{Symbolic Computing with Lisp and Prolog}},
publisher = "Wiley",
year = "1988",
isbn = "0471607711"
}
\end{chunk}
\index{Stump, Aaron}
\begin{chunk}{axiom.bib}
@misc{Stum18,
author = "Stump, Aaron",
title = {{Syntax and Semantics of Cedille}},
year = "2018",
comment = "arXiv:1806.04709v1",
paper = "Stum18.pdf",
keywords = "printed"
}
\end{chunk}
\index{Geuvers, Herman}
\begin{chunk}{axiom.bib}
@article{Geuv00,
author = "Geuvers, Herman",
title = {{Induction Is Not Derivable in Second Order Dependent Type Theory}},
journal = "LNCS",
volume = "2044",
year = "2000",
pages = "166--181",
abstract =
"This paper proves the nonderivability of induction in second
order dependent type theory ($\lambda{}P2$). This is done by
providing a model construction for $\lambda{}P2$, based on a
saturated sets like interpretation of types as sets of terms of a
weakly extensional combinatory algebra. We give countermodels in
which the induction principle over natural numbers is not
valid. The proof does not depend on the specific encoding for
natural numbers that has been chosen (like e.g. polymorphic Church
numerals), so in fact we prove that there can not be an encoding
of natural numbers in $\lambda{}P2$ such that the induction
principle is satisfied. The method extends immediately to other
data types, like booleans, lists, trees, etc.
In the process of the proof we establish some general properties
of the models, which we think are of independent
interest. Moreover, we show that the Axiom of Choice is not
derivable in $\lambda{}P2$.",
paper = "Geuv00.pdf",
keywords = "printed"
}
\end{chunk}
\index{Sjoberg, Vilhelm}
\begin{chunk}{axiom.bib}
@phdthesis{Sjob15,
author = "Sjoberg, Vilhelm",
title = {{A Dependently Typed Language with NonTermination}},
school = "University of Pennsylvania",
year = "2015",
abstract =
"We propose a fullspectrum dependently typed programming
language, {\sl Zombie}, which supports general recursion
natively. The Zombie implementation is an elaborating
typechecker. We prove type safety for a large subset of the Zombie
core language, including features such as computational
irrelevance, CBVreduction, and propositional equality with a
heterogeneous, completely erased elimination form. Zombie does not
automatically $\beta$reduce expressions, but instead uses
congruence closure for proof and type inference. We give a
specification of a subset of the surface language via a
bidirectional type system, which works ``uptocongruence'', and
an algorithm for elaborating expressions in this language to an
explicitly typed core language. We prove that our elaboration
algorithm is complete with respect to the source type
system. Zombie also features an optional terminationchecker,
allowing nonterminating programs returning proofs as well as
external proofs about programs.",
paper = "Sjob15.pdf",
keywords = "printed"
}
\end{chunk}
\index{Cervesato, Iliano}
\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@inproceedings{Cerv96,
author = "Cervesato, Iliano and Pfenning, Frank",
title = {{A Linear Logical Framework}},
booktitle = "Logic in Computer Science",
publisher = "IEEE",
year = "1996",
abstract =
"We present the linear type theory LLF as the formal basis for a
conservative extension of the LF logical framework. LLF combines
the expressive power of dependent types with linear logic to
permit the natural and concise representation of a whole new class
of deductive systems, namely those dealing with state. As an
example we encode a version of MiniML with references including
its type system, its operational semantics, and a proof of type
preservation. Another example is the encoding of a sequent
calculus for classical linear logic and its cut elimination
theorem. LLF can also be given an operational interpretation as a
logic programming language under which the representations above
can be used for type inference, evaluation and cutelimination.",
paper = "Cerv96.pdf",
keywords = "printed"
}
\end{chunk}
\index{Cervesato, Iliano}
\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@article{Cerv00,
author = "Cervesato, Iliano and Pfenning, Frank",
title = {{A Linear Logical Framework}},
journal = "Information and Computation",
volume = "179",
number = "1",
pages = "19--75",
link = "\url{http://www.cs.cmu.edu/~fp/papers/llf00.pdf}",
year = "2002",
abstract =
"We present the linear type theory $\lambda^{\Pi\multimap\&\top}$ as the
formal basis for LLF, a conservative extension of the logical
framework LF. LLF combines the expressive power of dependent types
with linear logic to permit the natural and concise representation
of a whole new class of deductive systems, namely those dealing
with state. As an example we encode a version of MiniML with
mutable references including its type system and its operational
semantics, and describe how to take practical advantage of the
representation of its computations.",
paper = "Cerv00.pdf",
keywords = "printed"
}
\end{chunk}
\index{Girard, JeanYves}
\begin{chunk}{axiom.bib}
@book{Gira95,
author = "Girard, JeanYves",
title = {{Linear Logic: Its Syntax and Semantics}},
publisher = "Cambridge University Press",
year = "1995",
link = "\url{http://girard.perso.math.cnrs.fr/Synsem.pdf}",
isbn = "9780521559614",
paper = "Gira95.pdf",
keywords = "printed"
}
\end{chunk}
\index{Girard, JeanYves}
\begin{chunk}{axiom.bib}
@article{Gira87,
author = "Girard, JeanYves",
title = {{Linear Logic}},
journal = "Theoretical Computer Science",
volume = "50",
year = "1987",
pages = "1--102",
abstract =
"The familiar connective of negation is broken into two
operations: linear negation which is the purely negative part of
negation and the modality ``of course'' which has the meaning of a
reaffirmation. Following this basic discovery, a completely new
approach to the whole area between constructive logics and
programmation is initiated.",
paper = "Gira87.pdf",
keywords = "printed"
}
\end{chunk}
\index{Knill, Oliver}
\begin{chunk}{axiom.bib}
@misc{Knil18,
author = "Knill, Oliver",
title = {{Some Fundamental Theorems in Mathematics}},
link = "\url{https://arxiv.org/pdf/1807.08416.pdf}",
year = "2018",
abstract =
"An expository hitchhikers guide to some theorems in mathematics",
paper = "Knil18.pdf"
}
\end{chunk}
\index{Martzloff, J.C.}
\begin{chunk}{axiom.bib}
@book{Mart97a,
author = "Martzloff, J.C.",
title = {{A History of Chinese Mathematics}},
publisher = "Springer Verlag",
year = "1997"
}
\end{chunk}
\index{Ding, C.}
\index{Pei, D.}
\index{Salomaa, A.}
\begin{chunk}{axiom.bib}
@book{Ding96,
author = "Ding, C. and Pei, D. and Salomaa, A.",
title = {{Chinese Remainder Theorem, Applications in Computing,
Coding Cryptography}},
publisher = "World Scientific",
year = "1996"
}
\end{chunk}
\index{Knill, Oliver}
\begin{chunk}{axiom.bib}
@misc{Knil12,
author = "Knill, Oliver",
title = {{A Multivariate Chinese Remainder Theorem}},
link = "\url{https://arxiv.org/abs/1206.5114}",
year = "2012",
abstract =
"Using an adaptation of Qin Jiushao's method from the 13th
century, it is possible to prove that a system of linear modular
equations $a(i,1)x(1)+\ldots+a(i,n)x(n)=b(i)\bmod m(i), i=1,\ldots,n$ has
integer solutions if $m(i)>1$ are pairwise relatively prime and in
each row, at least one matrix element $a(i,j)$ is relatively prime
to $m(i)$. The Chinese remainder theorem is a special case, where
$A$ has only one column.",
paper = "Knil12.pdf"
}
\end{chunk}

books/axiom.bib  2861 +++++++++++++++++++++++++++++++++++++++
books/bookvolbib.pamphlet  1253 +++++++++++++++++
changelog  2 +
patch  1164 +++++++++++++++
src/axiomwebsite/patches.html  2 +
5 files changed, 5239 insertions(+), 43 deletions()
diff git a/books/axiom.bib b/books/axiom.bib
index ac9040a..d4726d9 100644
 a/books/axiom.bib
+++ b/books/axiom.bib
@@ 297,7 +297,7 @@
number = "9007.0",
year = "1995",
abstract =
 "It is widely recognized that programming languages should oer
+ "It is widely recognized that programming languages should offer
features to help structure programs. To achieve this goal, languages
like Ada , Modula2 , objectoriented languages, and functional
languages have been developed. The structuring techniques available
@@ 321,7 +321,9 @@
implementation and against existing computer algebra systems. Our
development should have an impact both on the programming languages
world and on the computer algebra world.",
 paper = "Baum95.pdf"
+ paper = "Baum95.pdf",
+ keywords = "printed, axiomref"
+
}
@techreport{Berg92,
@@ 476,7 +478,7 @@
$\nu$ reduction does not commute with algebraic reduction, in general.
However, using long $\nu$normal forms, we show that if $R$ is canonical
then $R+\beta+type\beta+type\nu$ convertibility is still decidable.",
paper = "Brea89.pdf"
+ paper = "Brea89.pdf"
}
@book{Buch82,
@@ 819,16 +821,6 @@ paper = "Brea89.pdf"
main features of the language as of version 2.0."
}
@inproceedings{Dama82,
 author = "Damas, Luis and Milner, Robin",
 title = {{Principal Typeschemes for Functional Programs}},
 booktitle = "POPL 82",
 pages = "207212",
 year = "1982",
 isbn = "0897980656",
 paper = "Dama82.pdf"
}

@book{Davi94,
author = "Davis, Martin D. and Sigal, Ron and Weyuker, Elaine J.",
title = {{Computability, Complexity, and Languages: Fundamentals of
@@ 855,7 +847,16 @@ paper = "Brea89.pdf"
Initial Semantics}},
publisher = "Springer Verlag",
year = "1985",
 isbn = "9780387137186"
+ isbn = "0387137181"
+}
+
+@book{Ehri85a,
+ author = "Ehrig, Hartmut and Mahr, Bernd",
+ title = {{Fundamentals of Algebraic Specification 2: Module
+ Specifications and Constraints}},
+ publisher = "Springer Verlag",
+ year = "1985",
+ isbn = "0387517995"
}
@inproceedings{Elli89,
@@ 1258,7 +1259,8 @@ paper = "Brea89.pdf"
reduced to type checking. The practical benefit of our treatment of
formal systems is that logicindependent tools, such as proof editors
and proof checkers, can be constructed.",
 paper = "Harp93.pdf"
+ paper = "Harp93.pdf",
+ keywords = "printed"
}
@article{Harp93a,
@@ 1360,7 +1362,7 @@ paper = "Brea89.pdf"
author = "Howard, W.H.",
title = {{The FormulaeasTypes Notion of Construction}},
year = "1969",
 link = "\url{}",
+ link = "\url{http://www.dcc.fc.up.pt/~acm/howard.pdf}",
abstract =
"The following consists of notes which were privately circulated in
1969. Since they have been referred to a few times in the literature,
@@ 1422,7 +1424,7 @@ paper = "Brea89.pdf"
@misc{Isab11,
author = "Unknown",
 title = {{Isabell}},
+ title = {{Isabelle}},
link = "\url{http://www.cl.cam.ac.uk/research/hvg/Isabelle/index.html}",
year = "2011"
}
@@ 1907,7 +1909,8 @@ paper = "Brea89.pdf"
publisher = "Lab for Foundations of Computer Science, Univ. Edinburgh",
link = "\url{http://smlfamily.org/sml90defn.pdf}",
year = "1990",
 paper = "Miln90.pdf"
+ paper = "Miln90.pdf",
+ keywords = "printed"
}
@book{Miln97,
@@ 2403,6 +2406,44 @@ paper = "Brea89.pdf"
year = "1972"
}
+@inproceedings{Siek06,
+ author = "Siek, Jeremy, G. and Taha, Walid",
+ title = {{Gradual Typing for Functional Languages}},
+ booktitle = "Proc. of Scheme and Functional Programming Workshop",
+ year = "2006",
+ publisher = "ACM",
+ pages = "8192",
+ abstract =
+ "Static and dynamic type systems have wellknown strengths and
+ weaknesses, and each is better suited for different programming
+ tasks. There have been many efforts to integrate static and dynamic
+ typing and thereby combine the benefits of both typing disciplines in
+ the same language. The flexibility of static typing can be improved
+ by adding a type Dynamic and a typecase form. The safety and
+ performance of dynamic typing can be improved by adding optional type
+ annotations or by performing type inference (as in soft
+ typing). However, there has been little formal work on type systems
+ that allow a programmercontrolled migration between dynamic and
+ static typing. Thatte proposed QuasiStatic Typing, but it does not
+ statically catch all type errors in completely annotated
+ programs. Anderson and Drossopoulou defined a nominal type system
+ for an objectoriented language with optional type annotations.
+ However, developing a sound, gradual type system for functional
+ languages with structural types is an open problem.
+
+ In this paper we present a solution based on the intuition that the
+ structure of a type may be partially known/unknown at compile time
+ and the job of the type system is to catch incompatibilities between
+ the known parts of types. We define the static and dynamic semantics
+ of a $\lambda$calculus with optional type annotations and we prove that
+ its type system is sound with respect to the simplytyped
+ $\lambda$calculus for fullyannotated terms. We prove that this
+ calculus is type safe and that the cost of dynamism is
+ 'payasyougo'.",
+ paper = "Siek06.pdf",
+ keywords = "printed"
+}
+
@article{Siek89,
author = "Siekmann, Jorg H.",
title = {{Unification Theory}},
@@ 7070,7 +7111,7 @@ paper = "Brea89.pdf"
link = "\url{http://www.cecm.sfu.ca/personal/mmonagan/papers/AFGCD.pdf}",
abstract =
"Let $L$ be an algebraic function field in $k \ge 0$ parameters
 $t_1,\ldots,t)k$. Let $f_1$, $f_2$ be nonzero polynomials in
+ $t_1,\ldots,t_k$. Let $f_1$, $f_2$ be nonzero polynomials in
$L[x]$. We give two algorithms for computing their gcd. The first, a
modular GCD algorithm, is an extension of the modular GCD algorithm
for Brown for {\bf Z}$[x_1,\ldots,x_n]$ and Encarnacion for {\bf
@@ 7110,8 +7151,8 @@ paper = "Brea89.pdf"
author = "Baez, John C.; Stay, Mike",
title = {{Physics, Topology, Logic and Computation: A Rosetta Stone}},
link = "\url{http://arxiv.org/pdf/0903.0340v3.pdf}",
 abstract = "
 In physics, Feynman diagrams are used to reason about quantum
+ abstract =
+ "In physics, Feynman diagrams are used to reason about quantum
processes. In the 1980s, it became clear that underlying these
diagrams is a powerful analogy between quantum physics and
topology. Namely, a linear operator behaves very much like a
@@ 7484,7 +7525,8 @@ paper = "Brea89.pdf"
exculded middle similar to Brouwer's had been advanced in print by
Jules Molk two years before. Finally, we discuss the influence on
George Griss' negationless mathematics",
 paper = "Atte15.pdf, printed"
+ paper = "Atte15.pdf",
+ keywords = "printed"
}
@misc{Avig17b,
@@ 9470,6 +9512,80 @@ paper = "Brea89.pdf"
link = "\url{http://www.wolfram.com}"
}
+@misc{Daik18,
+ author = "Unknown",
+ title = {{Daikon Invariant Detector Developer Manual}},
+ year = "2018",
+ version = "5.7.2",
+ link =
+ "\url{http://plse.cs.washington.edu/daikon/download/doc/developer.pdf}",
+ paper = "Daik18.pdf"
+}
+
+@misc{Daik18a,
+ author = "Unknown",
+ title = {{Daikon Invariant Detector User Manual}},
+ year = "2018",
+ version = "5.7.2",
+ link =
+ "\url{http://plse.cs.washington.edu/daikon/download/doc/daikon.pdf}",
+ paper = "Daik18a.pdf"
+}
+
+@misc{Alte18,
+ author = "Altenkirch, Thorsten",
+ title = {{Naive Type Theory}},
+ year = "2018",
+ link = "\url{https://www.youtube.com/watch?v=bNG53SA4n48p}"
+}
+
+@article{Anto12,
+ author = "Antoy, Sergio and Peters, Arthur",
+ title = {{Compiling a Functional Logic Language}},
+ booktitle = "11th Int. Symp. on Functional and Logic Programming",
+ journal = "LNCS",
+ volume = "7294",
+ pages = "17--31",
+ year = "2012",
+ abstract =
+ "We present the design of a compiler for a functional logic
+ programming language and discuss the compiler's implementation.
+ The source program is abstracted by a constructor based graph
+ rewriting system obtained from a functional logic program after
+ syntax desugaring, lambda lifting and similar transformations
+ provided by a compiler's frontend. This system is
+ nondeterministic and requires a specialized normalization
+ strategy. The target program consists of 3 procedures that execute
+ graph replacements originating from either rewrite or pulltab
+ steps. These procedures are deterministic and easy to encode in an
+ ordinary programming language. We describe the generation of the 3
+ procedures, discuss the correctness of our approach, highlight
+ some key elements of an implementation, and benchmark the
+ performance of a proof of concept. Our compilation scheme is
+ elegant and simple enough to be presented in one page.",
+ paper = "Anto12.pdf",
+ keywords = "printed"
+}
+
+@article{Appe98a,
+ author = "Appel, Andrew W.",
+ title = {{SSA is Functional Programming}},
+ journal = "SIGPLAN Notices",
+ year = "1998",
+ abstract =
+ "Static SingleAssignment (SSA) form is an intermediate language
+ designed to make optimization clean and efficient for imperative
+ language (Fortran, C) compilers. LambdaCalculus is an
+ intermediate language that makes optimization clean and efficient
+ for functional language (Scheme, ML, Haskell) compilers. The SSA
+ community draws pictures of graphs with basic blocks and flow
+ edges, and the functional language community writes lexically
+ nested functions, but (as Richard Kelsey recently pointed out)
+ they're both doing exactly the same thing in different notation.",
+ paper = "Appe98a.pdf",
+ keywords = "printed"
+}
+
@book{Arko17,
author = "Arkoudas, Konstantine and Musser, David",
title = {{Fundamental Proof Methods in Computer Science}},
@@ 9478,6 +9594,51 @@ paper = "Brea89.pdf"
isbn = "9780262035538"
}
+@article{Arms19,
+ author = "Armstrong, Alasdair and Bauereiss, Thomas and Campbell, Brian
+ and Reid, Alastair and Gray, Kathryn E. and Norton, Robert M.
+ and Mundkur, Prashanth and Wassell, Mark and French, Jon
+ and Pulte, Christopher and Flur, Shaked and Krishnaswami, Neel
+ and Sewell, Peter",
+ title = {{ISA Semantics for ARMv8A, RISCV, and CHERIMIPS}},
+ journal = "Proc. ACM Programming Lang.",
+ volume = "3",
+ year = "2019",
+ abstract =
+ "Architecture specifications notionally define the fundamental
+ interface between hardware and software: the envelope of allowed
+ behaviour for processor implementations, and the basic assumptions for
+ software development and verification. But in practice, they are
+ typically prose and pseudocode documents, not rigorous or executable
+ artifacts, leaving software and verification on shaky ground.
+
+ In this paper, we present rigorous semantic models for the sequential
+ behaviour of large parts of the mainstream ARMv8A, RISCV, and MIPS
+ architectures, and the research CHERIMIPS architecture, that are
+ complete enough to boot operating systems, variously Linux, FreeBSD,
+ or seL4. Our ARMv8A models are automatically translated from
+ authoritative ARMinternal definitions, and (in one variant) tested
+ against the ARM Architecture Validation Suite.
+
+ We do this using a custom language for ISA semantics, Sail, with a
+ lightweight dependent type system, that supports automatic generation
+ of emulator code in C and OCaml, and automatic generation of
+ proofassistant definitions for Isabelle, HOL4, and (currently only
+ for MIPS) Coq. We use the former for validation, and to assess
+ specification coverage. To demonstrate the usability of the latter, we
+ prove (in Isabelle) correctness of a purely functional
+ characterisation of ARMv8A address translation. We moreover integrate
+ the RISCV model into the RMEM tool for (usermode) relaxedmemory
+ concurrency exploration. We prove (on paper) the soundness of the core
+ Sail type system.
+
+ We thereby take a big step towards making the architectural
+ abstraction actually welldefined, establishing foundations for
+ verification and reasoning.",
+ paper = "Arms19.pdf",
+ keywords = "printed"
+}
+
@article{Aspi01,
author = "Aspinall, David and Compagnoni, Adriana",
title = {{Subtyping Dependent Types}},
@@ 9503,6 +9664,144 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@inproceedings{Atke18,
+ author = "Atkey, Robert",
+ title = {{Syntax and Semantics of Quantitative Type Theory}},
+ booktitle = "LICS",
+ year = "2018",
+ publisher = "ACM",
+ isbn = "9781450355834",
+ abstract =
+ "We present Quantitative Type Theory, a Type Theory that records
+ usage information for each variable in a judgement, based on a
+ previous system by McBride. The usage information is used to give
+ a realizability semantics using a variant of Linear Combinatory
+ Algebras, refining the usual realizability semantics of Type
+ Theory by accurately tracking resource behaviour. We define the
+ semantics in terms of Quantitative Categories with Families, a
+ novel extension of Categories with Families for modelling resource
+ sensitive type theories.",
+ paper = "Atke18.pdf",
+ keywords = "printed"
+}
+
+@misc{Baue19,
+ author = "Bauer, Andrej",
+ title = {{How to Implement Type Theory in an Hour}},
+ year = "2019",
+ link = "\url{https://vimeo.com/286652934}",
+ comment = "\url{https://github.com/andrejbauer/spartantypetheory}"
+}
+
+@article{Bake92,
+ author = "Baker, Henry G.",
+ title = {{A Decision Procedure for Common Lisp's SUBTYPEP Predicate}},
+ journal = "Lisp and Symbolic Computation",
+ volume = "5",
+ number = "3",
+ year = "1992",
+ pages = "157--190",
+ abstract =
+ "Common Lisp [CL84,CL90] includes a dynamic datatype system of
+ moderate complexity, as well as predicates for checking the types of
+ language objects. Additionally, an interesting predicate of two 'type
+ specifiers'— SUBTYPEP —is included in the language. This subtypep
+ predicate provides a mechanism with which to query the Common Lisp
+ type system regarding containment relations among the various builtin
+ and userdefined types. While subtypep is rarely needed by an
+ applications programmer, the efficiency of a Common Lisp
+ implementation can depend critically upon the quality of its subtypep
+ predicate: the runtime system typically calls upon subtypep to decide
+ what sort of representations to use when making arrays; the compiler
+ calls upon subtypep to interpret user declarations, on which efficient
+ data representation and code generation decisions are based.
+
+ As might be expected due to the complexity of the Common Lisp type
+ system, there may be type containment questions which cannot be
+ decided. In these cases subtypep is expected to return 'can't
+ determine', in order to avoid giving an incorrect
+ answer. Unfortunately, most Common Lisp implementations have abused
+ this license by answering 'can't determine' in all but the most
+ trivial cases. In particular, most Common Lisp implementations of
+ SUBTYPEP fail on the basic axioms of the Common Lisp type system
+ itself [CL84,p.33]. This situation is particularly embarrassing for
+ Lisp—the premier 'symbol processing language'—in which the
+ implementation of complex symbolic logical operations should be
+ relatively easy. Since subtypep was presumably included in Common Lisp
+ to answer the hard cases of type containment, this 'lazy evaluation'
+ limits the usefulness of an important language feature.
+
+ This paper shows how those type containment relations of Common Lisp
+ which can be decided at all, can be decided simply and quickly by a
+ decision procedure which can dramatically reduce the number of
+ occurrences of the 'can't determine' answer from subtypep . This
+ decision procedure does not require the conversion of a type specifier
+ expression to conjunctive or disjunctive normal form, and therefore
+ does not incur the exponential explosion in space and time that such a
+ conversion would entail.
+
+ The lattice mechanism described here for deciding subtypep is also
+ ideal for performing type inference [Baker90]; the particular
+ implementation developed here, however, is specific to the type system
+ of Common Lisp [Beer88].",
+ paper = "Bake92.pdf",
+ keywords = "printed"
+}
+
+@inproceedings{Balz17,
+ author = "Balzer, Stephanie and Pfenning, Frank",
+ title = {{Manifest Sharing with Session Types}},
+ booktitle = "Proc. ACM Program. Lang.",
+ publisher = "ACM",
+ year = "2017",
+ link = "\url{https://www.cs.cmu.edu/~fp/papers/icfp17.pdf}",
+ abstract =
+ "Sessiontyped languages building on the CurryHoward isomorphism
+ between linear logic and sessiontyped communication guarantee session
+ fidelity and deadlock freedom. Unfortunately, these strong guarantees
+ exclude many naturally occurring programming patterns pertaining to
+ shared resources. In this paper, we introduce sharing into a
+ sessiontyped language where types are stratified into linear and
+ shared layers with modal operators connecting the layers. The
+ resulting language retains session fidelity but not the absence of
+ deadlocks, which can arise from contention for shared processes. We
+ illustrate our language on various examples, such as the dining
+ philosophers problem, and provide a translation of the untyped
+ asynchronous $\Pi$calculus into our language.",
+ paper = "Balz17.pdf",
+ keywords = "printed"
+}
+
+@article{Balz18,
+ author = "Balzer, Stephanie and Pfenning, Frank and Toninho, Bernardo",
+ title = {{A Universal Session Type for Untyped Asynchronous Communication}},
+ journal = "CONCUR",
+ volume = "30",
+ number = "1",
+ year = "2018",
+ link = "\url{https://www.cs.cmu.edu/~fp/papers/univtype18.pdf}",
+ abstract =
+ "In the simplytyped $\lambda$calculus we can recover the full
+ range of expressiveness of the untyped $\lambda$calculus solely
+ by adding a single recursive type
+ $U = U\rightarrow U$. In contract,
+ in the sessiontyped $\Pi$calculus, recursion alone is
+ insufficient to recover the untyped $\Pi$calculus, primarily due
+ to linearity: each channel just has two unique endpoints. In this
+ paper, we show that shared channels with a corresponding sharing
+ semantics (based on the language SILL$_s$ developed in prior
+ work) are enough to embed the untyped asynchronous $\Pi$calculus
+ via a universal shared session type $U_s$. We show that
+ our encoding of the asynchronous $\Pi$calculus satisfies
+ operational correspondence and preserves observable actions (i.e.,
+ processes are weakly bisimilar to their encoding). Moreover, we
+ clarify the expressiveness of SILL$_s$ by developing an
+ operationally correct encoding of SILL$_s$ in the asynchronous
+ $\Pi$calculus.",
+ paper = "Balz18.pdf",
+ keywords = "printed"
+}
+
@article{Bare91,
author = "Barendregt, Hendrik Pieter",
title = {{An Introduction to Generalized Type Systems}},
@@ 9529,6 +9828,241 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@book{Barw96,
+ author = "Barwise, Jon and Moss, Lawrence",
+ title = {{Vicious Circles}},
+ publisher = "CSLI Publications",
+ year = "1996",
+ isbn = "1575860082"
+}
+
+@article{Beer87,
+ author = "Beer, Randall D.",
+ title = {{Preliminary Report on A Practical Type Inference System
+ for Common Lisp}},
+ journal = "Lisp Pointers",
+ volume = "1",
+ number = "2",
+ year = "1987",
+ pages = "5--11",
+ paper = "Beer87.pdf",
+ keywords = "printed"
+}
+
+@article{Boas12,
+ author = "Boas, Peter van Emde",
+ title = {{Turing Machines for Dummies}},
+ journal = "LNCS",
+ volume = "7147",
+ pages = "14--30",
+ year = "2012",
+ abstract =
+ "Various methods exist in the literature for denoting the
+ configuration of a Turing Machine. A key difference is whether the head
+ position is indicated by some integer (mathematical representation) or
+ is specified by writing the machine state next to the scanned tape
+ symbol (intrinsic representation).
+
+ From a mathematical perspective this will make no difference. However,
+ since Turing Machines are primarily used for proving undecidability
+ and/or hardness results these representations do matter. Based on
+ a number of applications we show that the intrinsic representation
+ should be preferred.",
+ paper = "Boas12.pdf",
+ keywords = "printed"
+}
+
+@article{Bohr17,
+ author = "Bohrer, Brandon and Crary, Karl",
+ title = {{TWAM: A Certifying Abstract Machine for Logic Programs}},
+ journal = "ACM Trans. on Computational Logic",
+ volume = "1",
+ number = "1",
+ year = "2017",
+ link = "\url{https://arxiv.org/pdf/1801.00471.pdf}",
+ abstract =
+ "Typepreserving (or typed) compilation uses typing derivations to
+ certify correctness properties of compilation. We have designed and
+ implemented a typepreserving compiler for a simplytyped dialect of
+ Prolog we call TProlog. The crux of our approach is a new certifying
+ abstract machine which we call the Typed Warren Abstract Machine
+ (TWAM). The TWAM has a dependent type system strong enough to specify
+ the semantics of a logic program in the logical framework LF. We
+ present a soundness metatheorem which constitutes a partial
+ correctness guarantee: welltyped programs implement the logic
+ program specified by their type. This metatheorem justifies our design
+ and implementation of a certifying compiler from TProlog to TWAM.",
+ paper = "Bohr17.pdf",
+ keywords = "printed"
+}
+
+@book{Bool07,
+ author = "Boolos, George S. and Burgess, John P. and Jeffrey, Richard C.",
+ title = {{Computability and Logic}},
+ publisher = "Cambridge University Press",
+ year = "2007",
+ isbn = "9780521701464"
+
+}
+
+@book{Bost97,
+ author = "Bostock, David",
+ title = {{Intermediate Logic}},
+ publisher = "Clarendon Press",
+ year = "1997",
+ isbn = "0198751419",
+ paper = "Bost97.pdf"
+}
+
+@book{Boye81a,
+ author = "Boyer, Robert S. and Moore, J Strother",
+ title = {{The Correctness Problem in Computer Science}},
+ publisher = "Academic Press",
+ year = "1981",
+ isbn = "0121229203"
+
+}
+
+@phdthesis{Brad05,
+ author = "Brady, Edwin C.",
+ title = {{Practical Implementation of a Dependently Typed Functional
+ Programming Language}},
+ school = "University of Durham",
+ year = "2005",
+ link = "\url{https://eb.host.cs.standrews.ac.uk/writings/thesis.pdf}",
+ abstract =
+ "Types express a program’s meaning, and checking types ensures that a
+ program has the intended meaning. In a dependently typed programming
+ language types are predicated on values, leading to the possibility of
+ expressing invariants of a program’s behaviour in its type. Dependent
+ types allow us to give more detailed meanings to programs, and hence
+ be more confident of their correctness.
+
+ This thesis considers the practical implementation of a dependently
+ typed programming language, using the Epigram notation defined by
+ McBride and McKinna. Epigram is a high level notation for dependently
+ typed functional programming elaborating to a core type theory based
+ on Luo’s UTT, using Dybjer’s inductive families and elimination rules
+ to implement pattern matching. This gives us a rich framework for
+ reasoning about programs. However, a naive implementation introduces
+ several runtime overheads since the type system blurs the
+ distinction between types and values; these overheads include the
+ duplication of values, and the storage of redundant information and
+ explicit proofs.
+
+ A practical implementation of any programming language should be as
+ efficient as possible; in this thesis we see how the apparent
+ efficiency problems of dependently typed programming can be overcome
+ and that in many cases the richer type information allows us to apply
+ optimisations which are not directly available in traditional
+ languages. I introduce three storage optimisations on inductive
+ families; forcing, detagging and collapsing. I further introduce a
+ compilation scheme from the core type theory to Gmachine code,
+ including a pattern matching compiler for elimination rules and a
+ compilation scheme for efficient run time implementation of Peano’s
+ natural numbers. We also see some low level optimisations for removal
+ of identity functions, unused arguments and impossible case branches.
+ As a result, we see that a dependent type theory is an effective base
+ on which to build a feasible programming language.",
+ paper = "Brad05.pdf"
+}
+
+@misc{Brad17,
+ author = "Brady, Edwin",
+ title = {{Dependent Types in the Idris Programming Language 1}},
+ year = "2017",
+ link = "\url{https://www.cs.uoregon.edu/research/summerschool/summer17/topics.php}"
+
+}
+
+@inproceedings{Broo82,
+ author = "Brooks, Rodney A. and Gabriel, Richard P. and Steele, Guy L.",
+ title = {{S1 Common Lisp Implementation}},
+ booktitle = "Proc ACM Symp. on Lisp and functional programming",
+ publisher = "ACM",
+ year = "1982",
+ isbn = "0897910826",
+ abstract =
+ "We are developing a Lisp implementation for the Lawrence
+ Livermore National Laboratory S1 Mark IIA computer. The dialect
+ of Lisp is an extension of COMMON Lisp [Steele;1982], a descendant
+ of Maclisp [Moon;1974] and Lisp Machine Lisp [Weinreb;1981].",
+ paper = "Broo82.pdf",
+ keywords = "printed"
+
+}
+
+@inproceedings{Broo84,
+ author = "Brooks, Rodney A. and Gabriel, Richard P.",
+ title = {{A Critique of Common Lisp}},
+ booktitle = "Symposium on Lisp and Functional Programming",
+ pages = "1--8",
+ year = "1984",
+ abstract =
+ "A major goal of the COMMON LISP committee was to define a Lisp
+ language with sufficient power and generality that people would be
+ happy to stay within its confines and thus write inherently
+ transportable code. We argue that the resulting language
+ definition is too large for many shortterm and mediumterm
+ potential applications. In addition many parts of COMMON LISP
+ cannot be implemented very efficiently on stock hardware. We
+ further argue that the very generality of the design with its
+ different efficiency profiles on different architectures works
+ against the goal of transportability.",
+ paper = "Broo84.pdf",
+ keywords = "printed"
+}
+
+@inproceedings{Broo86,
+ author = "Brooks, Rodney A. and Posner, David B. and McDonald, James L.
+ and White, Jon L. and Benson, Eric and Gabriel, Richard P.",
+ title = {{Design of An Optimising, Dynamically Retargetable Compiler
+ for Common Lisp}},
+ booktitle = "Conf. on Lisp and Functional Programming",
+ publisher = "ACM",
+ pages = "67--85",
+ year = "1986",
+ abstract =
+ "We outline the components of a retargetable crosscompiler for
+ the Common Lisp language. A description is given of a method for
+ modeling the various hardware features in the compiler's database,
+ and a breakdown is shown of the compiler itself into various
+ machineindependent and machinedependent modules. A novel feature
+ of this development is the dynamic nature of the retargeting:
+ Databases for multiple hardware architectures are a standard part
+ of the compiler, and the internal interfaces used by the compiler
+ are such that the machinedependent modules may be instantly
+ switched from one to another. Examples of generated code in
+ several environments will be given to demonstrate the high quality
+ of the output available, even under this very modular approach.",
+ paper = "Broo86.pdf",
+ keywords = "printed"
+}
+
+@article{Burs69,
+ author = "Burstall, R.M.",
+ title = {{Proving Properties of Programs by Structural Induction}},
+ journal = "Computer Journal",
+ volume = "12",
+ number = "1",
+ pages = "41--48",
+ link = "\url{http://www.cse.chalmers.se/edu/year/2010/course/DAT140_Types/Burstall.pdf}",
+ year = "1969",
+ abstract =
+ "This paper discusses the technique of structural induction for
+ proving theorems about programs. This technique is closely related to
+ recursion induction but makes use of the inductive definition of the
+ data structures handled by the programs. It treats programs with
+ recursion but without assignments or jumps. Some syntactic extensions
+ to Landin's functional programming language ISWIM are suggested which
+ make it easier to program the manipulation of data structures and to
+ develop proofs about such programs. Two sample proofs are given to
+ demonstrate the technique, one for a tree sorting algorithm and one
+ for a simple compiler for expressions.",
+ paper = "Burs69.pdf",
+ keywords = "printed"
+}
+
@article{Call08,
author = "Callaghan, Paul",
title = {{Coercive Subtyping via Mappings of Reduction Behaviour}},
@@ 9565,6 +10099,327 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@techreport{Card93,
+ author = "Cardelli, Luca",
+ title = {{Typeful Programming}},
+ type = "research report",
+ year = "1993",
+ institution = "Digital Equipment Corporation",
+ number = "SRC Research Report 45",
+ abstract =
+ "There exists an identifiable programming style based on the
+ widespread use of type information handled through mechanical
+ typechecking techniques.
+
+ This typeful programming style is in a sense independent of the
+ language it is embedded in; it adapts equally well to functional,
+ imperative, objectoriented, and algebraic programming, and it is
+ not incompatible with relational and concurrent programming.
+
+ The main purpose of this paper is to show how typeful programming
+ is best supported by sophisticated type systems, and how these
+ systems can help in clarifying programming issues and in adding
+ power and regularity to languages.
+
+ We start with an introduction to the notions of types, subtypes
+ and polymorphism. Then we introduce a general framework, derived
+ in part from constructive logic, into which most of the known type
+ systems can be accommodated and extended. The main part of the
+ paper shows how this framework can be adapted systematically to
+ cope with actual programming constructs. For concreteness we
+ describe a particular programming language with advanced features;
+ the emphasis here is on the combination of subtyping and
+ polymorphism. We then discuss how typing concepts apply to large
+ programs. We also sketch how typing applies to system programming;
+ an area which by nature escapes rigid typing. In summary, we
+ compare the most common programming styles, suggesting that many
+ of them are compatible with, and benefit from, a typeful discipline.",
+ paper = "Card93.pdf",
+ keywords = "printed"
+}
+
+@misc{Cast19,
+ author = "Castagna, Giuseppe and Lanvin, Victor and Petrucciani, Tommaso
+ and Siek, Jeremy G.",
+ title = {{Gradual Typing: A New Perspective}},
+ link = "\url{https://www.irif.fr/~gc//papers/popl19.pdf}",
+ year = "2019",
+ abstract =
+ "We define a new, more semantic interpretation of gradual types and use
+ it to “gradualize” two forms of polymorphism: subtyping polymorphism
+ and implicit parametric polymorphism. In particular, we use the new
+ interpretation to define three gradual type systems —Hindley-Milner,
+ with subtyping, and with union and intersection types— in terms of two
+ preorders, subtyping and materialization. These systems are defined
+ both declaratively and algorithmically. The declarative presentation
+ consists in adding two subsumptionlike rules, one for each preorder,
+ to the standard rules of each type system. This yields more
+ intelligible and streamlined definitions and shows a direct
+ correlation between cast insertion and materialization. For the
+ algorithmic presentation, we show how it can be defined by reusing
+ existing techniques such as unification and tallying.",
+ paper = "Cast19.pdf"
+}
+
+@inproceedings{Cerv96,
+ author = "Cervesato, Iliano and Pfenning, Frank",
+ title = {{A Linear Logical Framework}},
+ booktitle = "Logic in Computer Science",
+ publisher = "IEEE",
+ pages = "264--275",
+ link = "\url{http://www.cs.cmu.edu/~fp/papers/lics96.pdf}",
+ year = "1996",
+ abstract =
+ "We present the linear type theory LLF as the formal basis for a
+ conservative extension of the LF logical framework. LLF combines
+ the expressive power of dependent types with linear logic to
+ permit the natural and concise representation of a whole new class
+ of deductive systems, namely those dealing with state. As an
+ example we encode a version of MiniML with references including
+ its type system, its operational semantics, and a proof of type
+ preservation. Another example is the encoding of a sequent
+ calculus for classical linear logic and its cut elimination
+ theorem. LLF can also be given an operational interpretation as a
+ logic programming language under which the representations above
+ can be used for type inference, evaluation and cutelimination.",
+ paper = "Cerv96.pdf",
+ keywords = "printed"
+}
+
+@article{Cerv00,
+ author = "Cervesato, Iliano and Pfenning, Frank",
+ title = {{A Linear Logical Framework}},
+ journal = "Information and Computation",
+ volume = "179",
+ number = "1",
+ pages = "19--75",
+ link = "\url{http://www.cs.cmu.edu/~fp/papers/llf00.pdf}",
+ year = "2002",
+ abstract =
+ "We present the linear type theory $\lambda^{\Pi\&\top}$ as the
+ formal basis for LLF, a conservative extension of the logical
+ framework LF. LLF combines the expressive power of dependent types
+ with linear logic to permit the natural and concise representation
+ of a whole new class of deductive systems, namely those dealing
+ with state. As an example we encode a version of MiniML with
+ mutable references including its type system and its operational
+ semantics, and describe how to take practical advantage of the
+ representation of its computations.",
+ paper = "Cerv00.pdf",
+ keywords = "printed"
+}
+
+@book{Chis07,
+ author = "Chiswell, Ian and Hodges, Wilfrid",
+ title = {{Mathematical Logic}},
+ publisher = "Oxford University Press",
+ year = "2007",
+ isbn = "9780198571001",
+ paper = "Chis07.pdf"
+}
+
+@misc{Chri18,
+ author = "Christiansen, David Thrane",
+ title = {{A Little Taste of Dependent Types}},
+ year = "2018",
+ link = "\url{https://www.youtube.com/watch?v=VxINoKFmS4}",
+ abstract =
+ "Dependent types let us use the same programming language for
+ compiletime and runtime code, and are inching their way towards the
+ mainstream from research languages like Coq, Agda and Idris. Dependent
+ types are useful for programming, but they also unite programming and
+ mathematical proofs, allowing us to use the tools and techniques we
+ know from programming to do math.
+
+ The essential beauty of dependent types can sometimes be hard to find
+ under layers of powerful automatic tools. The Little Typer is an
+ upcoming book on dependent types in the tradition of the The Little
+ Schemer that features a tiny dependently typed language called Pie. We
+ will demonstrate a proof in Pie that is also a program."
+}
+
+@misc{Chow18,
+ author = "Chow, Timothy Y.",
+ title = {{The Consistency of Arithmetic}},
+ year = "2018",
+ link = "\url{http://timothychow.net/consistent.pdf}",
+ paper = "Chow18.pdf",
+ keywords = "printed"
+}
+
+@misc{Chri18a,
+ author = "Christiansen, David Thrane",
+ title = {{Coding for Types: The Universe Pattern in Idris}},
+ year = "2018",
+ link = "\url{https://www.youtube.com/watch?v=AWeT_G04a0A}"
+}
+
+@book{Chur41,
+ author = "Church, Alonzo",
+ title = {{The Calculi of Lambda Conversion}},
+ year = "1941",
+ publisher = "Princeton University Press",
+ paper = "Alon41*.pdf"
+}
+
+@article{Cloc18,
+ author = "Clochard, Martin and Gondelman, Leon and Pereira, Mario",
+ title = {{The Matrix Reproved}},
+ journal = "Journal of Automated Reasoning",
+ volume = "60",
+ number = "3",
+ pages = "365--383",
+ year = "2018",
+ link = "\url{https://hal.inria.fr/hal01617437/document}",
+ abstract =
+ "In this paper we describe a complete solution for the
+ first challenge of the VerifyThis 2016 competition held at the 18th
+ ETAPS Forum. We present the proof of two variants for the
+ multiplication of matrices: a naive version using three nested loops
+ and Strassen's algorithm. The proofs are conducted using the Why3
+ platform for deductive program verification and automated theorem
+ provers to discharge proof obligations. In order to specify and
+ prove the two multiplication algorithms, we develop a new Why3
+ theory of matrices. In order to prove the matrix identities on which
+ Strassen's algorithm is based, we apply the proof by reflection
+ methodology, which we implement using ghost state. To our knowledge,
+ this is the first time such a methodology is used under an
+ autoactive setting.",
+ keywords = "printed"
+}
+
+@book{Cloc91,
+ author = "Clocksin, William F.",
+ title = {{Clause and Effect}},
+ year = "1991",
+ publisher = "Springer",
+ isbn = "3540629718"
+}
+
+@misc{Cloo12,
+ author = "Cloostermans, Bouke",
+ title = {{Quasi-Linear GCD Computation and Factoring RSA Moduli}},
+ school = "Eindhoven Univ. of Technology",
+ comment = "Bachelor Project",
+ year = "2012",
+ abstract =
+ "This bachelor project consists of two parts. First, we describe
+ two subquadratic GCD algorithms and test our implementation of
+ these algorithms. Both algorithms work in a similar recursive way
+ and have a running time of $O(n log^2n log log n)$ where $n$ is
+ the amount of digits of the input. Second, we describe and compare
+ three algorithms which compute GCD's of more than two numbers. Two
+ algorithms use a treelike structure to compute these GCD's and the
+ other algorithm builds a coprime base from the input set. The
+ algorithms which uses a treelike structure to compute these GCD's
+ is faster than building a coprime base although all algorithms are
+ quasilinear. The downside of these algorithm is that they only run
+ well if few common factors exist in the input set. These
+ algorithms have been used to factorize roughly 0.4\% of a 11.1
+ million RSA keys dataset. However, this is only possible if the
+ keys are badly generated.",
+ paper = "Cloo12.pdf",
+ keywords = "printed"
+}
+
+@techreport{Coll88,
+ author = "Collofello, James S.",
+ title = {{Introduction to Software Verification and Validation}},
+ type = "technical report",
+ institution = "Carnegie Mellon University",
+ number = "SEI-CM-13-1.1",
+ year = "1988",
+ abstract =
+ "SEI curriculum modules document and explication software
+ engineering topics. They are intended to be useful in a variety of
+ situations  in the preparation of courses, in the planning of
+ individual lectures, in the design of curricula, and in
+ professional selfstudy. Topics do not exist in isolation,
+ however, and the question inevitably arises how one module is
+ related to another. Because modules are written by different
+ authors at different times, the answer could easily be 'not at
+ all'. In a young field struggling to define itself, this would be
+ an unfortunate situation.
+
+ The SEI deliberately employs a number of mechanisms to achieve
+ compatible points of view across curriculum modules and to fill
+ the content gaps between them. Modules such as {\sl Introduction
+ to Software Verification and Validation} is one of the most
+ important devices. In this latest revision, Professor Collofello
+ has more modules to integrate into a coherent picture than when we
+ released his first draft more than a year ago  modules on
+ quality assurance, unit testing, technical reviews, and formal
+ verification, as well as less directly related modules on
+ specification, requirements definition, and design. We believe you
+ will find this curriculum module interesting and useful, both in
+ its own right and by virtue of the understanding of other modules
+ that it facilitates.",
+ paper = "Coll88.pdf",
+ keywords = "printed"
+
+}
+
+@book{Cope04,
+ author = "Copeland, B. Jack",
+ title = {{The Essential Turing}},
+ publisher = "Oxford University Press",
+ year = "2004",
+ isbn = "9780198250807"
+}
+
+@misc{Coqu19,
+ author = "Coquand, Thierry and Kinoshita, Yoshiki and Nordstrom, Bengt
+ and Takeyama, Makoto",
+ title = {{A Simple TypeTheoretic Language: MiniTT}},
+ year = "2019",
+ link = "\url{http://www.cse.chalmers.se/~bengt/papers/GKminiTT.pdf}",
+ abstract =
+ "This paper presents a formal description of a small functional
+ language with dependent types. The language contains data types,
+ mutual recursive/inductive definitions and a universe of small
+ types. The syntax, semantics and type system is specified in such a
+ way that the implementation of a parser, interpreter and type
+ checker is straightforward. The main difficulty is to design the
+ conversion algorithm in such a way that it works for open expressions.
+ The paper ends with a complete implementation in Haskell (around 400
+ lines of code).",
+ paper = "Coqu19.pdf",
+ keywords = "printed"
+}
+
+@book{Cosm95,
+ author = "Cosmo, Roberto Di",
+ title = {{Isomorphisms of Types}},
+ publisher = "Birkhauser",
+ year = "1995",
+ isbn = "081763763X"
+}
+
+@inproceedings{Cous77,
+ author = "Cousot, Patrick and Cousot, Radhia",
+ title = {{Abstract Interpretation: A Unified Lattice Model for
+ Static Analysis of Programs by Construction or
+ Approximation of Fixpoints}},
+ booktitle = "Symp. on Principles of Programming Languages",
+ pages = "238--252",
+ year = "1977",
+ paper = "Cous77.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Cutland, Nigel}
+\begin{chunk}{axiom.bib}
+@book{Cutl80,
+ author = "Cutland, Nigel",
+ title = {{Computability: An Introduction to Recursive Function Theory}},
+ publisher = "Cambridge University Press",
+ year = "1980",
+ isbn = "0521223849",
+ paper = "Cutl80.pdf"
+}
+
@book{Dahl72a,
author = "Dahl, O.J. and Dijkstra, E.W. and Hoare, C.A.R",
title = {{Structured Programming}},
@@ 9573,6 +10428,214 @@ paper = "Brea89.pdf"
isbn = "0122005562"
}
+@book{Dale08,
+ author = "van Dalen, Dirk",
+ title = {{Logic and Structure}},
+ publisher = "Springer",
+ year = "2008",
+ isbn = "9783540208792",
+ paper = "Dale08.pdf"
+
+}
+
+@inproceedings{Dama82,
+ author = "Damas, Luis and Milner, Robin",
+ title = {{Principal Typeschemes for Functional Programs}},
+ booktitle = "Proc POPL '82",
+ year = "1982",
+ pages = "207--212",
+ paper = "Dama82.pdf",
+ keywords = "printed"
+}
+
+@article{Ders08,
+ author = "Dershowitz, Nachum and Gurevich, Yuri",
+ title = {{A Natural Axiomatization of Computability and Proof of
+ Church's Thesis}},
+ journal = "Bulletin of Symbolic Logic",
+ volume = "14",
+ number = "3",
+ year = "2008",
+ abstract =
+ "Church’s Thesis asserts that the only numeric functions that can be
+ calculated by effective means are the recursive ones, which are the
+ same, extensionally, as the Turing computable numeric functions. The
+ Abstract State Machine Theorem states that every classical algorithm
+ is behaviorally equivalent to an abstract state machine. This theorem
+ presupposes three natural postulates about algorithmic
+ computation. Here, we show that augmenting those postulates with an
+ additional requirement regarding basic operations gives a natural
+ axiomatization of computability and a proof of Church’s Thesis, as
+ Gödel and others suggested may be possible. In a similar way, but with
+ a different set of basic operations, one can prove Turing’s Thesis,
+ characterizing the effective string functions, and—in particular—the
+ effectivelycomputable functions on string representations of numbers.",
+ paper = "Ders08.pdf",
+ keywords = "printed"
+}
+
+@book{Dijk88,
+ author = "Dijkstra, Edsger W. and Feijen, W.H.J",
+ title = {{A Method of Programming}},
+ publisher = "Addison Wesley",
+ year = "1988",
+ isbn = "0201175363"
+}
+
+@misc{Dijk70,
+ author = "Dijkstra, Edsger W.",
+ title = {{Concern for Correctness as a Guiding Principle for Program
+ Composition}},
+ year = "1970",
+ comment = "EWD288",
+ link = "\url{https://www.cs.utexas.edu/users/EWD/transcriptions/EWD02xx/EWD288.html}",
+ paper = "Dijk70.html",
+ keywords = "printed"
+}
+
+@book{Dijk90,
+ author = "Dijkstra, Edsger W. and Scholten, Carel S.",
+ title = {{Predicate Calculus and Program Semantics}},
+ publisher = "Springer",
+ year = "1990",
+ isbn = "0387969578"
+}
+
+@book{Ding96,
+ author = "Ding, C. and Pei, D. and Salomaa, A.",
+ title = {{Chinese Remainder Theorem, Applications in Computing,
+ Coding Cryptography}},
+ publisher = "World Scientific",
+ year = "1996"
+}
+
+@book{Ende01,
+ author = "Enderton, Herbert B.",
+ title = {{A Mathematical Introduction to Logic}},
+ publisher = "Harcourt Academic Press",
+ year = "2001",
+ isbn = "0122384520",
+ paper = "Ende01.pdf"
+}
+
+@misc{Elli18,
+ author = "Ellis, Ferris",
+ title = {{Strength in Numbers: Unums and the Quest for Reliable
+ Arithmetics}},
+ year = "2018",
+ link = "\url{https://www.youtube.com/watch?v=nVNYjmj_qbY}",
+ abstract =
+ "In the land of computer arithmetic, a tyrant has ruled since its very
+ beginning: the floating point number. Under its rule we have all
+ endured countless hardships and cruelties. To this very day the
+ floating point number still denies 0.1 + 0.2 == 0.3 and returns
+ insidious infinities to software developers everywhere. But a new hero
+ has entered fray: the universal number (unum). Can it topple the float
+ number system and its century long reign?
+
+ This talk will introduce unums, explain their benefits over floating
+ point numbers, and examine multiple real world examples comparing the
+ two. For those not familiar with floating point numbers and their
+ pitfalls, this talk also includes a primer on the topic. Code examples
+ are in Rust, though strong knowledge of the language is not needed."
+}
+
+@misc{Fill13a,
+ author = "Filliatre, Jean-Christophe",
+ title = {{Deductive Program Verification}},
+ year = "2013",
+ comment = "slides"
+}
+
+@book{Fitt69,
+ author = "Fitting, Melvin",
+ title = {{Intuitionistic Logic Model Theory and Forcing}},
+ publisher = "North Holland",
+ year = "1969",
+ paper = "Fitt69.pdf"
+}
+
+@misc{Fred16,
+ author = "Fredrikson, Matt",
+ title = {{Automated Program Verification and Testing}},
+ comment = "slides",
+ year = "2016"
+}
+
+@misc{Gabr10,
+ author = "Gabriel, Richard P.",
+ title = {{A Review of The Art of the Metaobject Protocol}},
+ year = "2010",
+ paper = "Gabr10.pdf",
+ keywords = "printed"
+}
+
+@article{Geuv00,
+ author = "Geuvers, Herman",
+ title = {{Induction Is Not Derivable in Second Order Dependent Type Theory}},
+ journal = "LNCS",
+ volume = "2044",
+ year = "2000",
+ pages = "166--181",
+ abstract =
+ "This paper proves the nonderivability of induction in second
+ order dependent type theory ($\lambda{}P2$). This is done by
+ providing a model construction for $\lambda{}P2$, based on a
+ saturated sets like interpretation of types as sets of terms of a
+ weakly extensional combinatory algebra. We give countermodels in
+ which the induction principle over natural numbers is not
+ valid. The proof does not depend on the specific encoding for
+ natural numbers that has been chosen (like e.g. polymorphic Church
+ numerals), so in fact we prove that there can not be an encoding
+ of natural numbers in $\lambda{}P2$ such that the induction
+ principle is satisfied. The method extends immediately to other
+ data types, like booleans, lists, trees, etc.
+
+ In the process of the proof we establish some general properties
+ of the models, which we think are of independent
+ interest. Moreover, we show that the Axiom of Choice is not
+ derivable in $\lambda{}P2$.",
+ paper = "Geuv00.pdf",
+ keywords = "printed"
+}
+
+@article{Gira87,
+ author = "Girard, Jean-Yves",
+ title = {{Linear Logic}},
+ journal = "Theoretical Computer Science",
+ volume = "50",
+ year = "1987",
+ pages = "1--102",
+ abstract =
+ "The familiar connective of negation is broken into two
+ operations: linear negation which is the purely negative part of
+ negation and the modality ``of course'' which has the meaning of a
+ reaffirmation. Following this basic discovery, a completely new
+ approach to the whole area between constructive logics and
+ programmation is initiated.",
+ paper = "Gira87.pdf",
+ keywords = "printed"
+}
+
+@book{Gira95,
+ author = "Girard, Jean-Yves",
+ title = {{Linear Logic: Its Syntax and Semantics}},
+ publisher = "Cambridge University Press",
+ year = "1995",
+ link = "\url{http://girard.perso.math.cnrs.fr/Synsem.pdf}",
+ isbn = "9780521559614",
+ paper = "Gira95.pdf",
+ keywords = "printed"
+}
+
+@book{Gold84,
+ author = "Goldblatt, Robert",
+ title = {{Topoi: The Categorical Analysis of Logic}},
+ year = "1984",
+ publisher = "Dover",
+ isbn = "9780486450261"
+}
+
@incollection{Gord79a,
author = "Gordon, Michael J. and Milner, Arthur J. and
Wadsworth, Christopher P.",
@@ 9633,6 +10696,32 @@ paper = "Brea89.pdf"
paper = "Gord79e.pdf"
}
+@misc{Grav18,
+ author = "Gravel, Katherine and Jananthan, Hayden and Kepner, Jeremy",
+ title = {{Visually Representing the Landscape of Mathematical Structures}},
+ link = "\url{https://arxiv.org/abs/1809.05930}",
+ year = "2018",
+ abstract =
+ "The information technology explosion has dramatically increased
+ the application of new mathematical ideas and has led to an
+ increasing use of mathematics across a wide range of fields that
+ have been traditionally labeled 'pure' or 'theoretical'. There is
+ a critical need for tools to make these concepts readily
+ accessible to a broader community. This paper details the creation
+ of visual representations of mathematical structures commonly
+ found in pure mathematics to enable both students and
+ professionals to rapidly understand the relationships among and
+ between mathematical structures. Ten broad mathematical structures
+ were identified and used to make eleven maps, with 187 unique
+ structures in total. The paper presents a method and
+ implementation for categorizing mathematical structures and for
+ drawing relationships between mathematical structures that
+ provides insight for a wide audience. The most in-depth map is
+ available online for public use.",
+ comment = "\url{http://www.mit.edu/~kepner/GravelMathMap.pdf}",
+ paper = "Grav18.pdf"
+}
+
@misc{Gray18,
author = "Grayson, Daniel R.",
title = {{An Introduction to Univalent Foundations for Mathematicians}},
@@ 9648,6 +10737,154 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@article{Gure12,
+ author = "Gurevich, Yuri",
+ title = {{What Is An Algorithm?}},
+ journal = "LNCS",
+ volume = "7147",
+ pages = "31--42",
+ year = "2012",
+ abstract =
+ "We attempt to put the title problem and the ChurchTuring thesis into
+ a proper perspective and to clarify some common misconceptions
+ related to Turing’s analysis of computation. We examine two approaches
+ to the title problem, one wellknown among philosophers and another
+ among logicians.",
+ paper = "Gure12.pdf",
+ keywords = "printed"
+}
+
+@misc{Gure18,
+ author = "Gurevich, Yuri",
+ title = {{ChurchTuring Thesis Cannot Possibly Be True}},
+ year = "2018",
+ link =
+ "\url{https://www.microsoft.com/en-us/research/video/church-turing-thesis-cannot-possibly-be-true/}",
+ abstract =
+ "The thesis asserts this: If an algorithm A computes a partial
+ function f from natural numbers to natural numbers then f is partially
+ recursive, i.e., the graph of f is recursively enumerable.
+
+ The thesis has been formulated in 1930s. The only algorithms at the
+ time were sequential algorithms. Sequential algorithms were
+ axiomatized in 2000. This axiomatization was used in 2008 to prove the
+ thesis for sequential algorithms, i.e., for the case where A ranges
+ over sequential algorithms.
+
+ These days, in addition to sequential algorithms, there are parallel
+ algorithms, distributed algorithms, probabilistic algorithms, quantum
+ algorithms, learning algorithms, etc.
+
+ The question whether the thesis is true in full generality is actively
+ discussed from 1960s. We argue that, in full generality, the thesis
+ cannot possibly be true.",
+ paper = "Gure18.pdf",
+ keywords = "printed"
+}
+
+@misc{Hahn04,
+ author = "Hahnle, Reiner and Wallenburg, Angela",
+ title = {{Using a Software Testing Technique to Improve Theorem Proving}},
+ year = "2004",
+ link = "\url{http://www.cse.chalmers.se/~angelaw/papers/lic.pdf}",
+ comment = "paper follows thesis in file",
+ abstract =
+ "Most efforts to combine formal methods and software testing go in the
+ direction of exploiting formal methods to solve testing problems, most
+ commonly test case generation. Here we take the reverse viewpoint and
+ show how the technique of partition testing can be used to improve a
+ formal proof technique (induction for correctness of loops). We first
+ compute a partition of the domain of the induction variable, based on
+ the branch predicates in the program code of the loop we wish to
+ prove. Based on this partition we derive mechanically a partitioned
+ induction rule, which is (hopefully) easier to use than the standard
+ induction rule. In particular, with an induction rule that is
+ tailored to the program to be verified, less user interaction can be
+ expected to be required in the proof. We demonstrate with a number of
+ examples the effectiveness of our method.",
+ paper = "Wall04.pdf",
+ keywords = "printed"
+}
+
+@article{Hant76,
+ author = "Hantler, Sidney L. and King, James C.",
+ title = {{An Introduction to Proving the Correctness of Programs}},
+ journal = "ACM Computing Surveys",
+ volume = "8",
+ number = "3",
+ pages = "331--353",
+ year = "1976",
+ abstract =
+ "This paper explains, in an introductory fashion, the method of
+ specifying the correct behavior of a program by the use of
+ input/output assertions and describes one method for showing that the
+ program is correct with respect to those assertions. An initial
+ assertion characterizes conditions expected to be true upon entry to
+ the program and a final assertion characterizes conditions expected to
+ be true upon exit from the program. When a program contains no
+ branches, a technique known as symbolic execution can be used to show
+ that the truth of the initial assertion upon entry guarantees the
+ truth of the final assertion upon exit. More generally, for a program
+ with branches one can define a symbolic execution tree. If there is
+ an upper bound on the number of times each loop in such a program may
+ be executed, a proof of correctness can be given by a simple traversal
+ of the (finite) symbolic execution tree.
+
+ However, for most programs, no fixed bound on the number of times each
+ loop is executed exists and the corresponding symbolic execution trees
+ are infinite. In order to prove the correctness of such programs, a
+ more general assertion structure must be provided. The symbolic
+ execution tree of such programs must be traversed inductively rather
+ than explicitly. This leads naturally to the use of additional
+ assertions which are called ``inductive assertions.''",
+ paper = "Hant76.pdf",
+ keywords = "printed",
+}
+
+@mastersthesis{Harg02a,
+ author = "Hargreaves, G.",
+ title = {{Interval analysis in MATLAB}},
+ school = "University of Manchester, Dept. of Mathematics",
+ year = "2002"
+}
+
+@techreport{Harp97,
+ author = "Harper, Robert and Stone, Christopher",
+ title = {{An Interpretation of Standard ML in Type Theory}},
+ type = "technical report",
+ institution = "Carnegie Mellon University",
+ number = "CMU-CS-97-147",
+ year = "1997",
+ abstract =
+ "We define an interpretation of Standard ML into type theory. The
+ interpretation takes the form of a set of elaboration rules
+ reminiscent of the static semantics given in {\sl The Definition
+ of Standard ML}. In particular, the elaboration rules are given in
+ a relational style, exploiting indeterminancy to avoid
+ overcommitment to specific implementation techniques. Elaboration
+ consists of identifier scope resolution, type checking and type
+ inference, expansion of derived forms, pattern compilation,
+ overloading resolution, equality compilation, and the coercive
+ aspects of signature matching.
+
+ The underlying type theory is an explicitlytyped
+ $\lambda$calculus with product, sum, function, and recursive
+ types, together with module types derived from the translucent sum
+ formalism of Harper and Lillibridge. Programs of the type theory
+ are given a typepassing dynamic semantics compatible with
+ constructs such as polymorphic equality that rely on type analysis
+ at runtime.",
+ paper = "Harp97.pdf"
+}
+
+@book{Harp16,
+ author = "Harper, Robert",
+ title = {{Practical Foundations for Programming Languages}},
+ publisher = "Cambridge University Press",
+ year = "2016",
+ isbn = "9781107150300"
+}
+
@misc{Harp18,
author = "Harper, Robert",
title = {{Computational Type Theory}},
@@ 9657,6 +10894,515 @@ paper = "Brea89.pdf"
comment = "OPLSS 2018"
}
+@techreport{Hayn87,
+ author = "Haynes, Christopher T. and Friedman, Daniel P.",
+ title = {{Embedding Continuations in Procedural Objects}},
+ type = "technical report",
+ institution = "Indiana University",
+ number = "213",
+ year = "1987",
+ abstract =
+ "Continuations, when available as firstclass objects, provide a
+ general control abstraction in programming languages. They
+ liberate the programmer from specific control structures,
+ increasing programming language extensibility. Such continuations
+ may be extended by embedding them in procedural objects. This
+ technique is first used to restore a fluid environment when a
+ continuation object is invoked. We then consider techniques for
+ constraining the power of continuations in the interest of
+ security and efficiency. Domain mechanisms, which create dynamic
+ barriers for enclosing control, are implemented using
+ fluids. Domains are then used to implement an unwindprotect
+ facility in the presence of firstclass continuations. Finally, we
+ present two mechanisms, windunwind and dynamicwind, that
+ generalize unwindprotect.",
+ paper = "Hayn87.pdf",
+ keywords = "printed"
+}
+
+@book{Heij67,
+ author = "van Heijenoort, Jean",
+ title = {{From Frege to Godel}},
+ publisher = "Harvard University Press",
+ isbn = "0674324498",
+ year = "1967"
+}
+
+@misc{Hema14,
+ author = "Hemann, Jason and Friedman, Daniel",
+ title = {{Write the Other Half of Your Program}},
+ year = "2014",
+ link = "\url{https://www.youtube.com/watch?v=RG9fBbQrVOM}"
+}
+
+@misc{Hema13,
+ author = "Hemann, Jason and Friedman, Daniel P.",
+ title = {{$\mu$Kanren: A Minimal Functional Core for Relational Programming}},
+ year = "2013",
+ link = "\url{webyrd.net/scheme2013/papers/HemannMuKanren2013.pdf}",
+ abstract =
+ "This paper presents $\mu$Kanren, a minimalist language in the
+ miniKanren family of relational (logic) programming languages. Its
+ implementation comprises fewer than 40 lines of Scheme. We
+ motivate the need for a minimalist miniKanren language, and
+ iteratively develop a complete search strategy. Finally, we
+ demonstrate that through sufficient userlevel features one
+ regains much of the expressiveness of other miniKanren
+ languages. In our opinion its brevity and simple semantics make
+ $\mu$Kanren uniquely elegant.",
+ paper = "Hema13.pdf",
+ keywords = "printed"
+}
+
+@phdthesis{Heng89,
+ author = "Henglein, Friedrich",
+ title = {{Polymorphic Type Inference and SemiUnification}},
+ school = "Rutgers",
+ year = "1989",
+ link =
+ "\url{http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.388.1275&rep=rep1&type=pdf}",
+ abstract =
+ "In the last ten years declarationfree programming languages with
+ a polymorphic typing discipline (ML, B) have been developed to
+ approximate the flexibility and conciseness of dynamically typed
+ languages (LISP, SETL) while retaining the safety and execution
+ efficiency of conventional statically typed languages (Algol68,
+ Pascal). These polymorphic languages can be type checked at
+ compile time, yet allow functions whose arguments range over a
+ variety of types.
+
+ We investigate several polymorphic type systems, the most powerful
+ of which, termed MilnerMycroft Calculus, extends the socalled
+ letpolymorphism found in, e.g. ML with a polymorphic typing rule
+ for recursive definitions. We show that semiunification, the
+ problem of solving inequalities over firstorder terms,
+ characterizes type checking in the MilnerMycroft Calculus to
+ polynomial time, even in the restricted case where nested
+ definitions are disallowed. This permits us to extend some
+ infeasibility results for related combinatorial problems to type
+ inference and to correct several claims and statements in the
+ literature.
+
+ We prove the existence of unique most general solutions of term
+ inequalities, called most general semiunifiers, and present an
+ algorithm for computing them that terminates for all known inputs
+ due to a novel ``extended occurs check''. We conjecture this
+ algorithm to be uniformly terminating even though, at present,
+ general semiunification is not known to be decidable. We prove
+ termination of our algorithm for a restricted case of
+ semiunification that is of independent interest.
+
+ Finally, we offer an explanation for the apparent practicality of
+ polymorphic type inference in the face of theoretical
+ intractability results.",
+ paper = "Heng89.pdf",
+}
+
+@book{Herk88,
+ author = "Herken, Rolf",
+ title = {{The Universal Turing Machine}},
+ publisher = "Oxford Science Publications",
+ year = "1988",
+ isbn = "0198537743"
+}
+
+@inproceedings{Herm07,
+ author = "Herman, David",
+ title = {{Functional Pearl: The Great Escape}},
+ booktitle = "ICFP'07",
+ publisher = "ACM",
+ isbn = "1595938152",
+ year = "2007",
+ abstract =
+ "Filinski showed that callcc and a single mutable reference cell
+ are sufficient to express the delimited control operators shift
+ and reset. However, this implementation interacts poorly with
+ dynamic bindings like exception handlers. We present a variation
+ on Filinski's encoding of delimited continuations that behaves
+ appropriately in the presence of exceptions and give an
+ implementation in Standard ML of New Jersey. We prove the encoding
+ correct with respect to the semantics of delimited dynamic binding.",
+ paper = "Herm07.pdf"
+}
+
+@book{Heyt56,
+ author = "Heyting, A.",
+ title = {{Intuitionism: An Introduction}},
+ publisher = "North Holland",
+ year = "1956"
+}
+
+@book{Hilb52,
+ author = "Hilbert, David and CohnVossen, S.",
+ title = {{Geometry and the Imagination}},
+ publisher = "Chelsea Publishing Company",
+ year = "1952"
+}
+
+@article{Hoar74,
+ author = "Hoare, C.A.R.",
+ title = {{Hints on Programming Language Design}},
+ journal = "Computer Systems Reliability",
+ volume = "20",
+ pages = "505534",
+ year = "1974",
+ link = "\url{http://flint.cs.yale.edu/cs428/doc/HintsPL.pdf}",
+ abstract =
+ "This paper presents the view that a programming language
+ is a tool that should assist the programmer in the most difficult
+ aspects of his art, namely program design, documentation, and
+ debugging. It discusses the objective criteria for evaluating a
+ language design and illustrates them by application to language
+ features of both highlevel languages and machinecode
+ programming. It concludes with an annotated reading list,
+ recommended for all intending language designers.",
+ paper = "Hoar74.pdf",
+ keywords = "printed"
+}
+
+@book{Hrba99,
+ author = "Hrbacek, Karel and Jech, Thomas",
+ title = {{Introduction to Set Theory}},
+ publisher = "Marcel Dekker, Inc",
+ year = "1999",
+ isbn = "0824779150",
+ paper = "Hrba99.pdf"
+}
+
+@book{Jeff81,
+ author = "Jeffrey, Richard",
+ title = {{Formal Logic: Its Scope and Limits}},
+ publisher = "McGrawHill",
+ year = "1981",
+ isbn = "0070323216"
+}
+
+@article{Kahr97,
+ author = "Kahrs, Stefan and Sannella, Donald and Tarlecki, Andrzej",
+ title = {{The Definition of Extended ML}},
+ journal = "Theoretical Computer Science",
+ volume = "173",
+ pages = "445484",
+ year = "1997",
+ abstract =
+ "This document formally defines the syntax and semantics of the
+ Extended ML language. It is based on the published semantics of
+ Standard ML in an attempt to ensure compatibility between the two
+ languages.",
+ paper = "Kahr97.pdf"
+}
+
+@article{Kahr98,
+ author = "Kahrs, Stefan and Sannella, Donald",
+ title = {{Reflections on the Design of a Specification Language}},
+ journal = "LNCS",
+ volume = "1382",
+ pages = "154170",
+ year = "1998",
+ abstract =
+ "We reflect on our experiences from work on the design and
+ semantic underpinnings of Extended ML, a specification language
+ which supports the specification and formal development of
+ Standard ML programs. Our aim is to isolate problems and issues
+ that are intrinsic to the general enterprise of designing a
+ specification language for use with a given programming language.
+ Consequently the lessons learned go far beyond our original aim of
+ designing a specification language for ML.",
+ paper = "Kahr98.pdf",
+ keywords = "printed"
+}
+
+@misc{Kali14,
+ author = "Kaliszyk, Cezary and Urban, Josef",
+ title = {{LearningAssisted Automated Reasoning with Flyspeck}},
+ comment = "arXiv:1211.7012v3",
+ year = "2014",
+ abstract =
+ "The considerable mathematical knowledge encoded by the Flyspeck
+ project is combined with external automated theorem provers (ATPs)
+ and machinelearning premise selection methods trained on the
+ Flyspeck proofs, producing an AI system capable of proving a wide
+ range of mathematical conjectures automatically. The performance
+ of this architecture is evaluated in a bootstrapping scenario
+ emulating the development of Flyspeck from axioms to the last
+ theorem, each time using only the previous theorems and proofs. It
+ is shown that 39\% of the 14185 theorems could be proved in a
+ pushbutton mode (without any highlevel advice and user
+ interaction) in 30 seconds of real time on a fourteen CPU
+ workstation.
+
+ The necessary work involves: (i) an implementation of sound
+ translations of the HOL Light logic to ATP formalisms: untyped
+ firstorder, polymorphic typed firstorder, and typed firstorder,
+ (ii) export of the dependency information from HOL Light and ATP
+ proofs for the machine learners, and (iii) choice of suitable
+ representations and methods for learning from previous proofs, and
+ their integration as advisors with HOL Light. This work is
+ described and discussed here, and an initial analysis of the body
+ of proofs that were found fully automatically is provided.",
+ paper = "Kali14.pdf",
+ keywords = "printed"
+}
+
+@techreport{Kami12,
+ author = "Kaminski, Paul",
+ title = {{The Role of Autonomy in DoD Systems}},
+ type = "Task Force Report",
+ institution = "Defense Science Board, Dept. of Defense",
+ year = "2012",
+ link = "\url{https://fas.org/irp/agency/dod/dsb/autonomy.pdf}",
+ comment = "verification, validation, and trust"
+
+}
+
+@article{Kapl80,
+ author = "Kaplan, Marc A. and Ullman, Jeffrey D.",
+ title = {{A Scheme for the Automatic Inference of Variable Types}},
+ journal = "J. ACM",
+ volume = "27",
+ number = "1",
+ pages = "128--145",
+ year = "1980",
+ abstract =
+ "In this paper an algorithm for the determination of runtime
+ types in a programming language requiring no type declarations is
+ presented. It is demonstrated that this algorithm is superior to
+ other published algorithms in the sense that it produces stronger
+ assertions about the set of possible types for variables than do
+ other known algorithms. In fact this algorithm is shown to be the
+ best possible algorithm from among all those that use the same set
+ of primitive operators.",
+ paper = "Kapl80.pdf",
+ keywords = "printed"
+}
+
+@book{Kicz91,
+ author = "Kiczales, Gregor and des Rivieres, Jim and Bobrow, Daniel G.",
+ title = {{The Art of the Metaobject Protocol}},
+ publisher = "MIT Press",
+ year = "1991",
+ isbn = "0262610744"
+}
+
+@book{Klee65,
+ author = "Kleene, S.C. and Vesley, R.E.",
+ title = {{The Foundations of Intuitionistic Mathematics}},
+ publisher = "NorthHolland Publishing Company",
+ year = "1965"
+}
+
+@misc{Knil12,
+ author = "Knill, Oliver",
+ title = {{A Multivariate Chinese Remainder Theorem}},
+ link = "\url{https://arxiv.org/abs/1206.5114}",
+ year = "2012",
+ abstract =
+ "Using an adaptation of Qin Jiushao's method from the 13th
+ century, it is possible to prove that a system of linear modular
+ equations $a(i,1)x(i)+a(i,n)x(n)=b(i)\bmod m(i),i=1,\ldots,n$ has
+ integer solutions if $m(i)>1$ are pairwise relatively prime and in
+ each row, at least one matrix element $a(i,j)$ is relatively prime
+ to $m(i)$. The Chinese remainder theorem is a special case, where
+ $A$ has only one column.",
+ paper = "Knil12.pdf"
+}
+
+@misc{Knil18,
+ author = "Knill, Oliver",
+ title = {{Some Fundamental Theorems in Mathematics}},
+ link = "\url{https://arxiv.org/pdf/1807.08416.pdf}",
+ year = "2018",
+ abstract =
+ "An expository hitchhikers guide to some theorems in mathematics",
+ paper = "Knil18.pdf"
+}
+
+@techreport{Krei11,
+ author = "Kreitz, Christoph and Rahli, Vincent",
+ title = {{Introduction to Classic ML}},
+ type = "technical report",
+ institution = "Cornell University",
+ year = "2011",
+ paper = "Krei11.pdf",
+ keywords = "printed"
+}
+
+@book{Krip80,
+ author = "Kripke, Saul A.",
+ title = {{Naming and Necessity}},
+ publisher = "Harvard University Press",
+ year = "1980",
+ isbn = "0674598466"
+}
+
+@article{Lamp99,
+ author = "Lamport, Leslie and Paulson, Lawrence C.",
+ title = {{Should Your Specification Language Be Typed?}},
+ journal = "ACM Trans. on Programming Languages and Systems",
+ volume = "21",
+ number = "3",
+ year = "1999",
+ pages = "502526",
+ abstract =
+ "Most specification languages have a type system. Type systems are
+ hard to get right, and getting them wrong can lead to
+ inconsistencies. Set theory can serve as the basis for a
+ specification language without types. This possibility, which has
+ been widely overlooked, offers many advantages. Untyped set theory
+ is simple and is more flexible than any simple typed
+ formalism. Polymorphism, overloading, and subtyping can make a
+ type system more powerful, but at the cost of increased
+ complexity, and such refinements can never attain the flexibility
+ of having no types at all. Typed formalisms have advantages too,
+ stemming from the power of mechanical type checking. While types
+ serve little purpose in hand proofs, they do help with mechanized
+ proofs. In the absence of verification, type checking can catch
+ errors in specifications. It may be possible to have the best of
+ both worlds by adding typing annotations to an untyped
+ specification language. We consider only specification languages,
+ not programming languages",
+ paper = "Lamp99.pdf",
+ keywords = "printed"
+}
+
+@book{Lear15,
+ author = "Leary, Christopher C. and Kristiansen, Lars",
+ title = {{A Friendly Introduction to Mathematical Logic}},
+ publisher = "Milne Library, Geneseo",
+ year = "2015",
+ isbn = "9781942341321",
+ paper = "Lear15.pdf"
+
+}
+
+@inproceedings{Leex07,
+ author = "Lee, Daniel K. and Crary, Karl and Harper, Robert",
+ title = {{Towards a Mechanized Metatheory of Standard ML}},
+ booktitle = "POPL'07",
+ publisher = "ACM",
+ year = "2007",
+ abstract =
+ "We present an internal language with equivalent expressive power to
+ Standard ML, and discuss its formalization in LF and the
+ machinechecked verification of its type safety in Twelf. The
+ internal language is intended to serve as the target of elaboration in
+ an elaborative semantics for Standard ML in the style of Harper and
+ Stone. Therefore, it includes all the programming mechanisms
+ necessary to implement Standard ML, including translucent modules,
+ abstraction, polymorphism, higher kinds, references, exceptions,
+ recursive types, and recursive functions. Our successful formalization
+ of the proof involved a careful interplay between the precise
+ formulations of the various mechanisms, and required the invention of
+ new representation and proof techniques of general interest.",
+ paper = "Leex07.pdf",
+ keywords = "printed"
+}
+
+@article{Maxx90,
+ author = "Ma, KwanLiu and Kessler, Robert R.",
+ title = {{TICL  A Type Inference System for Common Lisp}},
+ journal = "Software  Practice and Experience",
+ volume = "20",
+ number = "6",
+ pages = "593623",
+ year = "1990",
+ abstract =
+ "Most current Common Lisp compilers generate more efficient code
+ when supplied with data type information. However, in keeping with
+ standard Lisp programming style, most programmers are reluctant to
+ provide type information; they simply allow the runtime type
+ system to manage the data types accordingly. To fill this gap, we
+ have designed and implemented a type inference system for Common
+ Lisp (TICL). TICL takes a Lisp program that has been annotated
+ with a few type declarations, adds as many declarations as
+ possible, and produces a type declared program. The compiler can
+ then use this information to generate more efficient
+ code. Measurements indicate that a 20 per cent speed improvement
+ can generally be achieved.",
+ paper = "Maxx90.pdf",
+ keywords = "printed"
+}
+
+@misc{Macqxx,
+ author = "MacQueen, David B.",
+ title = {{Reflections on Standard ML}},
+ year = "unknown",
+ abstract =
+ "Standard ML is one of a number of new programming languages
+ developed in the 1980s that are seen as suitable vehicles for
+ serious systems and applications programming. It offers an
+ excellent ratio of expressiveness to language complexity, and
+ provides competitive efficiency. Because of its type and module
+ system, Standard ML manages to combine safety, security, and
+ robustness with much of the flexibility of dynamically typed
+ languages like Lisp. It also has the most welldeveloped
+ scientific foundation of any major language. Here I review the
+ strengths and weaknesses of Standard ML and describe some of what
+ we have learned through the design, implementation, and use of the
+ language.",
+ paper = "Macqxx.pdf",
+ keywords = "printed"
+}
+
+@article{Mart84,
+ author = "MartinLof, P.",
+ title = {{Constructive Mathematics and Computer Programming}},
+ journal = "Phil. Trans. Royal Society London",
+ volume = "312",
+ year = "1984",
+ pages = "501518",
+ link =
+ "\url{https://royalsocietypublishing.org/doi/pdf/10.1098/rsta.1984.0073}",
+ abstract =
+ "If programming is understood not as the writing of instructions for
+ this or that computing machine but as the design of methods of
+ computation that it is the computer’s duty to execute (a difference
+ that Dijkstra has referred to as the difference between computer
+ science and computing science), then it no longer seems possible to
+ distinguish the discipline of programming from constructive
+ mathematics. This explains why the intuitionistic theory of types
+ (MartinLof 1975 In Logic Colloquium 1973 (ed. H. E. Rose and
+ J. C. Shepherdson), pp. 73  118 . Amsterdam: North Holland), which
+ was originally developed as a symbolism for the precise codification
+ of constructive mathematics, may equally well be viewed as a
+ programming language. As such it provides a precise notation not
+ only, like other programming languages, for the programs themselves
+ but also for the tasks that the programs are supposed to perform.
+ Moreover, the inference rules of the theory of types, which are again
+ completely formal, appear as rules of correct program synthesis. Thus
+ the correctness of a program written in the theory of types is proved
+ formally at the same time as it is being synthesized.",
+ paper = "Mart84.pdf",
+ keywords = "printed"
+}
+
+@book{Mart97a,
+ author = "Martzloff, J.C.",
+ title = {{A History of Chinese Mathematics}},
+ publisher = "Springer Verlag",
+ year = "1997"
+}
+
+@misc{Mcke14,
+ author = "McKenna, Brian",
+ title = {{Idris: Practical Dependent Types with Practical Examples}},
+ year = "2014",
+ link = "\url{https://www.youtube.com/watch?v=4i7KrG1Afbk}",
+ abstract =
+ "Dependent types turn types into a firstclass language construct and
+ allows types to be predicated upon values. Allowing types to be
+ controlled by ordinary values allows us to prove many more properties
+ about our programs and enables some interesting forms of
+ metaprogramming. We can do interesting things like write a typesafe
+ printf or verify algebraic laws of data structures  but while
+ dependent types are quickly gaining in popularity, it can be tricky to
+ find examples which are both useful and introductory.
+
+ This talk will demonstrate some practical dependent typing examples
+ (and not sized vectors) using Idris, a language designed for writing
+ executable programs, rather than just proving theorems."
+}
+
@book{Mcca62,
author = "McCarthy, John and Abrahams, Paul W. and Edwards, Daniel J.
and Hart, Timothy P. and Levin, Michael I.",
@@ 9676,6 +11422,161 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@book{Mill06,
+ author = "Mills, Bruce",
+ title = {{Theoretical Introduction to Programming}},
+ publisher = "Springer",
+ year = "2006",
+ isbn = "1846280214"
+}
+
+@misc{Moba09,
+ author = "Mobarakeh, S. Saeidi",
+ title = {{Type Inference Algorithms}},
+ year = "2009",
+ link = "\url{https://www.win.tue.nl/~hzantema/semssm.pdf}",
+ abstract =
+ "In this paper we are going to describe the Wand’s type inference
+ algorithm and we’ll try to extend this algorithm with the notion of
+ polymorphic let. By means of a type system, which we’re going to
+ extend with some constraint language, we are able to extend the
+ algorithm’s first phase (generation of equations) with
+ letpolymorphism. The second phase of the algorithm (solving of the
+ generated equations) needs some modifications to be done on standard
+ unification algorithms because the new generated equation constructs
+ can not directly be fed in to the standard unification algorithm. The
+ correctness of the first phase of the algorithm has been proved by
+ extending the Wand’s soundness and completeness proof. Finally a
+ simple example is given to clarify the idea behind the algorithm.",
+ paper = "Moba09.pdf",
+ keywords = "printed"
+}
+
+@article{Mosc84,
+ author = "Moschovakis, Y.N.",
+ title = {{Abstract Recursion as a Foundation of the Theory of Algorithms}},
+ journal = "LNCS",
+ volume = "1104",
+ pages = "289364",
+ publisher = "Springer",
+ year = "1984",
+ abstract =
+ "The main object of this paper is to describe an abstract,
+ (axiomatic) theory of recursion and its connection with some of
+ the basic, foundational questions of computer science.",
+ paper = "Mosc84.pdf",
+ keywords = "printed"
+}
+
+@misc{Moyx10,
+ author = "Moy, Yannick and Wallenburg, Angela",
+ title = {{Tokeneer: Beyond Formal Program Verification}},
+ year = "2010",
+ link = "\url{http://www.cse.chalmers.se/~angelaw/papers/ERTS2010.pdf}",
+ abstract =
+ "Tokeneer is a smallsized (10 kloc) security system which was
+ formally developed and verified by Praxis at the request of NSA, using
+ SPARK technology. Since its opensource release in 2008, only two
+ problems were found, one by static analysis, one by code review. In
+ this paper, we report on experiments where we systematically applied
+ various static analysis tools (compiler, bugfinder, proof tools)
+ and focused code reviews to all of the SPARK code (formally verified)
+ and supporting Ada code (not formally verified) of the Tokeneer
+ Project. We found 20 new problems overall, half of which are defects
+ that could lead to a system failure should the system be used in its
+ current state. Only two defects were found in SPARK code, which
+ confirms the benefits of applying formal verification to reach
+ higher levels of assurance. In order to leverage these benefits to
+ code that was not formally verified from the start, we propose to
+ associate static analyses and dynamic analyses around a common
+ expression of properties and constraints. This is the goal of starting
+ project HiLite, which involves AdaCore and Altran Praxis together
+ with several industrial users and research labs.",
+ paper = "Moyx10.pdf",
+ keywords = "printed"
+}
+
+@book{Muel88,
+ author = "Mueller, Robert A. and Page, L. Rex",
+ title = {{Symbolic Computing with Lisp and Prolog}},
+ publisher = "Wiley",
+ year = "1988",
+ isbn = "0471607711"
+}
+
+@book{Murp18,
+ author = "Murphy, Robin R.",
+ title = {{Robotics Through Science Fiction}},
+ publisher = "MIT Press",
+ year = "2018",
+ link =
+ "\url{https://mitpress.mit.edu/books/roboticsthroughsciencefiction}",
+ isbn = "9780262536264",
+ comment = "verification, validation, and trust"
+
+}
+
+@misc{Murr18,
+ author = "Murray, Toby and van Oorschot, P.C.",
+ title = {{BP: Formal Proofs, the Fine Print and Side Effects}},
+ link = "\url{https://people.eng.unimelb.edu.au/tobym/papers/secdev2018.pdf}",
+ comment = "Version: 26 June 2018 to appear in IEEE SecDev 2018",
+ abstract =
+ "Given recent highprofile successes in formal verification of
+ securityrelated properties (e.g. for seL4), and the rising
+ popularity of applying formal methods to cryptographic libraries
+ and security protocols like TLS, we revisit the meaning of
+ securityrelated proofs about software. We reexamine old issues,
+ and identify new questions that have escaped scrutiny in the
+ formal methods literature. We consider what value proofs about
+ software systems deliver to endusers (e.g. in terms of net
+ assurance benefits), and at what cost in terms of side effects
+ (such as changes made to software to facilitate the proofs, and
+ assumptionrelated deployment restrictions imposed on software if
+ these proofs are to remain valid in operation). We consider in
+ detail, for the first time to our knowledge, possible
+ relationships between proofs and side effects. To make our
+ discussion concrete, we draw on tangible examples, experience, and
+ the literature.",
+ paper = "Murr18.pdf",
+ keywords = "printed"
+}
+
+@inproceedings{Nimm02,
+ author = "Nimmer, Jeremy W. and Ernst, Michael D.",
+ title = {{Automatic Generation of Program Specifications}},
+ booktitle = "Symp. on Software Testing and Analysis",
+ publisher = "ACM Press",
+ year = "2002",
+ abstract =
+ "Producing specifications by dynamic (runtime) analysis of program
+ executions is potentially unsound, because the analyzed executions may
+ not fully characterize all possible executions of the program. In
+ practice, how accurate are the results of a dynamic analysis? This
+ paper describes the results of an investigation into this question,
+ determining how much specifications generalized from program runs
+ must be changed in order to be verified by a static checker.
+ Surprisingly, small test suites captured nearly all program behavior
+ required by a specific type of static checking; the static checker
+ guaranteed that the implementations satisfy the generated
+ specifications, and ensured the absence of runtime exceptions. Measured
+ against this verification task, the generated specifications scored
+ over 90 percent on precision, a measure of soundness, and on recall, a
+ measure of completeness.
+
+ This is a positive result for testing, because it suggests that
+ dynamic analyses can capture all semantic information of interest for
+ certain applications. The experimental results demonstrate that a
+ specific technique, dynamic invariant detection, is effective at
+ generating consistent, sufficient specifications for use by a static
+ checker. Finally, the research shows that combining static and
+ dynamic analyses over program specifications has benefits for users of
+ each technique, guaranteeing soundness of the dynamic analysis and
+ lessening the annotation burden for users of the static analysis.",
+ paper = "Nimm02.pdf",
+ keywords = "printed"
+}
+
@misc{Nipk18,
author = "Nipkow, Tobias and Tabacznyj, Christophe and
Paulson, Lawrence C. and Chaieb, Amine and Rasmussen, Thomas M.
@@ 9685,6 +11586,263 @@ paper = "Brea89.pdf"
year = "2018"
}
+@misc{Ober18,
+ author = "Oberhoff, Sebastian",
+ title = {{Incompleteness Ex Machina}},
+ year = "2018",
+ abstract =
+ "In this essay we'll prove Godel's incompleteness theorems
+ twice. First, we'll prove them the good old-fashioned way. Then
+ we'll repeat the feat in the setting of computation. In the
+ process we'll discover that Godel's work, rightly viewed, needs to
+ be split into two parts: the transport of computation into the
+ arena of arithmetic on the one hand and the actual incompleteness
+ theorems on the other. After we're done there will be cake.",
+ paper = "Ober18.pdf",
+ keywords = "printed"
+}
+
+@misc{Olss04,
+ author = "Olsson, Ola and Wallenburg, Angela",
+ title = {{Automatic Generation of Customised Induction Rules for
+ Proving Correctness of Imperative Programs}},
+ year = "2004",
+ comment = "paper follows thesis in file",
+ link = "\url{http://www.cse.chalmers.se/~angelaw/papers/lic.pdf}",
+ abstract =
+ "In this paper we develop a method for automatic construction of
+ customised induction rules for use in a semi-interactive theorem
+ prover. The induction rules are developed to prove the total
+ correctness of loops in an object-oriented language. We concentrate
+ on integers. First we compute a partition of the domain of the
+ induction variable. Our method makes use of failed proof attempts in
+ the theorem prover to gain information about the problem structure and
+ create the partition. Then, based on this partition we create an
+ induction rule, in destructor style, that is customised to make the
+ proving of the loop simpler. Our concern is in user interaction,
+ rather than in proof strength.
+
+ Using the customised induction rules, some separation of proof of
+ control flow and data correctness is achieved, and we find that in
+ comparison to standard (Peano) induction or Noetherian induction,
+ simpler user interaction can be expected. Furthermore, by using
+ destructor style induction we circumvent the problem of creating
+ inverses of functions. We show the soundness of the customised
+ induction rules created by the method. Furthermore, we use the
+ machinery of the theorem prover (KeY) to make the method automatic.
+ Several interesting areas are also identified that could open up for
+ a larger range of loops the method can handle, as well as pointing
+ towards full automation of these cases.",
+ paper = "Wall04.pdf",
+ keywords = "printed"
+}
+
+@misc{Olss05,
+ author = "Olsson, Ola and Wallenburg, Angela",
+ title = {{Customised Induction Rules for Proving Correctness of
+ Imperative Programs}},
+ year = "2005",
+ link = "\url{http://www.cse.chalmers.se/~angelaw/papers/olswbg05.pdf}",
+ abstract =
+ "In this paper we develop a method for automatic construction of
+ customised induction rules for use in a semi-interactive theorem
+ prover. The induction rules are developed to prove the total
+ correctness of loops in an object-oriented language. We concentrate
+ on integers. First we compute a partition of the domain of the
+ induction variable. Our method makes use of failed proof attempts in
+ the theorem prover to gain information about the problem structure and
+ create the partition. Then, based on this partition we create an
+ induction rule, in destructor style, that is customised to make the
+ proving of the loop simpler. Our concern is in user interaction,
+ rather than in proof strength.
+
+ Using the customised induction rules, some separation of proof of
+ control flow and data correctness is achieved, and we find that in
+ comparison to standard (Peano) induction or Noetherian induction,
+ simpler user interaction can be expected. Furthermore, by using
+ destructor style induction we circumvent the problem of creating
+ inverses of functions. We show the soundness of the customised
+ induction rules created by the method. Furthermore, we use the
+ machinery of the theorem prover (KeY) to make the method automatic.
+ Several interesting areas are also identified that could open up for
+ a larger range of loops the method can handle, as well as pointing
+ towards full automation of these cases.",
+ paper = "Olss05.pdf",
+ keywords = "printed"
+}
+
+@misc{Pfen06,
+ author = "Pfenning, Frank",
+ title = {{Constraint Logic Programming}},
+ year = "2006",
+ link = "\url{http://www.cs.cmu.edu/~fp/courses/lp/lectures/27clp.pdf}",
+ paper = "Pfen06.pdf",
+ keywords = "printed"
+}
+
+@misc{Pfen07,
+ author = "Pfenning, Frank",
+ title = {{Logic Programming}},
+ year = "2007",
+ link = "\url{http://www.cs.cmu.edu/~fp/courses/lp/lectures/lpall.pdf}",
+ paper = "Pfen07.pdf"
+}
+
+@misc{Pitt18,
+ author = "Pittman, Dan",
+ title = {{Proof Theory Impressionism: Blurring the Curry-Howard Line}},
+ link = "\url{https://www.youtube.com/watch?v=jrVPBAd5Gc}",
+ year = "2018",
+ comment = "Strange Loop 2018 Conference"
+}
+
+@book{Plat18,
+ author = "Platzer, Andre",
+ title = {{Logical Foundations of Cyber-Physical Systems}},
+ publisher = "Springer",
+ year = "2018",
+ isbn = "9783319635873"
+}
+
+@misc{Poli18,
+ author = "Polikarpova, Nadia",
+ title = {{Type-Driven Program Synthesis}},
+ link = "\url{https://www.youtube.com/watch?v=HnOix9TFy1A}",
+ year = "2018",
+ abstract =
+ "A promising approach to improving software quality is to enhance
+ programming languages with declarative constructs, such as logical
+ specifications or examples of desired behavior, and to use program
+ synthesis technology to translate these constructs into efficiently
+ executable code. In order to produce code that provably satisfies a
+ rich specification, the synthesizer needs an advanced program
+ verification mechanism that is sufficiently expressive and fully
+ automatic. In this talk, I will present a program synthesis technique
+ based on refinement type checking: a verification mechanism that
+ supports automatic reasoning about expressive properties through a
+ combination of types and SMT solving.
+
+ The talk will present two applications of type-driven synthesis. The
+ first one is a tool called Synquid, which creates recursive functional
+ programs from scratch given a refinement type as input. Synquid is the
+ first synthesizer powerful enough to automatically discover programs
+ that manipulate complex data structures, such as balanced trees and
+ propositional formulas. The second application is a language called
+ Lifty, which uses type-driven synthesis to repair information flow
+ leaks. In Lifty, the programmer specifies expressive information flow
+ policies by annotating the sources of sensitive data with refinement
+ types, and the compiler automatically inserts access checks necessary
+ to enforce these policies across the code."
+}
+
+@book{Popk94,
+ author = "Popkorn, Sally",
+ title = {{First Steps in Modal Logic}},
+ publisher = "Cambridge University Press",
+ year = "1994",
+ isbn = "9780521464826",
+ link =
+ "\url{https://cs.famaf.unc.edu.ar/~careces/ml18/downloads/popkorn.pdf}",
+ paper = "Popk94.pdf"
+}
+
+@article{Post44,
+ author = "Post, Emil L.",
+ title = {{Recursively Enumerable Sets of Positive Integers and Their
+ Decision Problems}},
+ journal = "Bull. Amer. Math Soc.",
+ volume = "50",
+ pages = "284--316",
+ year = "1944",
+ paper = "Post44.pdf",
+ keywords = "printed"
+}
+
+@article{Deut85,
+ author = "Deutsch, David",
+ title = {{Quantum Theory, the Church-Turing Principle and the
+ Universal Quantum Computer}},
+ journal = "Proc. Royal Society of London",
+ volume = "400",
+ pages = "97--117",
+ year = "1985",
+ abstract =
+ "It is argued that underlying the Church-Turing hypothesis there
+ is an implicit physical assertion. Here, this assertion is
+ presented explicitly as a physical principle: 'every finitely
+ realizable physical system can be perfectly simulated by a
+ universal model computing machine operating by finite
+ means'. Classical physics and the universal Turing machine,
+ because the former is continuous and the latter discrete, do not
+ obey the principle, at least in the strong form above. A class of
+ model computing machines that is the quantum generalization of the
+ class of Turing machines is described, and it is shown that
+ quantum theory and the 'universal quantum computer' are compatible
+ with the principle. Computing machines resembling the universal
+ quantum computer could, in principle, be built and would have many
+ remarkable properties not reproducible by any Turing
+ machine. These do not include the computation of nonrecursive
+ functions, but they do include 'quantum parallelism', a method by
+ which certain probabilistic tasks can be performed faster by a
+ universal quantum computer than by any classical restriction of
+ it. The intuitive explanation of these properties places an
+ intolerable strain on all interpretations of quantum theory other
+ than Everett's. Some of the numerous connections between the
+ quantum theory of computation and the rest of physics are
+ explored. Quantum complexity theory allows a physically more
+ reasonable definition of the 'complexity' or 'knowledge' in a
+ physical system than does classical complexity theory.",
+ paper = "Deut85.pdf",
+ keywords = "printed"
+}
+
+@inproceedings{Rees82,
+ author = "Rees, Jonathan and Adams, Norman I.",
+ title = {{T: A Dialect of Lisp or, LAMBDA: The Ultimate Software Tool}},
+ booktitle = "Sym. on Lisp and Functional Programming",
+ pages = "114--122",
+ year = "1982",
+ abstract =
+ "The T project is an experiment in language design and
+ implementation. Its purpose is to test the thesis developed by
+ Steele and Sussman in their series of papers about the Scheme
+ language: that Scheme may be used as the basis for a practical
+ programming language of exceptional expressive power, and that
+ implementations of Scheme could perform better than other Lisp
+ systems, and competitively with implementations of programming
+ languages, such as C and Bliss, which are usually considered to be
+ inherently more efficient than Lisp on conventional machine
+ architectures. We are developing a portable implementation of T,
+ currently targeted for the VAX under the Unix and VMS operating
+ systems and for the Apollo, a MC68000based workstation.",
+ paper = "Rees82.pdf",
+ keywords = "printed"
+}
+
+@techreport{Remy92,
+ author = "Remy, Didier",
+ title = {{Extensions to ML type system with a sorted equation theory
+ on types}},
+ type = "research report",
+ institution = "INRIA",
+ number = "RR-1766",
+ year = "1992",
+ abstract =
+ "We extend the ML language by allowing a sorted regular equational
+ theory on types for which unification is decidable and unitary. We
+ prove that the extension keeps principal typings and subject
+ reduction. A new set of typing rules is proposed so that type
+ generalization is simpler and more efficient. We consider typing
+ problems as general unification problems, which we solve with a
+ formalism of unificands. Unificands naturally deal with sharing
+ between types and lead to a more efficient type inference
+ algorithm. The use of unificands also simplifies the proof of
+ correctness of the algorithm by splitting it into more elementary
+ steps.",
+ paper = "Remy92.pdf",
+ keywords = "printed"
+}
+
@misc{Reyn94,
author = "Reynolds, John C.",
title = {{An Introduction to the Polymorphic Lambda Calculus}},
@@ 9693,6 +11851,421 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@inbook{Reyn97,
+ author = "Reynolds, John C.",
+ title = {{The Essence of Algol}},
+ booktitle = "ALGOLlike languages, Volume 1",
+ publisher = "Birkhauser Boston Inc.",
+ chapter = "1",
+ pages = "67--88",
+ isbn = "0817638806",
+ year = "1997",
+ abstract =
+ "Although Algol 60 has been uniquely influential in programming
+ language design, its descendants have been significantly different than
+ their prototype. In this paper, we enumerate the principles that we
+ believe embody the essence of Algol, describe a model that satisfies
+ these principles, and illustrate this model with a language that,
+ while more uniform and general, retains the character of Algol.",
+ paper = "Reyn97.pdf",
+ keywords = "printed"
+}
+
+@inproceedings{Roes19,
+ author = "Roessle, Ian and Verbeek, Freek and Ravindran, Binoy",
+ title = {{Formally Verified Big Step Semantics out of x86-64 Binaries}},
+ booktitle = "CPP'19",
+ year = "2019",
+ publisher = "ACM",
+ abstract =
+ "This paper presents a methodology for generating formally proven
+ equivalence theorems between decompiled x86-64 machine code and
+ big step semantics. These proofs are built on top of two
+ additional contributions. First, a robust and tested formal x86-64
+ machine model containing small step semantics for 1625
+ instructions. Second, a decompilation into logic methodology
+ supporting both x86-64 assembly and machine code at large
+ scale. This work enables blackbox binary verification, i.e.,
+ formal verification of a binary where source code is
+ unavailable. As such, it can be applied to safetycritical systems
+ that consist of legacy components, or components whose source code
+ is unavailable due to proprietary reasons. The methodology
+ minimizes the trusted code base by leveraging machinelearned
+ semantics to build a formal machine model. We apply the
+ methodology to several case studies, including binaries that
+ heavily rely on the SSE2 floating-point instruction set, and
+ binaries that are obtained by compiling code that is obtained by
+ inlining assembly into C code.",
+ paper = "Roes19.pdf",
+ keywords = "printed"
+}
+
+@misc{Roit13,
+ author = "Roitman, Judith",
+ title = {{Introduction to Modern Set Theory}},
+ link = "\url{http://www.math.ku.edu/~roitman/SetTheory.pdf}",
+ year = "2013",
+ paper = "Roit13.pdf"
+}
+
+@misc{Ross13,
+ author = "Rossberg, Andreas",
+ title = {{HaMLet: To Be Or Not To Be Standard ML}},
+ year = "2013",
+ link = "\url{https://people.mpisws.org/~rossberg/hamlet}",
+ paper = "Ross13.pdf",
+ keywords = "printed"
+}
+
+@article{Sann86,
+ author = "Sannella, Donald",
+ title = {{Extended ML: An Institution-Independent Framework for
+ Formal Program Development}},
+ booktitle = "Proc. Workshop on Category Theory and Computer Programming",
+ journal = "LNCS",
+ volume = "240",
+ pages = "364--389",
+ year = "1986",
+ paper = "Sann86.pdf"
+}
+
+@article{Sann87,
+ author = "Sannella, Donald and Tarlecki, Andrzej",
+ title = {{On Observational Equivalence and Algebraic Specification}},
+ journal = "J. of Computer and System Sciences",
+ volume = "34",
+ pages = "150--178",
+ year = "1987",
+ abstract =
+ "The properties of a simple and natural notion of observational
+ equivalence of algebras and the corresponding specification-building
+ operation are studied. We begin with a definition of observational
+ equivalence which is adequate to handle reachable algebras only, and
+ show how to extend it to cope with unreachable algebras and also how
+ it may be generalised to make sense under an arbitrary institution.
+ Behavioural equivalence is treated as an important special case of
+ observational equivalence, and its central role in program development
+ is shown by means of an example.",
+ paper = "Sann87.pdf"
+}
+
+@article{Sann88,
+ author = "Sannella, Donald and Tarlecki, Andrzej",
+ title = {{Specifications in an Arbitrary Institution}},
+ journal = "Information and Computation",
+ volume = "76",
+ pages = "165--210",
+ year = "1988",
+ abstract =
+ "A formalism for constructing and using axiomatic specifications in an
+ arbitrary logical system is presented. This builds on the framework
+ provided by Goguen and Burstall’s work on the notion of an institution
+ as a formalisation of the concept of a logical system for writing
+ specifications. We show how to introduce free variables into the
+ sentences of an arbitrary institution and how to add quantifiers which
+ bind them. We use this foundation to define a set of primitive
+ operations for building specifications in an arbitrary institution
+ based loosely on those in the ASL kernel specification language. We
+ examine the set of operations which results when the definitions are
+ instantiated in institutions of total and partial first-order logic
+ and compare these with the operations found in existing specification
+ languages. We present proof rules which allow proofs to be conducted
+ in specifications built using the operations we define. Finally, we
+ introduce a simple mechanism for defining and applying parameterised
+ specifications and briefly discuss the program development process.",
+ paper = "Sann88.pdf"
+}
+
+@inproceedings{Sann89,
+ author = "Sannella, Donald and Tarlecki, Andrzej",
+ title = {{Toward Formal Development of ML Programs: Foundations and
+ Methodology (Extended Abstract)}},
+ booktitle = "Int. Conf. on Theory and Practice of Software Development",
+ pages = "375--389",
+ publisher = "Springer",
+ year = "1989",
+ abstract =
+ "A methodology is presented for the formal development of modular
+ Standard ML programs from specifications. Program development proceeds
+ via a sequence of design (modular decomposition), coding and
+ refinement steps. For each of these three kinds of step, conditions
+ are given which ensure the correctness of the result. These conditions
+ seem to be as weak as possible under the constraint of being
+ expressible as local interface matching requirements.",
+ paper = "Sann89.pdf"
+}
+
+@article{SCSCP10,
+ author = "Unknown",
+ title = {{Symbolic Computation Software Composability Project and
+ its implementations}},
+ journal = "ACM Comm. in Computer Algebra",
+ volume = "44",
+ number = "4",
+ year = "2010",
+ keywords = "printed"
+}
+
+@techreport{Shiv90,
+ author = "Shivers, Olin",
+ title = {{Data-Flow Analysis and Type Recovery in Scheme}},
+ year = "1990",
+ type = "technical report",
+ institution = "Carnegie Mellon University",
+ number = "CMU-CS-90-115",
+ abstract =
+ "The lack of explicit type information in Scheme prevents the
+ application of many compiler optimizations. Implicit type
+ information can oftentimes be recovered by analyzing the flow of
+ control through primitive operations and conditionals. Such flow
+ analysis, however, is difficult in the presence of higherorder
+ functions. This paper presents a technique for recovering type
+ information from Scheme programs using flow analysis. The
+ algorithm used for flow analysis is semantically based, and is
+ novel in that it correctly handles the ``environment flow''
+ problem. Several examples of a working implementation of the
+ analysis are presented. The techniques are applicable not only to
+ Scheme type recovery, but also to related languages, such as
+ Common Lisp and Standard ML, and to related dataflow analyses,
+ such as range analysis, future analysis, and copy propagation.",
+ paper = "Shiv90.pdf",
+ keywords = "printed"
+}
+
+@book{Sici16,
+ author = "Siciliano, Bruno and Khatib, Oussama",
+ title = {{Springer Handbook of Robotics}},
+ publisher = "Springer",
+ year = "2016",
+ abstract =
+ "Reaching for the human frontier, robotics is vigorously engaged in
+ the growing challenges of new emerging domains. Interacting,
+ exploring, and working with humans, the new generation of robots will
+ increasingly touch people and their lives. The credible prospect of
+ practical robots among humans is the result of the scientific
+ endeavour of a half a century of robotic developments that established
+ robotics as a modern scientific discipline. The Springer Handbook of
+ Robotics brings a widespread and well-structured compilation of
+ classic and emerging application areas of robotics. From the
+ foundations to the social and ethical implications of robotics, the
+ handbook provides a comprehensive collection of the accomplishments in
+ the field, and constitutes a premise of further advances towards new
+ challenges in robotics. This handbook, edited by two internationally
+ renowned scientists with the support of an outstanding team of seven
+ part editors and one-hundred sixty-four authors, is an authoritative
+ reference for robotics researchers, newcomers to the field, and
+ scholars from related disciplines such as biomechanics, neurosciences,
+ virtual simulation, animation, surgery, and sensor networks among
+ others.",
+ paper = "Sici16.pdf"
+}
+
+@book{Sips13,
+ author = "Sipser, Michael",
+ title = {{Introduction to the Theory of Computation}},
+ publisher = "Cengage Learning",
+ year = "2013",
+ isbn = "9781133187790",
+ link = "\url{https://theswissbay.ch/pdf/Book/Introduction%20to%20the%20theory%20of%20computation_third%20edition%20%20Michael%20Sipser.pdf}",
+ paper = "Sips13.pdf"
+}
+
+@phdthesis{Sjob15,
+ author = "Sjoberg, Vilhelm",
+ title = {{A Dependently Typed Language with Non-Termination}},
+ school = "University of Pennsylvania",
+ year = "2015",
+ abstract =
+ "We propose a full-spectrum dependently typed programming
+ language, {\sl Zombie}, which supports general recursion
+ natively. The Zombie implementation is an elaborating
+ typechecker. We prove type safety for a large subset of the Zombie
+ core language, including features such as computational
+ irrelevance, CBV-reduction, and propositional equality with a
+ heterogeneous, completely erased elimination form. Zombie does not
+ automatically $\beta$-reduce expressions, but instead uses
+ congruence closure for proof and type inference. We give a
+ specification of a subset of the surface language via a
+ bidirectional type system, which works ``up-to-congruence'', and
+ an algorithm for elaborating expressions in this language to an
+ explicitly typed core language. We prove that our elaboration
+ algorithm is complete with respect to the source type
+ system. Zombie also features an optional termination-checker,
+ allowing nonterminating programs returning proofs as well as
+ external proofs about programs.",
+ paper = "Sjob15.pdf",
+ keywords = "printed"
+}
+
+@book{Smit13,
+ author = "Smith, Peter",
+ title = {{An Introduction to Godel's Theorems}},
+ publisher = "Cambridge University Press",
+ year = "2013",
+ isbn = "9781107022843",
+ paper = "Smit13.pdf"
+}
+
+@misc{Smit17,
+ author = "Smith, Peter",
+ title = {{Teach Yourself Logic 2017: A Study Guide}},
+ link =
+"\url{https://www.logicmatters.net/resources/pdfs/TeachYourselfLogic2017.pdf}",
+ year = "2017",
+ paper = "Smit17.pdf"
+}
+
+@book{Smul92,
+ author = "Smullyan, Raymond M.",
+ title = {{Godel's Incompleteness Theorems}},
+ publisher = "Oxford University Press",
+ year = "1992",
+ isbn = "0195046722",
+ paper = "Smul92.pdf"
+}
+
+@article{Soar96,
+ author = "Soare, Robert I.",
+ title = {{Computability and Recursion}},
+ journal = "Bulletin of Symbolic Logic",
+ volume = "2",
+ year = "1996",
+ pages = "284--321",
+ link = "\url{http://www.people.cs.uchicago.edu/~soare/History/compute.pdf}",
+ abstract =
+ "We consider the informal concept of 'computability' or 'effective
+ calculability' and two of the formalisms commonly used to define
+ it, '(Turing) computability' and '(general) recursiveness'. We
+ consider their origin, exact technical definition, concepts,
+ history, general English meanings, how they became fixed in their
+ present roles, how they were first and are now used, their impact
+ on nonspecialists, how their use will affect the future content of
+ the subject of computability theory, and its connection to other
+ related areas.
+
+ After a careful historical and conceptual analysis of
+ computability and recursion we make several recommendations in
+ section 7 about preserving the {\sl intensional} differences
+ between the concepts of 'computability' and
+ 'recursion'. Specifically we recommend that: the term 'recursive'
+ should no longer carry the additional meaning of 'computable' or
+ 'decidable', functions defined using Turing machines, register
+ machines, or their variants should be called 'computable' rather
+ than 'recursive', we should distinguish the intensional difference
+ between Church's Thesis and Turing's Thesis, and use the latter
+ particularly in dealing with mechanistic questions; the name of
+ the subject should be '{\sl Computability Theory}' or simply
+ {\sl Computability} rather than 'Recursive Function Theory'",
+ paper = "Soar96.pdf",
+ keywords = "printed"
+}
+
+@inproceedings{Soar07,
+ author = "Soare, Robert I.",
+ title = {{Computability and Incomputability}},
+ booktitle = "Proc. Third Conf. on Computability in Europe",
+ year = "2007",
+ publisher = "Springer-Verlag",
+ isbn = "978-3-540-73000-2",
+ link = "\url{http://www.people.cs.uchicago.edu/~soare/History/siena.pdf}",
+ abstract =
+ "The conventional wisdom presented in most computability books and
+ historical papers is that there were several researchers in the early
+ 1930’s working on various precise definitions and demonstrations of a
+ function specified by a finite procedure and that they should all
+ share approximately equal credit. This is incorrect. It was Turing
+ alone who achieved the characterization, in the opinion of Godel. We
+ also explore Turing’s oracle machine and its analogous properties in
+ analysis.",
+ paper = "Soar07.pdf",
+ keywords = "printed"
+}
+
+@incollection{Soar99,
+ author = "Soare, Robert I.",
+ title = {{The History and Concept of Computability}},
+ booktitle = "Handbook of Computability Theory",
+ publisher = "North-Holland",
+ link = "\url{http://www.people.cs.uchicago.edu/~soare/History/handbook.pdf}",
+ year = "1999",
+ paper = "Soar99.pdf",
+ keywords = "printed"
+}
+
+@article{Stee87,
+ author = "Steenkiste, Peter and Hennessy, John",
+ title = {{Tags and Type Checking in LISP: Hardware and Software Approaches}},
+ journal = "ACM SIGPLAN Notices",
+ volume = "22",
+ number = "10",
+ pages = "50--59",
+ year = "1987",
+ abstract =
+ "One of the major factors that distinguishes LISP from many other
+ languages (Pascal, C, Fortran, etc.) is the need for runtime type
+ checking. Runtime type checking is implemented by adding to each
+ data object a tag that encodes type information. Tags must be
+ compared for type compatibility, removed when using the data, and
+ inserted when new data items are created. This tag manipulation,
+ together with other work related to dynamic type checking and
+ generic operations, constitutes a significant component of the
+ execution time of LISP programs. This has led both to the
+ development of LISP machines that support tag checking in hardware
+ and to the avoidance of type checking by users running on stock
+ hardware. To understand the role and necessity of special-purpose
+ hardware for tag handling, we first measure the cost of type
+ checking operations for a group of LISP programs. We then examine
+ hardware and software implementations of tag operations and
+ estimate the cost of tag handling with the different tag
+ implementation schemes. The data shows that minimal levels of
+ support provide most of the benefits, and that tag operations can
+ be relatively inexpensive, even when no special hardware support
+ is present.",
+ paper = "Stee87.pdf",
+ keywords = "printed"
+}
+
+@misc{Stum18,
+ author = "Stump, Aaron",
+ title = {{Syntax and Semantics of Cedille}},
+ year = "2018",
+ comment = "arXiv:1806.04709v1",
+ paper = "Stum18.pdf",
+ keywords = "printed"
+}
+
+@article{Tanx18,
+ author = "Tan, Yong Kiam and Myreen, Magnus O. and Kumar, Ramana and
+ Fox, Anthony and Owens, Scott",
+ title = {{The Verified CakeML Compiler Backend}},
+ journal = "Functional Programming",
+ year = "2018",
+ link = "\url{https://www.cs.cmu.edu/~yongkiat/files/cakemljfp.pdf}",
+ abstract =
+ "The CakeML compiler is, to the best of our knowledge, the most
+ realistic verified compiler for a functional programming language to
+ date. The architecture of the compiler, a sequence of intermediate
+ languages through which highlevel features are compiled away
+ incrementally, enables verification of each compilation pass at an
+ appropriate level of semantic detail. Parts of the compiler’s
+ implementation resemble mainstream (unverified) compilers for strict
+ functional languages, and it supports several important features and
+ optimisations. These include efficient curried multi-argument
+ functions, configurable data representations, efficient exceptions,
+ register allocation, and more. The compiler produces machine code for
+ five architectures: x86-64, ARMv6, ARMv8, MIPS64, and RISC-V. The
+ generated machine code contains the verified runtime system which
+ includes a verified generational copying garbage collector and a
+ verified arbitrary precision arithmetic (bignum) library. In this
+ paper we present the overall design of the compiler backend, including
+ its 12 intermediate languages. We explain how the semantics and
+ proofs fit together, and provide detail on how the compiler has been
+ bootstrapped inside the logic of a theorem prover. The entire
+ development has been carried out within the HOL4 theorem prover.",
+ paper = "Tanx18.pdf",
+ keywords = "printed"
+}
+
@article{Tann93,
author = "BreazuTannen, Val and Coquand, Thierry and Gunter, Carl A.
and Scedrov, Andre",
@@ 9730,6 +12303,59 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@inproceedings{Tard03,
+ author = "Tarditi, David and Morrisett, Greg and Cheng, Perry and
+ Stone, Chris and Harper, Robert and Lee, Peter",
+ title = {{TIL: A Type-Directed, Optimizing Compiler for ML}},
+ booktitle = "20 Years of Prog. Lang. Design and Implementation",
+ year = "2003",
+ publisher = "ACM",
+ isbn = "1581136234",
+ paper = "Tard03.pdf",
+ keywords = "printed"
+}
+
+@misc{Tarvxx,
+ author = "Tarver, Mark",
+ title = {{A Language for Implementing Arbitrary Logics}},
+ year = "unknown",
+ abstract =
+ "SEQUEL is a new-generation functional programming language, which
+ allows the specification of types in a notation based on the
+ sequent calculus. The sequent calculus notation suffices for the
+ construction of types for type checking and for the specification
+ of arbitrary logics. Compilation techniques derived from both
+ functional and logic programming are used to derive
+ high-performance ATPs from these specifications.",
+ paper = "Tarvxx.pdf",
+ keywords = "printed"
+}
+
+@book{Tenn90,
+ author = "Tennant, Neil",
+ title = {{Natural Logic}},
+ publisher = "Edinburgh University Press",
+ isbn = "0852245793",
+ year = "1990",
+ paper = "Tenn90.pdf"
+}
+
+@misc{Torl17,
+ author = "Torlak, Emina",
+ title = {{Synthesis and Verification for All}},
+ link = "\url{https://www.youtube.com/watch?v=KpDyuMIb_E0}",
+ year = "2017"
+}
+
+@book{Vell06,
+ author = "Velleman, Daniel J.",
+ title = {{How To Prove It}},
+ publisher = "Cambridge University Press",
+ year = "2006",
+ isbn = "9780511161162",
+ paper = "Vell06.pdf"
+}
+
@misc{Wadl03,
author = "Wadler, Philip",
title = {{The GirardReynolds Isomorphism}},
@@ 9761,6 +12387,145 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@phdthesis{Wall04,
+ author = "Wallenburg, Angela",
+ title = {{Inductive Rules for Proving Correctness of Imperative Programs}},
+ school = "Goteborg University",
+ link = "\url{http://www.cse.chalmers.se/~angelaw/papers/lic.pdf}",
+ year = "2004",
+ abstract =
+ "This thesis is aimed at simplifying the userinteraction in
+ semiinteractive theorem proving for imperative programs. More
+ specifically, we describe the creation of customised induction rules
+ that are tailormade for the specific program to verify and thus make
+ the resulting proof simpler. The concern is in user interaction,
+ rather than in proof strength. To achieve this, two different
+ verification techniques are used.
+
+ In the first approach, we develop an idea where a software testing
+ technique, partition analysis, is used to compute a partition of the
+ domain of the induction variable, based on the branch predicates in
+ the program we wish to prove correct. Based on this partition we
+ derive mechanically a partitioned induction rule, which then inherits
+ the divideandconquer style of partition analysis, and (hopefully) is
+ easier to use than the standard (Peano) induction rule.
+
+ The second part of the thesis continues with a more thorough
+ development of the method. Here the connection to software testing is
+ completely removed and the focus is on inductive theorem proving only.
+ This time, we make use of failed proof attempts in a theorem prover
+ to gain information about the problem structure and create the
+ partition. Then, based on the partition we create an induction rule,
+ in destructor style, that is customised to make the proving of the
+ loop simpler.
+
+ With the customised induction rules, in comparison to standard (Peano)
+ induction or Noetherian induction, the required user interaction is
+ moved to an earlier point in the proof which also becomes more
+ modularised. Moreover, by using destructor style induction we
+ circumvent the problem of creating inverses of functions. The
+ soundness of the customised induction rules created by the method is
+ shown. Furthermore, the machinery of the theorem prover (KeY) is used
+ to make the method automatic. The induction rules are developed to
+ prove the total correctness of loops in an objectoriented language
+ and we concentrate on integers.",
+ paper = "Wall04.pdf",
+ keywords = "printed"
+}
+
+@misc{Wall09,
+ author = "Wallenburg, Angela",
+ title = {{Generalisation of Inductive Formulae based on Proving by
+ Symbolic Execution}},
+ year = "2009",
+ link = "\url{http://www.cse.chalmers.se/~angelaw/papers/awWING2009.pdf}",
+ abstract =
+ "Induction is a powerful method that can be used to prove the total
+ correctness of program loops. Unfortunately the induction proving
+ process in an interactive theorem prover is often very cumbersome. In
+ particular it can be difficult to find the right induction formula.
+ We describe a method for generalising induction formulae by analysing
+ a symbolic proof attempt in a semiinteractive firstorder theorem
+ prover. Based on the proof attempt we introduce universally
+ quantified variables, metavariables and sets of constraints on these.
+ The constraints describe the conditions for a successful proof. By
+ the help of examples, we outline some classes of problems and their
+ associated constraint solutions, and possible ways to automate the
+ constraint solving.",
+ paper = "Wall09.pdf",
+ keywords = "printed"
+}
+
+@mastersthesis{Walt12,
+ author = "van der Walt, Paul",
+ title = {{Reflection in Agda}},
+ school = "Utrecht University",
+ year = "2012",
+ abstract =
+ "This project explores the recent addition to Agda enabling
+ reflection, in the style of Lisp, MetaML, and Template Haskell. It
+ illustrates several possible applications of reflection that arise in
+ dependently typed programming, and details the limitations of the
+ current implementation of reflection. Examples of typesafe
+ metaprograms are given that illustrate the power of reflection coupled
+ with a dependently typed language. Among other things the limitations
+ inherent in having quote and unquote implemented as keywords are
+ highlighted. The fact that lambda terms are returned without typing
+ information is discussed, and a solution is presented. Also provided
+ is a detailed users’ guide to the reflection API and a library of
+ working code examples to illustrate how various common tasks can be
+ performed, along with suggestions for an updated reflection API in a
+ future version of Agda.",
+ paper = "Walt12.pdf",
+ keywords = "printed"
+}
+
+@misc{Weir18,
+ author = "Weirich, Stephanie",
+ title = {{Dependent Types in Haskell}},
+ year = "2018",
+ link = "\url{https://www.youtube.com/watch?v=wNa3MMbhwS4}",
+ abstract =
+ "What has dependent type theory done for Haskell? Over the past ten
+ years, the Glasgow Haskell compiler (GHC) has adopted many type system
+ features inspired by dependent type theory. In this talk, I will
+ discuss the influence of dependent types on the design of GHC and on
+ the practice of Haskell programmers. In particular, I will walk
+ through an extended example and use it to analyze what it means to
+ program with dependent types in Haskell. Throughout, I will
+ discuss what we have learned from this experiment in language design:
+ what works now, what doesn't work yet, and what surprised us along
+ the way."
+}
+
+@book{Wink84,
+ author = "Winkler, Franz",
+ title = {{The Church Rosser Property in Computer Algebra and Special
+ Theorem Proving}},
+ publisher = "Wien",
+ year = "1984",
+ isbn = "3853695841"
+}
+
+@misc{Wirt15,
+ author = "Wirth, Niklaus",
+ title = {{The Design of a RISC Architecture and its Implementation
+ with an FPGA}},
+ link =
+ "\url{https://www.inf.ethz.ch/personal/wirth/FPGArelatedWork/RISC.pdf}",
+ year = "2015",
+ paper = "Wirt15.pdf",
+ keywords = "printed"
+}
+
+@book{Yosh97,
+ author = "Yoshida, Masaaki",
+ title = {{Hypergeometric Functions, My Love}},
+ publisher = "Springer",
+ year = "1997",
+ isbn = "9783322901682"
+}
+
@inproceedings{Abad90,
author = "Abadi, Martin and Cardelli, Luca and Curien, PierreLouis
and Levy, JeanJacques",
@@ 10201,7 +12966,7 @@ paper = "Brea89.pdf"
keywords = "printed"
}
@misc{Atke18,
+@misc{Atke18a,
author = "Atkey, Robert",
title = {{Parameterised Notions of Computation}},
link = "\url{https://bentnib.org/paramnotionsjfp.pdf}",
@@ 10223,7 +12988,7 @@ paper = "Brea89.pdf"
soundly and completely model our categorical definitions — with and
without symmetric monoidal parameterisation — and act as prototypical
languages with parameterised effects.",
 paper = "Atke18.pdf",
+ paper = "Atke18a.pdf",
keywords = "printed"
}
@@ 10270,7 +13035,8 @@ paper = "Brea89.pdf"
author = "Bailey, David H. and Borwein, Jonathan M.",
title = {{Highprecision Numerical Integration: Progress and Challenges}},
journal = "J. Symbolic Computation",
 number = "46",
+ volume = "46",
+ number = "7",
pages = "741754",
year = "2011",
abstract =
@@ 12536,6 +15302,7 @@ paper = "Brea89.pdf"
author = "Hamada, Tatsuyoshi",
title = {{MathLibre: Personalizable Computer Environment for
Mathematical Research}},
+ journal = "ACM Communications in Computer Algebra",
volume = "48",
number = "3",
year = "2014",
@@ 13049,6 +15816,19 @@ paper = "Brea89.pdf"
paper = "Klee52.pdf"
}
+@incollection{Knut70,
+ author = "Knuth, Donald E. and Bendix, Peter B.",
+ title = {{Simple Word Problems in Universal Algebras}},
+ booktitle = "Computational Problems in Abstract Algebra",
+ editor = "Leech, John",
+ publisher = "Pergamon Press",
+ isbn = "080129757",
+ year = "1970",
+ pages = "263298",
+ paper = "Knut70.pdf",
+ keywords = "printed"
+}
+
@inproceedings{Kohl17,
author = "Kohlhase, Michael and De Feo, Luca and Muller, Dennis and
Pfeiffer, Markus and Rabe, Florian and Thiery, Nicolas M.
@@ 14044,7 +16824,7 @@ paper = "Brea89.pdf"
}
@book{Nede14,
 author = "Nederpelt, Rob and Geuvers, Nerman",
+ author = "Nederpelt, Rob and Geuvers, Herman",
title = {{Type Theory and Formal Proof}},
year = "2014",
publisher = "Cambridge University Press",
@@ 14301,7 +17081,7 @@ paper = "Brea89.pdf"
@book{Paul87,
author = "Paulson, Lawrence C.",
title = {{Logic and Computation}},
 publisher = "Press Synticate of Cambridge University",
+ publisher = "Press Syndicate of Cambridge University",
year = "1987",
isbn = "0521346320"
}
@@ 16324,7 +19104,8 @@ paper = "Brea89.pdf"
discovering closed recursive forms. Thus, its proof is both more
straightforward to construct and easier to validate than a direct
proof of the inference algorithm would be.",
 paper = "Aker93.pdf, printed"
+ paper = "Aker93.pdf",
+ keywords = "printed"
}
@inproceedings{Aran05,
@@ 20264,11 +23045,11 @@ paper = "Brea89.pdf"
paper = "Mahb06.pdf"
}
@book{Mahb16,
+@book{Mahb18,
author = "Mahboubi, Assia and Tassi, Enrico and Bertot, Yves and
Gonthier, Georges",
title = {{Mathematical Components}},
 year = "2016",
+ year = "2018",
publisher = "mathcomp.github.io/mcb",
link = "\url{https://mathcomp.github.io/mcb/book.pdf}",
abstract =
@@ 20303,7 +23084,8 @@ paper = "Brea89.pdf"
the first part, the reader has learnt how to prove formally the
infinitude of prime numbers, or the correctnes of the Euclidean's
division algorithm, in a few lines of proof text.",
 paper = "Mahb16.pdf"
+ paper = "Mahb18.pdf",
+ keywords = "printed"
}
@techreport{Male17,
@@ 26255,7 +29037,9 @@ paper = "Brea89.pdf"
inference algorithm can often produce sharper bounds than
unificationbased type inference techniques. At the present time,
however, our treatment of higherorder data structures and functions
 is not as elegant as that of the unification techniques."
+ is not as elegant as that of the unification techniques.",
+ paper = "Bake90.html",
+ keywords = "printed"
}
@article{Bake91,
@@ 43339,7 +46123,8 @@ paper = "Brea89.pdf"
type theory and is offered as being of interest on this basis
(whatever may be thought of the finally satisfactory character of the
theory of types as a foundation for logic and mathematics).",
 paper = "Chur40.pdf"
+ paper = "Chur40.pdf",
+ keywords = "printed"
}
@techreport{Clar92,
@@ 44126,6 +46911,7 @@ paper = "Brea89.pdf"
author = "Garcia, Ronald and Cimini, Matteo",
title = {{Principal Type Schemes for Gradual Programs}},
booktitle = "POPL 15",
+ link = "\url{https://www.cs.ubc.ca/~rxg/ptsgp.pdf}",
comment = "Updated paper",
year = "2017",
abstract =
@@ 44157,6 +46943,7 @@ paper = "Brea89.pdf"
accordingly. The resulting extension enables programs to be
interpreted using either the polymorphic or monomorphic Blame
Calculi.",
+ paper = "Garc17.pdf",
keywords = "printed"
}
@@ 44901,6 +47688,14 @@ paper = "Brea89.pdf"
isbn = "0792377443"
}
+@book{Kauf00a,
+ author = "Kaufmann, Matt and Manolios, Panagiotis and Moore, J Strother",
+ title = {{ComputerAided Reasoning: ACL2 Case Studies}},
+ publisher = "Kluwer Academic Publishers",
+ year = "2000",
+ isbn = "0792378490"
+}
+
@inproceedings{Khan11a,
author = "Khan, Muhammad Taimoor and Schreiner, Wolfgang",
title = {{Towards a Behavioral Analysis of Computer Algebra
diff git a/books/bookvolbib.pamphlet b/books/bookvolbib.pamphlet
index f2d9b8c..b286cbf 100644
 a/books/bookvolbib.pamphlet
+++ b/books/bookvolbib.pamphlet
@@ 632,7 +632,7 @@ paragraph for those unfamiliar with the terms.
number = "9007.0",
year = "1995",
abstract =
 "It is widely recognized that programming languages should oer
+ "It is widely recognized that programming languages should offer
features to help structure programs. To achieve this goal, languages
like Ada , Modula2 , objectoriented languages, and functional
languages have been developed. The structuring techniques available
@@ 656,7 +656,9 @@ paragraph for those unfamiliar with the terms.
implementation and against existing computer algebra systems. Our
development should have an impact both on the programming languages
world and on the computer algebra world.",
 paper = "Baum95.pdf"
+ paper = "Baum95.pdf",
+ keywords = "printed, axiomref"
+
}
\end{chunk}
@@ 3372,6 +3374,49 @@ paragraph for those unfamiliar with the terms.
\end{chunk}
+\index{Siek, Jeremy, G.}
+\index{Taha, Walid}
+\begin{chunk}{axiom.bib}
+@inproceedings{Siek06,
+ author = "Siek, Jeremy, G. and Taha, Walid",
+ title = {{Gradual Typing for Functional Languages}},
+ booktitle = "Proc. of Scheme and Functional Programming Workshop",
+ year = "2006",
+ publisher = "ACM",
+ pages = "8192",
+ abstract =
+ "Static and dynamic type systems have wellknown strengths and
+ weaknesses, and each is better suited for different programming
+ tasks. There have been many efforts to integrate static and dynamic
+ typing and thereby combine the benefits of both typing disciplines in
+ the same language. The flexibility of static typing can be improved
+ by adding a type Dynamic and a typecase form. The safety and
+ performance of dynamic typing can be improved by adding optional type
+ annotations or by performing type inference (as in soft
+ typing). However, there has been little formal work on type systems
+ that allow a programmercontrolled migration between dynamic and
+ static typing. Thatte proposed QuasiStatic Typing, but it does not
+ statically catch all type errors in completely annotated
+ programs. Anderson and Drossopoulou defined a nominal type system
+ for an objectoriented language with optional type annotations.
+ However, developing a sound, gradual type system for functional
+ languages with structural types is an open problem.
+
+ In this paper we present a solution based on the intuition that the
+ structure of a type may be partially known/unknown at compile time
+ and the job of the type system is to catch incompatibilities between
+ the known parts of types. We define the static and dynamic semantics
+ of a $\lambda$-calculus with optional type annotations and we prove that
+ its type system is sound with respect to the simply-typed
+ $\lambda$-calculus for fully-annotated terms. We prove that this
+ calculus is type safe and that the cost of dynamism is
+ 'payasyougo'.",
+ paper = "Siek06.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Siekmann, Jorg H.}
\begin{chunk}{axiom.bib}
@article{Siek89,
@@ 12537,7 +12582,37 @@ when shown in factored form.
\end{chunk}
\section{Coerc ion Survey  Fall 2018}
+\section{Month51}
+
+\subsection{D} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\begin{chunk}{axiom.bib}
+@misc{Daik18,
+ author = "Unknown",
+ title = {{Daikon Invariant Detector Developer Manual}},
+ year = "2018",
+ version = "5.7.2",
+ link =
+ "\url{http://plse.cs.washington.edu/daikon/download/doc/developer.pdf}",
+ paper = "Daik18.pdf"
+}
+
+\end{chunk}
+
+\begin{chunk}{axiom.bib}
+@misc{Daik18a,
+ author = "Unknown",
+ title = {{Daikon Invariant Detector User Manual}},
+ year = "2018",
+ version = "5.7.2",
+ link =
+ "\url{http://plse.cs.washington.edu/daikon/download/doc/daikon.pdf}",
+ paper = "Daik18a.pdf"
+}
+
+\end{chunk}
+
+\section{Coercion Survey  Fall 2018}
\subsection{A} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ 12552,6 +12627,62 @@ when shown in factored form.
\end{chunk}
+\index{Antoy, Sergio}
+\index{Peters, Arthur}
+\begin{chunk}{axiom.bib}
+@article{Anto12,
+ author = "Antoy, Sergio and Peters, Arthur",
+ title = {{Compiling a Functional Logic Language}},
+ booktitle = "11th Int. Symp. on Functional and Logic Programming",
+ journal = "LNCS",
+ volume = "7294",
+ pages = "1731",
+ year = "2012",
+ abstract =
+ "We present the design of a compiler for a functional logic
+ programming language and discuss the compiler's implementation.
+ The source program is abstracted by a constructor based graph
+ rewriting system obtained from a functional logic program after
+ syntax desugaring, lambda lifting and similar transformations
+ provided by a compiler's frontend. This system is
+ nondeterministic and requires a specialized normalization
+ strategy. The target program consists of 3 procedures that execute
+ graph replacements originating from either rewrite or pulltab
+ steps. These procedures are deterministic and easy to encode in an
+ ordinary programming language. We describe the generation of the 3
+ procedures, discuss the correctness of our approach, highlight
+ some key elements of an implementation, and benchmark the
+ performance of a proof of concept. Our compilation scheme is
+ elegant and simple enough to be presented in one page.",
+ paper = "Anto12.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Appel, Andrew W.}
+\begin{chunk}{axiom.bib}
+@article{Appe98a,
+ author = "Appel, Andrew W.",
+ title = {{SSA is Functional Programming}},
+ journal = "SIGPLAN Notices",
+ year = "1998",
+ abstract =
+ "Static SingleAssignment (SSA) form is an intermediate language
+ designed to make optimization clean and efficient for imperative
+ language (Fortran, C) compilers. LambdaCalculus is an
+ intermediate language that makes optimization clean and efficient
+ for functional language (Scheme, ML, Haskell) compilers. The SSA
+ community draws pictures of graphs with basic blocks and flow
+ edges, and the functional language community writes lexically
+ nested functions, but (as Richard Kelsey recently pointed out)
+ they're both doing exactly the same thing in different notation.",
+ paper = "Appe98a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Arkoudas, Konstantine}
\index{Musser, David}
\begin{chunk}{axiom.bib}
@@ 12683,6 +12814,18 @@ when shown in factored form.
\subsection{B} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Bauer, Andrej}
+\begin{chunk}{axiom.bib}
+@misc{Baue19,
+ author = "Bauer, Andrej",
+ title = {{How to Implement Type Theory in an Hour}},
+ year = "2019",
+ link = "\url{https://vimeo.com/286652934}",
+ comment = "\url{https://github.com/andrejbauer/spartantypetheory}"
+}
+
+\end{chunk}
+
\index{Baker, Henry G.}
\begin{chunk}{axiom.bib}
@article{Bake92,
@@ 12938,6 +13081,19 @@ when shown in factored form.
\end{chunk}
+\index{Bostock, David}
+\begin{chunk}{axiom.bib}
+@book{Bost97,
+ author = "Bostock, David",
+ title = {{Intermediate Logic}},
+ publisher = "Clarendon Press",
+ year = "1997",
+ isbn = "0198751419",
+ paper = "Bost97.pdf"
+}
+
+\end{chunk}
+
\index{Boyer, Robert S.}
\index{Moore, J Strother}
\begin{chunk}{axiom.bib}
@@ 13222,6 +13378,151 @@ when shown in factored form.
\end{chunk}
+\index{Cardelli, Luca}
+\begin{chunk}{axiom.bib}
+@techreport{Card93,
+ author = "Cardelli, Luca",
+ title = {{Typeful Programming}},
+ type = "research report",
+ year = "1993",
+ institution = "Digital Equipment Corporation",
+ number = "SRC Research Report 45",
+ abstract =
+ "There exists an identifiable programming style based on the
+ widespread use of type information handled through mechanical
+ typechecking techniques.
+
+ This typeful programming style is in a sense independent of the
+ language it is embedded in; it adapts equally well to functional,
+ imperative, objectoriented, and algebraic programming, and it is
+ not incompatible with relational and concurrent programming.
+
+ The main purpose of this paper is to show how typeful programming
+ is best supported by sophisticated type systems, and how these
+ systems can help in clarifying programming issues and in adding
+ power and regularity to languages.
+
+ We start with an introduction to the notions of types, subtypes
+ and polymorphism. Then we introduce a general framework, derived
+ in part from constructive logic, into which most of the known type
+ systems can be accommodated and extended. The main part of the
+ paper shows how this framework can be adapted systematically to
+ cope with actual programming constructs. For concreteness we
+ describe a particular programming language with advanced features;
+ the emphasis here is on the combination of subtyping and
+ polymorphism. We then discuss how typing concepts apply to large
+ programs. We also sketch how typing applies to system programming;
+ an area which by nature escapes rigid typing. In summary, we
+ compare the most common programming styles, suggesting that many
+ of them are compatible with, and benefit from, a typeful discipline.",
+ paper = "Card93.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Castagna, Giuseppe}
+\index{Lanvin, Victor}
+\index{Petrucciani, Tommaso}
+\index{Siek, Jeremy G.}
+\begin{chunk}{axiom.bib}
+@misc{Cast19,
+ author = "Castagna, Giuseppe and Lanvin, Victor and Petrucciani, Tommaso
+ and Siek, Jeremy G.",
+ title = {{Gradual Typing: A New Perspective}},
+ link = "\url{https://www.irif.fr/~gc//papers/popl19.pdf}",
+ year = "2019",
+ abstract =
+ "We define a new, more semantic interpretation of gradual types and use
+ it to “gradualize” two forms of polymorphism: subtyping polymorphism
+ and implicit parametric polymorphism. In particular, we use the new
+ interpretation to define three gradual type systems —HindleyMilner,
+ with subtyping, and with union and intersection types— in terms of two
+ preorders, subtyping and materialization. These systems are defined
+ both declaratively and algorithmically. The declarative presentation
+ consists in adding two subsumptionlike rules, one for each preorder,
+ to the standard rules of each type system. This yields more
+ intelligible and streamlined definitions and shows a direct
+ correlation between cast insertion and materialization. For the
+ algorithmic presentation, we show how it can be defined by reusing
+ existing techniques such as unification and tallying.",
+ paper = "Cast19.pdf"
+}
+
+\end{chunk}
+
+\index{Cervesato, Iliano}
+\index{Pfenning, Frank}
+\begin{chunk}{axiom.bib}
+@inproceedings{Cerv96,
+ author = "Cervesato, Iliano and Pfenning, Frank",
+ title = {{A Linear Logical Framework}},
+ booktitle = "Logic in Computer Science",
+ publisher = "IEEE",
+ pages = "264275",
+ link = "\url{http://www.cs.cmu.edu/~fp/papers/lics96.pdf}",
+ year = "1996",
+ abstract =
+ "We present the linear type theory LLF as the formal basis for a
+ conservative extension of the LF logical framework. LLF combines
+ the expressive power of dependent types with linear logic to
+ permit the natural and concise representation of a whole new class
+ of deductive systems, namely those dealing with state. As an
+ example we encode a version of MiniML with references including
+ its type system, its operational semantics, and a proof of type
+ preservation. Another example is the encoding of a sequent
+ calculus for classical linear logic and its cut elimination
+ theorem. LLF can also be given an operational interpretation as a
+ logic programming language under which the representations above
+ can be used for type inference, evaluation and cutelimination.",
+ paper = "Cerv96.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Cervesato, Iliano}
+\index{Pfenning, Frank}
+\begin{chunk}{axiom.bib}
+@article{Cerv00,
+ author = "Cervesato, Iliano and Pfenning, Frank",
+ title = {{A Linear Logical Framework}},
+ journal = "Information and Computation",
+ volume = "179",
+ number = "1",
+ pages = "1975",
+ link = "\url{http://www.cs.cmu.edu/~fp/papers/llf00.pdf}",
+ year = "2002",
+ abstract =
+ "We present the linear type theory $\lambda^{\Pi\&\top}$ as the
+ formal basis for LLF, a conservative extension of the logical
+ framework LF. LLF combines the expressive power of dependent types
+ with linear logic to permit the natural and concise representation
+ of a whole new class of deductive systems, namely those dealing
+ with state. As an example we encode a version of MiniML with
+ mutable references including its type system and its operational
+ semantics, and describe how to take practical advantage of the
+ representation of its computations.",
+ paper = "Cerv00.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Chiswell, Ian}
+\index{Hodges, Wilfrid}
+\begin{chunk}{axiom.bib}
+@book{Chis07,
+ author = "Chiswell, Ian and Hodges, Wilfrid",
+ title = {{Mathematical Logic}},
+ publisher = "Oxford University Press",
+ year = "2007",
+ isbn = "9780198571001",
+ paper = "Chis07.pdf"
+}
+
+\end{chunk}
+
\index{Christiansen, David Thrane}
\begin{chunk}{axiom.bib}
@misc{Chri18,
@@ 13326,6 +13627,78 @@ when shown in factored form.
\end{chunk}
+\index{Cloostermans, Bouke},
+\begin{chunk}{axiom.bib}
+@misc{Cloo12,
+ author = "Cloostermans, Bouke",
+ title = {{QuasiLinear GCD Computation and Factoring RSA Moduli}},
+ school = "Eindhoven Univ. of Technology",
+ comment = "Bachelor Project",
+ year = "2012",
+ abstract =
+ "This bachelor project consists of two parts. First, we describe
+ two subquadratic GCD algorithms and test our implementation of
+ these algorithms. Both algorithms work in a similar recursive way
+ and have a running time of $O(n log^2n log log n)$ where $n$ is
+ the amount of digits of the input. Second, we describe and compare
+ three algorithms which compute GCD's of more than two numbers. Two
+ algorithms use a treelike structure to compute these GCD's and the
+ other algorithm builds a coprime base from the input set. The
+ algorithms which uses a treelike structure to compute these GCD's
+ is faster than building a coprime base although all algorithms are
+ quasilinear. The downside of these algorithm is that they only run
+ well if few common factors exist in the input set. These
+ algorithms have been used to factorize roughly 0.4\% of a 11.1
+ million RSA keys dataset. However, this is only possible if the
+ keys are badly generated.",
+ paper = "Cloo12.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Collofello, James S.}
+\begin{chunk}{axiom.bib}
+@techreport{Coll88,
+ author = "Collofello, James S.",
+ title = {{Introduction to Software Verification and Validation}},
+ type = "technical report",
+ institution = "Carnegie Mellon University",
+ number = "SEICM131.1",
+ year = "1988",
+ abstract =
+ "SEI curriculum modules document and explicate software
+ engineering topics. They are intended to be useful in a variety of
+ situations  in the preparation of courses, in the planning of
+ individual lectures, in the design of curricula, and in
+ professional selfstudy. Topics do not exist in isolation,
+ however, and the question inevitably arises how one module is
+ related to another. Because modules are written by different
+ authors at different times, the answer could easily be 'not at
+ all'. In a young field struggling to define itself, this would be
+ an unfortunate situation.
+
+ The SEI deliberately employs a number of mechanisms to achieve
+ compatible points of view across curriculum modules and to fill
+ the content gaps between them. Modules such as {\sl Introduction
+ to Software Verification and Validation} is one of the most
+ important devices. In this latest revision, Professor Collofello
+ has more modules to integrate into a coherent picture than when we
+ released his first draft more than a year ago  modules on
+ quality assurance, unit testing, technical reviews, and formal
+ verification, as well as less directly related modules on
+ specification, requirements definition, and design. We believe you
+ will find this curriculum module interesting and useful, both in
+ its own right and by virtue of the understanding of other modules
+ that it facilitates.",
+ paper = "Coll88.pdf",
+ keywords = "printed"
+
+}
+
+\end{chunk}
+
+
\index{Copeland, B. Jack}
\begin{chunk}{axiom.bib}
@book{Cope04,
@@ 13338,6 +13711,33 @@ when shown in factored form.
\end{chunk}
+\index{Coquand, Thierry}
+\index{Kinoshita, Yoshiki}
+\index{Nordstrom, Bengt}
+\index{Takeyama, Makoto}
+\begin{chunk}{axiom.bib}
+@misc{Coqu19,
+ author = "Coquand, Thierry and Kinoshita, Yoshiki and Nordstrom, Bengt
+ and Takeyama, Makoto",
+ title = {{A Simple TypeTheoretic Language: MiniTT}},
+ year = "2019",
+ link = "\url{http://www.cse.chalmers.se/~bengt/papers/GKminiTT.pdf}",
+ abstract =
+ "This paper presents a formal description of a small functional
+ language with dependent types. The language contains data types,
+ mutual recursive/inductive definitions and a universe of small
+ types. The syntax, semantics and type system is specified in such a
+ way that the implementation of a parser, interpreter and type
+ checker is straightforward. The main difficulty is to design the
+ conversion algorithm in such a way that it works for open expressions.
+ The paper ends with a complete implementation in Haskell (around 400
+ lines of code).",
+ paper = "Coqu19.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Cosmo, Roberto Di}
\begin{chunk}{axiom.bib}
@book{Cosm95,
@@ 13367,6 +13767,19 @@ when shown in factored form.
\end{chunk}
+\index{Cutland, Nigel}
+\begin{chunk}{axiom.bib}
+@book{Cutl80,
+ author = "Cutland, Nigel",
+ title = {{Computability: An Introduction to Recursive Function Theory}},
+ publisher = "Cambridge University Press",
+ year = "1980",
+ isbn = "0521223849",
+ paper = "Cutl80.pdf"
+}
+
+\end{chunk}
+
\subsection{D} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Dahl, O.J.}
@@ 13383,6 +13796,20 @@ when shown in factored form.
\end{chunk}
+\index{van Dalen, Dirk}
+\begin{chunk}{axiom.bib}
+@book{Dale08,
+ author = "van Dalen, Dirk",
+ title = {{Logic and Structure}},
+ publisher = "Springer",
+ year = "2008",
+ isbn = "9783540208792",
+ paper = "Dale08.pdf"
+
+}
+
+\end{chunk}
+
\index{Damas, Luis}
\index{Milner, Robin}
\begin{chunk}{axiom.bib}
@@ 13470,8 +13897,35 @@ when shown in factored form.
\end{chunk}
+\index{Ding, C.}
+\index{Pei, D.}
+\index{Salomaa, A.}
+\begin{chunk}{axiom.bib}
+@book{Ding96,
+ author = "Ding, C. and Pei, D. and Salomaa, A.",
+ title = {{Chinese Remainder Theorem, Applications in Computing,
+ Coding Cryptography}},
+ publisher = "World Scientific",
+ year = "1996"
+}
+
+\end{chunk}
+
\subsection{E} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Enderton, Herbert B.}
+\begin{chunk}{axiom.bib}
+@book{Ende01,
+ author = "Enderton, Herbert B.",
+ title = {{A Mathematical Introduction to Logic}},
+ publisher = "Harcourt Academic Press",
+ year = "2001",
+ isbn = "0122384520",
+ paper = "Ende01.pdf"
+}
+
+\end{chunk}
+
\index{Ellis, Ferris}
\begin{chunk}{axiom.bib}
@misc{Elli18,
@@ 13511,6 +13965,18 @@ when shown in factored form.
\end{chunk}
+\index{Fitting, Melvin}
+\begin{chunk}{axiom.bib}
+@book{Fitt69,
+ author = "Fitting, Melvin",
+ title = {{Intuitionistic Logic Model Theory and Forcing}},
+ publisher = "North Holland",
+ year = "1969",
+ paper = "Fitt69.pdf"
+}
+
+\end{chunk}
+
\index{Fredrikson, Matt}
\begin{chunk}{axiom.bib}
@misc{Fred16,
@@ 13536,6 +14002,76 @@ when shown in factored form.
\end{chunk}
+\index{Geuvers, Herman}
+\begin{chunk}{axiom.bib}
+@article{Geuv00,
+ author = "Geuvers, Herman",
+ title = {{Induction Is Not Derivable in Second Order Dependent Type Theory}},
+ journal = "LNCS",
+ volume = "2044",
+ year = "2000",
+ pages = "166--181",
+ abstract =
+ "This paper proves the nonderivability of induction in second
+ order dependent type theory ($\lambda{}P2$). This is done by
+ providing a model construction for $\lambda{}P2$, based on a
+ saturated sets like interpretation of types as sets of terms of a
+ weakly extensional combinatory algebra. We give countermodels in
+ which the induction principle over natural numbers is not
+ valid. The proof does not depend on the specific encoding for
+ natural numbers that has been chosen (like e.g. polymorphic Church
+ numerals), so in fact we prove that there can not be an encoding
+ of natural numbers in $\lambda{}P2$ such that the induction
+ principle is satisfied. The method extends immediately to other
+ data types, like booleans, lists, trees, etc.
+
+ In the process of the proof we establish some general properties
+ of the models, which we think are of independent
+ interest. Moreover, we show that the Axiom of Choice is not
+ derivable in $\lambda{}P2$.",
+ paper = "Geuv00.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Girard, Jean-Yves}
+\begin{chunk}{axiom.bib}
+@article{Gira87,
+ author = "Girard, Jean-Yves",
+ title = {{Linear Logic}},
+ journal = "Theoretical Computer Science",
+ volume = "50",
+ year = "1987",
+ pages = "1--102",
+ abstract =
+ "The familiar connective of negation is broken into two
+ operations: linear negation which is the purely negative part of
+ negation and the modality ``of course'' which has the meaning of a
+ reaffirmation. Following this basic discovery, a completely new
+ approach to the whole area between constructive logics and
+ programmation is initiated.",
+ paper = "Gira87.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Girard, Jean-Yves}
+\begin{chunk}{axiom.bib}
+@book{Gira95,
+ author = "Girard, Jean-Yves",
+ title = {{Linear Logic: Its Syntax and Semantics}},
+ publisher = "Cambridge University Press",
+ year = "1995",
+ link = "\url{http://girard.perso.math.cnrs.fr/Synsem.pdf}",
+ isbn = "9780521559614",
+ paper = "Gira95.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Goldblatt, Robert}
\begin{chunk}{axiom.bib}
@book{Gold84,
@@ 13638,6 +14174,38 @@ when shown in factored form.
\end{chunk}
+\index{Gravel, Katherine}
+\index{Jananthan, Hayden}
+\index{Kepner, Jeremy}
+\begin{chunk}{axiom.bib}
+@misc{Grav18,
+ author = "Gravel, Katherine and Jananthan, Hayden and Kepner, Jeremy",
+ title = {{Visually Representing the Landscape of Mathematical Structures}},
+ link = "\url{https://arxiv.org/abs/1809.05930}",
+ year = "2018",
+ abstract =
+ "The information technology explosion has dramatically increased
+ the application of new mathematical ideas and has led to an
+ increasing use of mathematics across a wide range of fields that
+ have been traditionally labeled 'pure' or 'theoretical'. There is
+ a critical need for tools to make these concepts readily
+ accessible to a broader community. This paper details the creation
+ of visual representations of mathematical structures commonly
+ found in pure mathematics to enable both students and
+ professionals to rapidly understand the relationships among and
+ between mathematical structures. Ten broad mathematical structures
+ were identified and used to make eleven maps, with 187 unique
+ structures in total. The paper presents a method and
+ implementation for categorizing mathematical structures and for
+ drawing relationships between mathematical structures that
+ provides insight for a wide audience. The most in depth map is
+ available online for public use.",
+ comment = "\url{http://www.mit.edu/~kepner/GravelMathMap.pdf}",
+ paper = "Grav18.pdf"
+}
+
+\end{chunk}
+
\index{Grayson, Daniel R.}
\begin{chunk}{axiom.bib}
@misc{Gray18,
@@ 13703,7 +14271,9 @@ when shown in factored form.
The question whether the thesis is true in full generality is actively
discussed from 1960s. We argue that, in full generality, the thesis
 cannot possibly be true."
+ cannot possibly be true.",
+ paper = "Gure18.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 13904,6 +14474,30 @@ when shown in factored form.
\end{chunk}
+\index{Hemann, Jason}
+\index{Friedman, Daniel P.}
+\begin{chunk}{axiom.bib}
+@misc{Hema13,
+ author = "Hemann, Jason and Friedman, Daniel P.",
+ title = {{$\mu$Kanren: A Minimal Functional Core for Relational Programming}},
+ year = "2013",
+ link = "\url{webyrd.net/scheme2013/papers/HemannMuKanren2013.pdf}",
+ abstract =
+ "This paper presents $\mu$Kanren, a minimalist language in the
+ miniKanren family of relational (logic) programming languages. Its
+ implementation comprises fewer than 40 lines of Scheme. We
+ motivate the need for a minimalist miniKanren language, and
+ iteratively develop a complete search strategy. Finally, we
+ demonstrate that through sufficient userlevel features one
+ regains much of the expressiveness of other miniKanren
+ languages. In our opinion its brevity and simple semantics make
+ $\mu$Kanren uniquely elegant.",
+ paper = "Hema13.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Henglein, Friedrich}
\begin{chunk}{axiom.bib}
@phdthesis{Heng89,
@@ 14010,6 +14604,45 @@ when shown in factored form.
\end{chunk}
+\index{Hoare, C.A.R.}
+\begin{chunk}{axiom.bib}
+@article{Hoar74,
+ author = "Hoare, C.A.R.",
+ title = {{Hints on Programming Language Design}},
+ journal = "Computer Systems Reliability",
+ volume = "20",
+ pages = "505--534",
+ year = "1974",
+ link = "\url{http://flint.cs.yale.edu/cs428/doc/HintsPL.pdf}",
+ abstract =
+ "This paper presents the view that a programming language
+ is a tool that should assist the programmer in the most difficult
+ aspects of his art, namely program design, documentation, and
+ debugging. It discusses the objective criteria for evaluating a
+ language design and illustrates them by application to language
+ features of both highlevel languages and machinecode
+ programming. It concludes with an annotated reading list,
+ recommended for all intending language designers.",
+ paper = "Hoar74.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Hrbacek, Karel}
+\index{Jech, Thomas}
+\begin{chunk}{axiom.bib}
+@book{Hrba99,
+ author = "Hrbacek, Karel and Jech, Thomas",
+ title = {{Introduction to Set Theory}},
+ publisher = "Marcel Dekker, Inc",
+ year = "1999",
+ isbn = "0824779150",
+ paper = "Hrba99.pdf"
+}
+
+\end{chunk}
+
\subsection{I} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\subsection{J} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ 14073,6 +14706,58 @@ when shown in factored form.
\end{chunk}
+\index{Kaliszyk, Cezary}
+\index{Urban, Josef}
+\begin{chunk}{axiom.bib}
+@misc{Kali14,
+ author = "Kaliszyk, Cezary and Urban, Josef",
+ title = {{LearningAssisted Automated Reasoning with Flyspeck}},
+ comment = "arXiv:1211.7012v3",
+ year = "2014",
+ abstract =
+ "The considerable mathematical knowledge encoded by the Flyspeck
+ project is combined with external automated theorem provers (ATPs)
+ and machinelearning premise selection methods trained on the
+ Flyspeck proofs, producing an AI system capable of proving a wide
+ range of mathematical conjectures automatically. The performance
+ of this architecture is evaluated in a bootstrapping scenario
+ emulating the development of Flyspeck from axioms to the last
+ theorem, each time using only the previous theorems and proofs. It
+ is shown that 39\% of the 14185 theorems could be proved in a
+ pushbutton mode (without any highlevel advice and user
+ interaction) in 30 seconds of real time on a fourteen CPU
+ workstation.
+
+ The necessary work involves: (i) an implementation of sound
+ translations of the HOL Light logic to ATP formalisms: untyped
+ firstorder, polymorphic typed firstorder, and typed firstorder,
+ (ii) export of the dependency information from HOL Light and ATP
+ proofs for the machine learners, and (iii) choice of suitable
+ representations and methods for learning from previous proofs, and
+ their integration as advisors with HOL Light. This work is
+ described and discussed here, and an initial analysis of the body
+ of proofs that were found fully automatically is provided.",
+ paper = "Kali14.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kaminski, Paul}
+\begin{chunk}{axiom.bib}
+@techreport{Kami12,
+ author = "Kaminski, Paul",
+ title = {{The Role of Autonomy in DoD Systems}},
+ type = "Task Force Report",
+ institution = "Defense Science Board, Dept. of Defense",
+ year = "2012",
+ link = "\url{https://fas.org/irp/agency/dod/dsb/autonomy.pdf}",
+ comment = "verification, validation, and trust"
+
+}
+
+\end{chunk}
+
\index{Kaplan, Marc A.}
\index{Ullman, Jeffrey D.}
\begin{chunk}{axiom.bib}
@@ 14125,6 +14810,40 @@ when shown in factored form.
\end{chunk}
+\index{Knill, Oliver}
+\begin{chunk}{axiom.bib}
+@misc{Knil12,
+ author = "Knill, Oliver",
+ title = {{A Multivariate Chinese Remainder Theorem}},
+ link = "\url{https://arxiv.org/abs/1206.5114}",
+ year = "2012",
+ abstract =
+ "Using an adaptation of Qin Jiushao's method from the 13th
+ century, it is possible to prove that a system of linear modular
+ equations $a(i,1)x(1)+\ldots+a(i,n)x(n)=b(i)\bmod m(i),i=1,\ldots,n$ has
+ integer solutions if $m(i)>1$ are pairwise relatively prime and in
+ each row, at least one matrix element $a(i,j)$ is relatively prime
+ to $m(i)$. The Chinese remainder theorem is a special case, where
+ $A$ has only one column.",
+ paper = "Knil12.pdf"
+}
+
+\end{chunk}
+
+\index{Knill, Oliver}
+\begin{chunk}{axiom.bib}
+@misc{Knil18,
+ author = "Knill, Oliver",
+ title = {{Some Fundamental Theorems in Mathematics}},
+ link = "\url{https://arxiv.org/pdf/1807.08416.pdf}",
+ year = "2018",
+ abstract =
+ "An expository hitchhikers guide to some theorems in mathematics",
+ paper = "Knil18.pdf"
+}
+
+\end{chunk}
+
\index{Kreitz, Christoph}
\index{Rahli, Vincent}
\begin{chunk}{axiom.bib}
@@ 14154,6 +14873,56 @@ when shown in factored form.
\subsection{L} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Lamport, Leslie}
+\index{Paulson, Lawrence C.}
+\begin{chunk}{axiom.bib}
+@article{Lamp99,
+ author = "Lamport, Leslie and Paulson, Lawrence C.",
+ title = {{Should Your Specification Language Be Typed?}},
+ journal = "ACM Trans. on Programming Languages and Systems",
+ volume = "21",
+ number = "3",
+ year = "1999",
+ pages = "502--526",
+ abstract =
+ "Most specification languages have a type system. Type systems are
+ hard to get right, and getting them wrong can lead to
+ inconsistencies. Set theory can serve as the basis for a
+ specification language without types. This possibility, which has
+ been widely overlooked, offers many advantages. Untyped set theory
+ is simple and is more flexible than any simple typed
+ formalism. Polymorphism, overloading, and subtyping can make a
+ type system more powerful, but at the cost of increased
+ complexity, and such refinements can never attain the flexibility
+ of having no types at all. Typed formalisms have advantages too,
+ stemming from the power of mechanical type checking. While types
+ serve little purpose in hand proofs, they do help with mechanized
+ proofs. In the absence of verification, type checking can catch
+ errors in specifications. It may be possible to have the best of
+ both worlds by adding typing annotations to an untyped
+ specification language. We consider only specification languages,
+ not programming languages",
+ paper = "Lamp99.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Leary, Christopher C.}
+\index{Kristiansen, Lars}
+\begin{chunk}{axiom.bib}
+@book{Lear15,
+ author = "Leary, Christopher C. and Kristiansen, Lars",
+ title = {{A Friendly Introduction to Mathematical Logic}},
+ publisher = "Milne Library, Geneseo",
+ year = "2015",
+ isbn = "9781942341321",
+ paper = "Lear15.pdf"
+
+}
+
+\end{chunk}
+
\index{Lee, Daniel K.}
\index{Crary, Karl}
\index{Harper, Robert}
@@ 14215,6 +14984,78 @@ when shown in factored form.
\end{chunk}
+\index{MacQueen, David B.}
+\begin{chunk}{axiom.bib}
+@misc{Macqxx,
+ author = "MacQueen, David B.",
+ title = {{Reflections on Standard ML}},
+ year = "unknown",
+ abstract =
+ "Standard ML is one of a number of new programming languages
+ developed in the 1980s that are seen as suitable vehicles for
+ serious systems and applications programming. It offers an
+ excellent ratio of expressiveness to language complexity, and
+ provides competitive efficiency. Because of its type and module
+ system, Standard ML manages to combine safety, security, and
+ robustness with much of the flexibility of dynamically typed
+ languages like Lisp. It is also has the most welldeveloped
+ scientific foundation of any major language. Here I review the
+ strengths and weaknesses of Standard ML and describe some of what
+ we have learned through the design, implementation, and use of the
+ language.",
+ paper = "Macqxx.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Martin-L\"of, P.}
+\begin{chunk}{axiom.bib}
+@article{Mart84,
+ author = "Martin-L{\"o}f, P.",
+ title = {{Constructive Mathematics and Computer Programming}},
+ journal = "Phil. Trans. Royal Society London",
+ volume = "312",
+ year = "1984",
+ pages = "501--518",
+ link =
+ "\url{https://royalsocietypublishing.org/doi/pdf/10.1098/rsta.1984.0073}",
+ abstract =
+ "If programming is understood not as the writing of instructions for
+ this or that computing machine but as the design of methods of
+ computation that it is the computer’s duty to execute (a difference
+ that Dijkstra has referred to as the difference between computer
+ science and computing science), then it no longer seems possible to
+ distinguish the discipline of programming from constructive
+ mathematics. This explains why the intuitionistic theory of types
+ (MartinLof 1975 In Logic Colloquium 1973 (ed. H. E. Rose and
+ J. C. Shepherdson), pp. 73  118 . Amsterdam: North Holland), which
+ was originally developed as a symbolism for the precise codification
+ of constructive mathematics, may equally well be viewed as a
+ programming language. As such it provides a precise notation not
+ only, like other programming languages, for the programs themselves
+ but also for the tasks that the programs are supposed to perform.
+ Moreover, the inference rules of the theory of types, which are again
+ completely formal, appear as rules of correct program synthesis. Thus
+ the correctness of a program written in the theory of types is proved
+ formally at the same time as it is being synthesized.",
+ paper = "Mart84.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Martzloff, J.C.}
+\begin{chunk}{axiom.bib}
+@book{Mart97a,
+ author = "Martzloff, J.C.",
+ title = {{A History of Chinese Mathematics}},
+ publisher = "Springer Verlag",
+ year = "1997"
+}
+
+\end{chunk}
+
\index{McKenna, Brian}
\begin{chunk}{axiom.bib}
@misc{Mcke14,
@@ 14361,6 +15202,35 @@ when shown in factored form.
\end{chunk}
+\index{Mueller, Robert A.}
+\index{Page, L. Rex}
+\begin{chunk}{axiom.bib}
+@book{Muel88,
+ author = "Mueller, Robert A. and Page, L. Rex",
+ title = {{Symbolic Computing with Lisp and Prolog}},
+ publisher = "Wiley",
+ year = "1988",
+ isbn = "0471607711"
+}
+
+\end{chunk}
+
+\index{Murphy, Robin R.}
+\begin{chunk}{axiom.bib}
+@book{Murp18,
+ author = "Murphy, Robin R.",
+ title = {{Robotics Through Science Fiction}},
+ publisher = "MIT Press",
+ year = "2018",
+ link =
+ "\url{https://mitpress.mit.edu/books/roboticsthroughsciencefiction}",
+ isbn = "9780262536264",
+ comment = "verification, validation, and trust"
+
+}
+
+\end{chunk}
+
\index{Murray, Toby}
\index{van Oorcchot, P.C.}
\begin{chunk}{axiom.bib}
@@ 14454,6 +15324,26 @@ when shown in factored form.
\subsection{O} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Oberhoff, Sebastian}
+\begin{chunk}{axiom.bib}
+@misc{Ober18,
+ author = "Oberhoff, Sebastian",
+ title = {{Incompleteness Ex Machina}},
+ year = "2018",
+ abstract =
+ "In this essay we'll prove Godel's incompleteness theorems
+ twice. First, we'll prove them the good old-fashioned way. Then
+ we'll repeat the feat in the setting of computation. In the
+ process we'll discover that Godel's work, rightly viewed, needs to
+ be split into two parts: the transport of computation into the
+ arena of arithmetic on the one hand and the actual incompleteness
+ theorems on the other. After we're done there will be cake.",
+ paper = "Ober18.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Olsson, Ola}
\index{Wallenburg, Angela}
\begin{chunk}{axiom.bib}
@@ 14535,6 +15425,31 @@ when shown in factored form.
\subsection{P} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Pfenning, Frank}
+\begin{chunk}{axiom.bib}
+@misc{Pfen06,
+ author = "Pfenning, Frank",
+ title = {{Constraint Logic Programming}},
+ year = "2006",
+ link = "\url{http://www.cs.cmu.edu/~fp/courses/lp/lectures/27clp.pdf}",
+ paper = "Pfen06.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Pfenning, Frank}
+\begin{chunk}{axiom.bib}
+@misc{Pfen07,
+ author = "Pfenning, Frank",
+ title = {{Logic Programming}},
+ year = "2007",
+ link = "\url{http://www.cs.cmu.edu/~fp/courses/lp/lectures/lpall.pdf}",
+ paper = "Pfen07.pdf"
+}
+
+\end{chunk}
+
\index{Pittman, Dan}
\begin{chunk}{axiom.bib}
@misc{Pitt18,
@@ 14594,6 +15509,22 @@ when shown in factored form.
\end{chunk}
+\index{Popkorn, Sally}
+\index{Simmons, Harold}
+\begin{chunk}{axiom.bib}
+@book{Popk94,
+ author = "Popkorn, Sally",
+ title = {{First Steps in Modal Logic}},
+ publisher = "Cambridge University Press",
+ year = "1994",
+ isbn = "9780521464826",
+ link =
+ "\url{https://cs.famaf.unc.edu.ar/~careces/ml18/downloads/popkorn.pdf}",
+ paper = "Popk94.pdf"
+}
+
+\end{chunk}
+
\index{Post, Emil L.}
\begin{chunk}{axiom.bib}
@article{Post44,
@@ 14612,6 +15543,48 @@ when shown in factored form.
\subsection{Q} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Deutsch, David}
+\begin{chunk}{axiom.bib}
+@article{Deut85,
+ author = "Deutsch, David",
+ title = {{Quantum Theory, the Church-Turing Principle and the
+ Universal Quantum Computer}},
+ journal = "Proc. Royal Society of London",
+ volume = "400",
+ pages = "97--117",
+ year = "1985",
+ abstract =
+ "It is argued that underlying the Church-Turing hypothesis there
+ is an implicit physical assertion. Here, this assertion is
+ presented explicitly as a physical principle: 'every finitely
+ realizable physical system can be perfectly simulated by a
+ universal model computing machine operating by finite
+ means'. Classical physics and the universal Turing machine,
+ because the former is continuous and the latter discrete, do not
+ obey the principle, at least in the strong form above. A class of
+ model computing machines that is the quantum generalization of the
+ class of Turing machines is described, and it is shown that
+ quantum theory and the 'universal quantum computer' are compatible
+ with the principle. Computing machines resembling the universal
+ quantum computer could, in principle, be built and would have many
+ remarkable properties not reproducible by any Turing
+ machine. These do not include the computation of nonrecursive
+ functions, but they do include 'quantum parallelism', a method by
+ which certain probabilistic tasks can be performed faster by a
+ universal quantum computer than by any classical restriction of
+ it. The intuitive explanation of these properties places an
+ intolerable strain on all interpretations of quantum theory other
+ than Everett's. Some of the numerous connections between the
+ quantum theory of computation and the rest of physics are
+ explored. Quantum complexity theory allows a physically more
+ reasonable definition of the 'complexity' or 'knowledge' in a
+ physical system than does classical complexity theory.",
+ paper = "Deut85.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{R} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Rees, Jonathan}
@@ 14706,6 +15679,53 @@ when shown in factored form.
\end{chunk}
+\index{Roessle, Ian}
+\index{Verbeek, Freek}
+\index{Ravindran, Binoy}
+\begin{chunk}{axiom.bib}
+@inproceedings{Roes19,
+ author = "Roessle, Ian and Verbeek, Freek and Ravindran, Binoy",
+ title = {{Formally Verified Big Step Semantics out of x86-64 Binaries}},
+ booktitle = "CPP'19",
+ year = "2019",
+ publisher = "ACM",
+ abstract =
+ "This paper presents a methodology for generating formally proven
+ equivalence theorems between decompiled x8664 machine code and
+ big step semantics. These proofs are built on top of two
+ additional contributions. First, a robust and tested formal x8664
+ machine model containing small step semantics for 1625
+ instructions. Second, a decompilation into logic methodology
+ supporting both x8664 assembly and machine code at large
+ scale. This work enables blackbox binary verification, i.e.,
+ formal verification of a binary where source code is
+ unavailable. As such, it can be applied to safetycritical systems
+ that consist of legacy components, or components whose source code
+ is unavailable due to proprietary reasons. The methodology
+ minimizes the trusted code base by leveraging machinelearned
+ semantics to build a formal machine model. We apply the
+ methodology to several case studies, including binaries that
+ heavily rely on the SSE2 floatingpoint instruction set, and
+ binaries that are obtained by compiling code that is obtained by
+ inlining assembly into C code.",
+ paper = "Roes19.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Roitman, Judith}
+\begin{chunk}{axiom.bib}
+@misc{Roit13,
+ author = "Roitman, Judith",
+ title = {{Introduction to Modern Set Theory}},
+ link = "\url{http://www.math.ku.edu/~roitman/SetTheory.pdf}",
+ year = "2013",
+ paper = "Roit13.pdf"
+}
+
+\end{chunk}
+
\index{Rossberg, Andreas}
\begin{chunk}{axiom.bib}
@misc{Ross13,
@@ 14862,6 +15882,123 @@ when shown in factored form.
\end{chunk}
+\index{Siciliano, Bruno}
+\index{Khatib, Oussama}
+\begin{chunk}{axiom.bib}
+@book{Sici16,
+ author = "Siciliano, Bruno and Khatib, Oussama",
+ title = {{Springer Handbook of Robotics}},
+ publisher = "Springer",
+ year = "2016",
+ abstract =
+ "Reaching for the human frontier, robotics is vigorously engaged in
+ the growing challenges of new emerging domains. Interacting,
+ exploring, and working with humans, the new generation of robots will
+ increasingly touch people and their lives. The credible prospect of
+ practical robots among humans is the result of the scientific
+ endeavour of a half a century of robotic developments that established
+ robotics as a modern scientific discipline. The Springer Handbook of
+ Robotics brings a widespread and wellstructured compilation of
+ classic and emerging application areas of robotics. From the
+ foundations to the social and ethical implications of robotics, the
+ handbook provides a comprehensive collection of the accomplishments in
+ the field, and constitutes a premise of further advances towards new
+ challenges in robotics. This handbook, edited by two internationally
+ renowned scientists with the support of an outstanding team of seven
+ part editors and onehundred sixtyfour authors, is an authoritative
+ reference for robotics researchers, newcomers to the field, and
+ scholars from related disciplines such as biomechanics, neurosciences,
+ virtual simulation, animation, surgery, and sensor networks among
+ others.",
+ paper = "Sici16.pdf"
+}
+
+\end{chunk}
+
+\index{Sipser, Michael}
+\begin{chunk}{axiom.bib}
+@book{Sips13,
+ author = "Sipser, Michael",
+ title = {{Introduction to the Theory of Computation}},
+ publisher = "Cengage Learning",
+ year = "2013",
+ isbn = "9781133187790",
+ link = "\url{https://theswissbay.ch/pdf/Book/Introduction%20to%20the%20theory%20of%20computation_third%20edition%20%20Michael%20Sipser.pdf}",
+ paper = "Sips13.pdf"
+}
+
+\end{chunk}
+
+\index{Sjoberg, Vilhelm}
+\begin{chunk}{axiom.bib}
+@phdthesis{Sjob15,
+ author = "Sjoberg, Vilhelm",
+ title = {{A Dependently Typed Language with NonTermination}},
+ school = "University of Pennsylvania",
+ year = "2015",
+ abstract =
+ "We propose a fullspectrum dependently typed programming
+ language, {\sl Zombie}, which supports general recursion
+ natively. The Zombie implementation is an elaborating
+ typechecker. We prove type safety for a large subset of the Zombie
+ core language, including features such as computational
+ irrelevance, CBVreduction, and propositional equality with a
+ heterogeneous, completely erased elimination form. Zombie does not
+ automatically $\beta$reduce expressions, but instead uses
+ congruence closure for proof and type inference. We give a
+ specification of a subset of the surface language via a
+ bidirectional type system, which works ``uptocongruence'', and
+ an algorithm for elaborating expressions in this language to an
+ explicitly typed core language. We prove that our elaboration
+ algorithm is complete with respect to the source type
+ system. Zombie also features an optional terminationchecker,
+ allowing nonterminating programs returning proofs as well as
+ external proofs about programs.",
+ paper = "Sjob15.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Smith, Peter}
+\begin{chunk}{axiom.bib}
+@book{Smit13,
+ author = "Smith, Peter",
+ title = {{An Introduction to Godel's Theorems}},
+ publisher = "Cambridge University Press",
+ year = "2013",
+ isbn = "9781107022843",
+ paper = "Smit13.pdf"
+}
+
+\end{chunk}
+
+\index{Smith, Peter}
+\begin{chunk}{axiom.bib}
+@misc{Smit17,
+ author = "Smith, Peter",
+ title = {{Teach Yourself Logic 2017: A Study Guide}},
+ link =
+"\url{https://www.logicmatters.net/resources/pdfs/TeachYourselfLogic2017.pdf}",
+ year = "2017",
+ paper = "Smit17.pdf"
+}
+
+\end{chunk}
+
+\index{Smullyan, Raymond M.}
+\begin{chunk}{axiom.bib}
+@book{Smul92,
+ author = "Smullyan, Raymond M.",
+ title = {{Godel's Incompleteness Theorems}},
+ publisher = "Oxford University Press",
+ year = "1992",
+ isbn = "0195046722",
+ paper = "Smul92.pdf"
+}
+
+\end{chunk}
+
\index{Soare, Robert I.}
\begin{chunk}{axiom.bib}
@article{Soar96,
@@ 14980,8 +16117,61 @@ when shown in factored form.
\end{chunk}
+\index{Stump, Aaron}
+\begin{chunk}{axiom.bib}
+@misc{Stum18,
+ author = "Stump, Aaron",
+ title = {{Syntax and Semantics of Cedille}},
+ year = "2018",
+ comment = "arXiv:1806.04709v1",
+ paper = "Stum18.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{T} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Tan, Yong Kiam}
+\index{Myreen, Magnus O.}
+\index{Kumar, Ramana}
+\index{Fox, Anthony}
+\index{Owens, Scott}
+\begin{chunk}{axiom.bib}
+@article{Tanx18,
+ author = "Tan, Yong Kiam and Myreen, Magnus O. and Kumar, Ramana and
+ Fox, Anthony and Owens, Scott",
+ title = {{The Verified CakeML Compiler Backend}},
+ journal = "Functional Programming",
+ year = "2018",
+ link = "\url{https://www.cs.cmu.edu/~yongkiat/files/cakemljfp.pdf}",
+ abstract =
+ "The CakeML compiler is, to the best of our knowledge, the most
+ realistic verified compiler for a functional programming language to
+ date. The architecture of the compiler, a sequence of intermediate
+ languages through which highlevel features are compiled away
+ incrementally, enables verification of each compilation pass at an
+ appropriate level of semantic detail. Parts of the compiler’s
+ implementation resemble mainstream (unverified) compilers for strict
+ functional languages, and it supports several important features and
+ optimisations. These include efficient curried multiargument
+ functions, configurable data representations, efficient exceptions,
+ register allocation, and more. The compiler produces machine code for
+ five architectures: x8664, ARMv6, ARMv8, MIPS64, and RISCV. The
+ generated machine code contains the verified runtime system which
+ includes a verified generational copying garbage collector and a
+ verified arbitrary precision arithmetic (bignum) library. In this
+ paper we present the overall design of the compiler backend, including
+ its 12 intermediate languages. We explain how the semantics and
+ proofs fit together, and provide detail on how the compiler has been
+ bootstrapped inside the logic of a theorem prover. The entire
+ development has been carried out within the HOL4 theorem prover.",
+ paper = "Tanx18.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{BreazuTannen, Val}
\index{Coquand, Thierry}
\index{Gunter, Carl A.}
@@ 15047,6 +16237,39 @@ when shown in factored form.
\end{chunk}
+\index{Tarver, Mark}
+\begin{chunk}{axiom.bib}
+@misc{Tarvxx,
+ author = "Tarver, Mark",
+ title = {{A Language for Implementing Arbitrary Logics}},
+ year = "unknown",
+ abstract =
+ "SEQUEL is a newgeneration functional programming language, which
+ allows the specification of types in a notation based on the
+ sequent calculus. The sequent calculus notation suffices for the
+ construction of types for type checking and for the specification
+ of arbitrary logics. Compilation techniques derived from both
+ functional and logic programming are used to derive
+ highperformance ATPs from these specifications.",
+ paper = "Tarvxx.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Tennant, Neil}
+\begin{chunk}{axiom.bib}
+@book{Tenn90,
+ author = "Tennant, Neil",
+ title = {{Natural Logic}},
+ publisher = "Edinburgh University Press",
+ isbn = "0852245793",
+ year = "1990",
+ paper = "Tenn90.pdf"
+}
+
+\end{chunk}
+
\index{Torlak, Emina}
\begin{chunk}{axiom.bib}
@misc{Torl17,
@@ 15061,6 +16284,19 @@ when shown in factored form.
\subsection{U} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\subsection{V} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Velleman, Daniel J.}
+\begin{chunk}{axiom.bib}
+@book{Vell06,
+ author = "Velleman, Daniel J.",
+ title = {{How To Prove It}},
+ publisher = "Cambridge University Press",
+ year = "2006",
+ isbn = "9780511161162",
+ paper = "Vell06.pdf"
+}
+
+\end{chunk}
+
\subsection{W} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Wadler, Philip}
@@ 20594,10 +21830,10 @@ when shown in factored form.
\subsection{N} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Nederpelt, Rob}
\index{Geuvers, Nerman}
+\index{Geuvers, Herman}
\begin{chunk}{axiom.bib}
@book{Nede14,
 author = "Nederpelt, Rob and Geuvers, Nerman",
+ author = "Nederpelt, Rob and Geuvers, Herman",
title = {{Type Theory and Formal Proof}},
year = "2014",
publisher = "Cambridge University Press",
@@ 23391,7 +24627,8 @@ when shown in factored form.
discovering closed recursive forms. Thus, its proof is both more
straightforward to construct and easier to validate than a direct
proof of the inference algorithm would be.",
 paper = "Aker93.pdf, printed"
+ paper = "Aker93.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 64893,6 +66130,7 @@ AddisonWesley. 181187. (1965)
author = "Garcia, Ronald and Cimini, Matteo",
title = {{Principal Type Schemes for Gradual Programs}},
booktitle = "POPL 15",
+ link = "\url{https://www.cs.ubc.ca/~rxg/ptsgp.pdf}",
comment = "Updated paper",
year = "2017",
abstract =
@@ 64924,6 +66162,7 @@ AddisonWesley. 181187. (1965)
accordingly. The resulting extension enables programs to be
interpreted using either the polymorphic or monomorphic Blame
Calculi.",
+ paper = "Garc17.pdf",
keywords = "printed"
}
diff git a/changelog b/changelog
index b1cf840..4f357f7 100644
 a/changelog
+++ b/changelog
@@ 1,3 +1,5 @@
+20190131 tpd src/axiomwebsite/patches.html 20190131.01.tpd.patch
+20190131 tpd books/bookvolbib add references
20181211 tpd src/axiomwebsite/patches.html 20181211.03.tpd.patch
20181211 tpd src/input/kernel.input add tests
20181211 tpd src/axiomwebsite/patches.html 20181211.02.tpd.patch
diff git a/patch b/patch
index 1ce86de..dce4f9a 100644
 a/patch
+++ b/patch
@@ 1,5 +1,1163 @@
src/input/kernel.input
+books/bookvolbib add references
Goal: Axiom Maintenance
+Goal: Proving Axiom Sane
add tests
+\index{Castagna, Giuseppe}
+\index{Lanvin, Victor}
+\index{Petrucciani, Tommaso}
+\index{Siek, Jeremy G.}
+\begin{chunk}{axiom.bib}
+@misc{Cast19,
+ author = "Castagna, Giuseppe and Lanvin, Victor and Petrucciani, Tommaso
+ and Siek, Jeremy G.",
+ title = {{Gradual Typing: A New Perspective}},
+ link = "\url{https://www.irif.fr/~gc//papers/popl19.pdf}",
+ year = "2019",
+ abstract =
+ "We define a new, more semantic interpretation of gradual types and use
+ it to “gradualize” two forms of polymorphism: subtyping polymorphism
+ and implicit parametric polymorphism. In particular, we use the new
+ interpretation to define three gradual type systems —HindleyMilner,
+ with subtyping, and with union and intersection types— in terms of two
+ preorders, subtyping and materialization. These systems are defined
+ both declaratively and algorithmically. The declarative presentation
+ consists in adding two subsumptionlike rules, one for each preorder,
+ to the standard rules of each type system. This yields more
+ intelligible and streamlined definitions and shows a direct
+ correlation between cast insertion and materialization. For the
+ algorithmic presentation, we show how it can be defined by reusing
+ existing techniques such as unification and tallying.",
+ paper = "Cast19.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Siek, Jeremy G.}
+\index{Taha, Walid}
+\begin{chunk}{axiom.bib}
+@inproceedings{Siek06,
+ author = "Siek, Jeremy G. and Taha, Walid",
+ title = {{Gradual Typing for Functional Languages}},
+ booktitle = "Proc. of Scheme and Functional Programming Workshop",
+ year = "2006",
+ publisher = "ACM",
+ pages = "8192",
+ abstract =
+ "Static and dynamic type systems have wellknown strengths and
+ weaknesses, and each is better suited for different programming
+ tasks. There have been many efforts to integrate static and dynamic
+ typing and thereby combine the benefits of both typing disciplines in
+ the same language. The flexibility of static typing can be improved
+ by adding a type Dynamic and a typecase form. The safety and
+ performance of dynamic typing can be improved by adding optional type
+ annotations or by performing type inference (as in soft
+ typing). However, there has been little formal work on type systems
+ that allow a programmercontrolled migration between dynamic and
+ static typing. Thatte proposed QuasiStatic Typing, but it does not
+ statically catch all type errors in completely annotated
+ programs. Anderson and Drossopoulou defined a nominal type system
+ for an objectoriented language with optional type annotations.
+ However, developing a sound, gradual type system for functional
+ languages with structural types is an open problem.
+
+ In this paper we present a solution based on the intuition that the
+ structure of a type may be partially known/unknown at compile time
+ and the job of the type system is to catch incompatibilities between
+ the known parts of types. We define the static and dynamic semantics
+ of a $\lambda$-calculus with optional type annotations and we prove that
+ its type system is sound with respect to the simply-typed
+ $\lambda$-calculus for fully-annotated terms. We prove that this
+ calculus is type safe and that the cost of dynamism is
+ 'payasyougo'.",
+ paper = "Siek06.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Bauer, Andrej}
+\begin{chunk}{axiom.bib}
+@misc{Baue19,
+ author = "Bauer, Andrej",
+ title = {{How to Implement Type Theory in an Hour}},
+ year = "2019",
+ link = "\url{https://vimeo.com/286652934}",
+ comment = "\url{https://github.com/andrejbauer/spartantypetheory}"
+}
+
+\end{chunk}
+
+\index{Coquand, Thierry}
+\index{Kinoshita, Yoshiki}
+\index{Nordstrom, Bengt}
+\index{Takeyama, Makoto}
+\begin{chunk}{axiom.bib}
+@misc{Coqu19,
+ author = "Coquand, Thierry and Kinoshita, Yoshiki and Nordstrom, Bengt
+ and Takeyama, Makoto",
+ title = {{A Simple TypeTheoretic Language: MiniTT}},
+ year = "2019",
+ link = "\url{http://www.cse.chalmers.se/~bengt/papers/GKminiTT.pdf}",
+ abstract =
+ "This paper presents a formal description of a small functional
+ language with dependent types. The language contains data types,
+ mutual re cursive/inductive definitions and a universe of small
+ types. The syntax, semantics and type system is specified in such a
+ way that the imple mentation of a parser, interpreter and type
+ checker is straightforward. The main difficulty is to design the
+ conversion algorithm in such a way that it works for open expressions.
+ The paper ends with a complete implementation in Haskell (around 400
+ lines of code).",
+ paper = "Coqu19.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Pfenning, Frank}
+\begin{chunk}{axiom.bib}
+@misc{Pfen06,
+ author = "Pfenning, Frank",
+ title = {{Constraint Logic Programming}},
+ year = "2006",
+ link = "\url{http://www.cs.cmu.edu/~fp/courses/lp/lectures/27clp.pdf}",
+ paper = "Pfen06.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Pfenning, Frank}
+\begin{chunk}{axiom.bib}
+@misc{Pfen07,
+ author = "Pfenning, Frank",
+ title = {{Logic Programming}},
+ year = "2007",
+ link = "\url{http://www.cs.cmu.edu/~fp/courses/lp/lectures/lpall.pdf}",
+ paper = "Pfen07.pdf"
+}
+
+\end{chunk}
+
+\index{Tennant, Neil}
+\begin{chunk}{axiom.bib}
+@book{Tenn90,
+ author = "Tennant, Neil",
+ title = {{Natural Logic}},
+ publisher = "Edinburgh University Press",
+ isbn = "0852245793",
+ year = "1990",
+ paper = "Tenn90.pdf"
+}
+
+\end{chunk}
+
+\index{Velleman, Daniel J.}
+\begin{chunk}{axiom.bib}
+@book{Vell06,
+ author = "Velleman, Daniel J.",
+ title = {{How To Prove It}},
+ publisher = "Cambridge University Press",
+ year = "2006",
+ isbn = "9780511161162",
+ paper = "Vell06.pdf"
+}
+
+\end{chunk}
+
+\index{Chiswell, Ian}
+\index{Hodges, Wilfrid}
+\begin{chunk}{axiom.bib}
+@book{Chis07,
+ author = "Chiswell, Ian and Hodges, Wilfrid",
+ title = {{Mathematical Logic}},
+ publisher = "Oxford University Press",
+ year = "2007",
+ isbn = "9780198571001",
+ paper = "Chis07.pdf"
+}
+
+\end{chunk}
+
+\index{Bostock, David}
+\begin{chunk}{axiom.bib}
+@book{Bost97,
+ author = "Bostock, David",
+ title = {{Intermediate Logic}},
+ publisher = "Clarendon Press",
+ year = "1997",
+ isbn = "0198751419",
+ paper = "Bost97.pdf"
+}
+
+\end{chunk}
+
+\index{Enderton, Herbert B.}
+\begin{chunk}{axiom.bib}
+@book{Ende01,
+ author = "Enderton, Herbert B.",
+ title = {{A Mathematical Introduction to Logic}},
+ publisher = "Harcourt Academic Press",
+ year = "2001",
+ isbn = "0122384520",
+ paper = "Ende01.pdf"
+}
+
+\end{chunk}
+
+\index{van Dalen, Dirk}
+\begin{chunk}{axiom.bib}
+@book{Dale08,
+ author = "van Dalen, Dirk",
+ title = {{Logic and Structure}},
+ publisher = "Springer",
+ year = "2008",
+ isbn = "9783540208792",
+ paper = "Dale08.pdf"
+
+}
+
+\end{chunk}
+
+\index{Smith, Peter}
+\begin{chunk}{axiom.bib}
+@book{Smit13,
+ author = "Smith, Peter",
+ title = {{An Introduction to Godel's Theorems}},
+ publisher = "Cambridge University Press",
+ year = "2013",
+ isbn = "9781107022843",
+ paper = "Smit13.pdf"
+}
+
+\end{chunk}
+
+\index{Leary, Christopher C.}
+\index{Kristiansen, Lars}
+\begin{chunk}{axiom.bib}
+@book{Lear15,
+ author = "Leary, Christopher C. and Kristiansen, Lars",
+ title = {{A Friendly Introduction to Mathematical Logic}},
+ publisher = "Milne Library, Geneseo",
+ year = "2015",
+ isbn = "9781942341321",
+ paper = "Lear15.pdf"
+
+}
+
+\end{chunk}
+
+\index{Cutland, Nigel}
+\begin{chunk}{axiom.bib}
+@book{Cutl80,
+ author = "Cutland, Nigel",
+ title = {{Computability: An Introduction to Recursive Function Theory}},
+ publisher = "Cambridge University Press",
+ year = "1980",
+ isbn = "0521223849",
+ paper = "Cutl80.pdf"
+}
+
+\end{chunk}
+
+\index{Hrbacek, Karel}
+\index{Jech, Thomas}
+\begin{chunk}{axiom.bib}
+@book{Hrba99,
+ author = "Hrbacek, Karel and Jech, Thomas",
+ title = {{Introduction to Set Theory}},
+ publisher = "Marcel Dekker, Inc",
+ year = "1999",
+ isbn = "0824779150",
+ paper = "Hrba99.pdf"
+}
+
+\end{chunk}
+
+\index{Roitman, Judith}
+\begin{chunk}{axiom.bib}
+@misc{Roit13,
+ author = "Roitman, Judith",
+ title = {{Introduction to Modern Set Theory}},
+ link = "\url{http://www.math.ku.edu/~roitman/SetTheory.pdf}",
+ year = "2013",
+ paper = "Roit13.pdf"
+}
+
+\end{chunk}
+
+\index{Fitting, Melvin}
+\begin{chunk}{axiom.bib}
+@book{Fitt69,
+ author = "Fitting, Melvin",
+ title = {{Intuitionistic Logic Model Theory and Forcing}},
+ publisher = "North Holland",
+ year = "1969",
+ paper = "Fitt69.pdf"
+}
+
+\end{chunk}
+
+\index{Popkorn, Sally}
+\index{Simmons, Harold}
+\begin{chunk}{axiom.bib}
+@book{Popk94,
+ author = "Popkorn, Sally",
+ title = {{First Steps in Modal Logic}},
+ publisher = "Cambridge University Press",
+ year = "1994",
+ isbn = "9780521464826",
+ link =
+ "\url{https://cs.famaf.unc.edu.ar/~careces/ml18/downloads/popkorn.pdf}",
+ paper = "Popk94.pdf"
+}
+
+\end{chunk}
+
+\index{Sipser, Michael}
+\begin{chunk}{axiom.bib}
+@book{Sips13,
+ author = "Sipser, Michael",
+ title = {{Introduction to the Theory of Computation}},
+ publisher = "Cengage Learning",
+ year = "2013",
+ isbn = "9781133187790",
+ link = "\url{https://theswissbay.ch/pdf/Book/Introduction%20to%20the%20theory%20of%20computation_third%20edition%20%20Michael%20Sipser.pdf}",
+ paper = "Sips13.pdf"
+}
+
+\end{chunk}
+
+\index{Smullyan, Raymond M.}
+\begin{chunk}{axiom.bib}
+@book{Smul92,
+ author = "Smullyan, Raymond M.",
+ title = {{Godel's Incompleteness Theorems}},
+ publisher = "Oxford University Press",
+ year = "1992",
+ isbn = "0195046722",
+ paper = "Smul92.pdf"
+}
+
+\end{chunk}
+
+\index{Smith, Peter}
+\begin{chunk}{axiom.bib}
+@misc{Smit17,
+ author = "Smith, Peter",
+ title = {{Teach Yourself Logic 2017: A Study Guide}},
+ link =
+"\url{https://www.logicmatters.net/resources/pdfs/TeachYourselfLogic2017.pdf}",
+ year = "2017",
+ paper = "Smit17.pdf"
+}
+
+\end{chunk}
+
+\index{Murphy, Robin R.}
+\begin{chunk}{axiom.bib}
+@book{Murp18,
+ author = "Murphy, Robin R.",
+ title = {{Robotics Through Science Fiction}},
+ publisher = "MIT Press",
+ year = "2018",
+ link =
+ "\url{https://mitpress.mit.edu/books/roboticsthroughsciencefiction}",
+ isbn = "9780262536264",
+ comment = "verification, validation, and trust"
+
+}
+
+\end{chunk}
+
+\index{Kaminski, Paul}
+\begin{chunk}{axiom.bib}
+@techreport{Kami12,
+ author = "Kaminski, Paul",
+ title = {{The Role of Autonomy in DoD Systems}},
+ type = "Task Force Report",
+ institution = "Defense Science Board, Dept. of Defense",
+ year = "2012",
+ link = "\url{https://fas.org/irp/agency/dod/dsb/autonomy.pdf}",
+ comment = "verification, validation, and trust"
+
+}
+
+\end{chunk}
+
+\index{Siciliano, Bruno}
+\index{Khatib, Oussama}
+\begin{chunk}{axiom.bib}
+@book{Sici16,
+ author = "Siciliano, Bruno and Khatib, Oussama",
+ title = {{Springer Handbook of Robotics}},
+ publisher = "Springer",
+ year = "2016",
+ abstract =
+ "Reaching for the human frontier, robotics is vigorously engaged in
+ the growing challenges of new emerging domains. Interacting,
+ exploring, and working with humans, the new generation of robots will
+ increasingly touch people and their lives. The credible prospect of
+ practical robots among humans is the result of the scientific
+ endeavour of a half a century of robotic developments that established
+ robotics as a modern scientific discipline. The Springer Handbook of
+ Robotics brings a widespread and wellstructured compilation of
+ classic and emerging application areas of robotics. From the
+ foundations to the social and ethical implications of robotics, the
+ handbook provides a comprehensive collection of the accomplishments in
+ the field, and constitutes a premise of further advances towards new
+ challenges in robotics. This handbook, edited by two internationally
+ renowned scientists with the support of an outstanding team of seven
+ part editors and onehundred sixtyfour authors, is an authoritative
+ reference for robotics researchers, newcomers to the field, and
+ scholars from related disciplines such as biomechanics, neurosciences,
+ virtual simulation, animation, surgery, and sensor networks among
+ others.",
+ paper = "Sici16.pdf"
+}
+
+\end{chunk}
+
+\index{MartinL\"of, P.}
+\begin{chunk}{axiom.bib}
+@article{Mart84,
+ author = "MartinL\"of, P.",
+ title = {{Constructive Mathematics and Computer Programming}},
+ journal = "Phil. Trans. Royal Society London",
+ volume = "312",
+ year = "1984",
+ pages = "501518",
+ link =
+ "\url{https://royalsocietypublishing.org/doi/pdf/10.1098/rsta.1984.0073}",
+ abstract =
+ "If programming is understood not as the writing of instructions for
+ this or that computing machine but as the design of methods of
+ computation that it is the computer’s duty to execute (a difference
+ that Dijkstra has referred to as the difference between computer
+ science and computing science), then it no longer seems possible to
+ distinguish the discipline of programming from constructive
+ mathematics. This explains why the intuitionistic theory of types
+ (MartinLof 1975 In Logic Colloquium 1973 (ed. H. E. Rose and
+ J. C. Shepherdson), pp. 73  118 . Amsterdam: North Holland), which
+ was originally developed as a symbolism for the precise codification
+ of constructive mathematics, may equally well be viewed as a
+ programming language. As such it provides a precise notation not
+ only, like other programming languages, for the programs themselves
+ but also for the tasks that the programs are supposed to perform.
+ Moreover, the inference rules of the theory of types, which are again
+ completely formal, appear as rules of correct program synthesis. Thus
+ the correctness of a program written in the theory of types is proved
+ formally at the same time as it is being synthesized.",
+ paper = "Mart84.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Oberhoff, Sebastian}
+\begin{chunk}{axiom.bib}
+@misc{Ober18,
+ author = "Oberhoff, Sebastian",
+ title = {{Incompleteness Ex Machina}},
+ year = "2018",
+ abstract =
+ "In this essay we'll prove Godel's incompleteness theorems
+ twice. First, we'll prove them the good oldfashioned way. Then
+ we'll repeat the feat in the setting of computation. In the
+ process we'll discover that Godel's work, rightly viewed, needs to
+ be split into two parts: the transport of computation into the
+ arena of arithmetic on the one hand and the actual incompleteness
+ theorems on the other. After we're done there will be cake.",
+ paper = "Ober18.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Deutsch, David}
+\begin{chunk}{axiom.bib}
+@article{Deut85,
+ author = "Deutsch, David",
+ title = {{Quantum Theory, the ChurchTuring Principle and the
+ Universal Quantum Computer}},
+ journal = "Proc. Royal Society of London",
+ volume = "400",
+ pages = "97117",
+ year = "1985",
+ abstract =
+ "It is argued that underlying the ChurchTuring hypothesis there
+ is an implicit physical assertion. Here, this assertion is
+ presented explicitly as a physical principle: 'every finitely
+ realizable physical system can be perfectly simulated by a
+ universal model computing machine operating by finite
+ means'. Classical physics and the universal Turing machine,
+ because the former is continuous and the latter discrete, do not
+ obey the principle, at least in the strong form above. A class of
+ model computing machines that is the quantum generalization of the
+ class of Turing machines is described, and it is shown that
+ quantum theory and the 'universal quantum computer' are compatible
+ with the principle. Computing machines resembling the universal
+ quantum computer could, in principle, be built and would have many
+ remarkable properties not reproducible by any Turing
+ machine. These do not include the computation of nonrecursive
+ functions, but they do include 'quantum parallelism', a method by
+ which certain probabilistic tasks can be performed faster by a
+ universal quantum computer than by any classical restriction of
+ it. The intuitive explanation of these properties places an
+ intolerable strain on all interpretations of quantum theory other
+ than Everett's. Some of the numerous connections between the
+ quantum theory of computation and the rest of physics are
+ explored. Quantum complexity theory allows a physically more
+ reasonable definition of the 'complexity' or 'knowledge' in a
+ physical system than does classical complexity theory.",
+ paper = "Deut85.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Antoy, Sergio}
+\index{Peters, Arthur}
+\begin{chunk}{axiom.bib}
+@article{Anto12,
+ author = "Antoy, Sergio and Peters, Arthur",
+ title = {{Compiling a Functional Logic Language}},
+ booktitle = "11th Int. Symp. on Functional and Logic Programming",
+ journal = "LNCS",
+ volume = "7294",
+ pages = "1731",
+ year = "2012",
+ abstract =
+ "We present the design of a compiler for a functional logic
+ programming language and discuss the compiler's implementation.
+ The source program is abstracted by a constructor based graph
+ rewriting system obtained from a functional logic program after
+ syntax desugaring, lambda lifting and similar transformations
+ provided by a compiler's frontend. This system is
+ nondeterministic and requires a specialized normalization
+ strategy. The target program consists of 3 procedures that execute
+ graph replacements originating from either rewrite or pulltab
+ seps. These procedures are deterministic and easy to encode in an
+ ordinary programming language. We describe the generation of the 3
+ procedures, discuss the correctness of our approach, highlight
+ some key elements of an implementation, and benchmark the
+ performance of a proof of concept. Our compilation scheme is
+ elegant and simple enough to be presented in one page.",
+ paper = "Anto12.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{MacQueen, David B.}
+\begin{chunk}{axiom.bib}
+@misc{Macqxx,
+ author = "MacQueen, David B.",
+ title = {{Reflections on Standard ML}},
+ year = "unknown",
+ abstract =
+ "Standard ML is one of a number of new programming languages
+ developed in the 1980s that are seen as suitable vehicles for
+ serious systems and applications programming. It offers an
+ excellent ratio of expressiveness to language complexity, and
+ provides competitive efficiency. Because of its type and module
+ system, Standard ML manages to combine safety, security, and
+ robustness with much of the flexibility of dynamically typed
+ languages like Lisp. It is also has the most welldeveloped
+ scientific foundation of any major language. Here I review the
+ strengths and weaknesses of Standard ML and describe some of what
+ we have learned through the design, implementation, and use of the
+ language.",
+ paper = "Macqxx.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Tarver, Mark}
+\begin{chunk}{axiom.bib}
+@misc{Tarvxx,
+ author = "Tarver, Mark",
+ title = {{A Language for Implementing Arbitrary Logics}},
+ year = "unknown",
+ abstract =
+ "SEQUEL is a newgeneration functional programming language, which
+ allows the specification of types in a notation based on the
+ sequent calculus. The sequent calculus notation suffices for the
+ construction of types for type checking and for the specification
+ of arbitrary logics. Compilation techniques derived from both
+ functional and logic programming are used to derive
+ highperformance ATPs from these specifications.",
+ paper = "Tarvxx.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Collofello, James S.}
+\begin{chunk}{axiom.bib}
+@techreport{Coll88,
+ author = "Collofello, James S.",
+ title = {{Introduction to Software Verification and Validation}},
+ type = "technical report",
+ institution = "Carnegie Mellon University",
+ number = "SEICM131.1",
+ year = "1988",
+ abstract =
+ "SEI curriculum modules document and explication software
+ engineering topics. They are intended to be useful in a variety of
+ situations  in the preparation of courses, in the planning of
+ individual lectures, in the design of curricula, and in
+ professional selfstudy. Topics do not exist in isolation,
+ however, and the question inevitably arises how one module is
+ related to another. Because modules are written by different
+ authors at different times, the answer could easily be 'not at
+ all'. In a young field struggling to define itself, this would be
+ an unfortunate situation.
+
+ The SEI deliberately employs a number of mechanisms to achieve
+ compatible points of view across curriculum modules and to fill
+ the content gaps between them. Modules such as {\sl Introduction
+ to Software Verification and Validation} is one of the most
+ important devices. In this latest revision, Professor Collofello
+ has more modules to integrate into a coherent picture than when we
+ released his first draft more than a year ago  modules on
+ quality assurance, unit testing, technical reviews, and formal
+ verification, as well as less directly related modules on
+ specification, requirements definition, and design. We believe you
+ will find this curriculum module interesting and useful, both in
+ its own right and by virtue of the understanding of other modules
+ that it facilitates.",
+ paper = "Coll88.pdf",
+ keywords = "printed"
+
+}
+
+\end{chunk}
+
+\index{Gravel, Katherine}
+\index{Jananthan, Hayden}
+\index{Kepner, Jeremy}
+\begin{chunk}{axiom.bib}
+@misc{Grav18,
+ author = "Gravel, Katherine and Jananthan, Hayden and Kepner, Jeremy",
+ title = {{Visually Representing the Landscape of Mathematical Structures}},
+ link = "\url{https://arxiv.org/abs/1809.05930}",
+ year = "2018",
+ abstract =
+ "The information technology explosion has dramatically increased
+ the application of new mathematical ideas and has led to an
+ increasing use of mathematics across a wide range of fields that
+ have been traditionally labeled 'pure' or 'theoretical'. There is
+ a critical need for tools to make these concepts readily
+ accessible to a broader community. This paper details the creation
+ of visual representations of mathematical structures commonly
+ found in pure mathematics to enable both students and
+ professionals to rapidly understand the relationships among and
+ between mathematical structures. Ten broad mathematical structures
+ were identified and used to make eleven maps, with 187 unique
+ structures in total. The paper presents a method and
+ implementation for categorizing mathematical structures and for
+ drawing relationships between mathematical structures that
+ provides insight for a wide audience. The most in depth map is
+ available online for public use.",
+ comment = "\url{http://www.mit.edu/~kepner/GravelMathMap.pdf}",
+ paper = "Grav18.pdf"
+}
+
+\end{chunk}
+
+\index{Hoare, C.A.R}
+\begin{chunk}{axiom.bib}
+@article{Hoar74,
+ author = "Hoare, C.A.R",
+ title = {{Hints on Programming Language Design}},
+ journal = "Computer Systems Reliability",
+ volume = "20",
+ pages = "505534",
+ year = "1974",
+ link = "\url{http://flint.cs.yale.edu/cs428/doc/HintsPL.pdf}",
+ abstract =
+ "This paper presents the view that a programming language
+ is a tool that should assist the programmer in the most difficult
+ aspects of his art, namely program design, documentation, and
+ debugging. It discusses the objective criteria for evaluating a
+ language design and illustrates them by application to language
+ features of both highlevel languages and machinecode
+ programming. It concludes with an annotated reading list,
+ recommended for all intending language designers.",
+ paper = "Hoar74.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Hemann, Jason}
+\index{Friedman, Daniel P.}
+\begin{chunk}{axiom.bib}
+@misc{Hema13,
+ author = "Hemann, Jason and Friedman, Daniel P.",
+ title = {{uKanren: A Minimal Functional Core for Relational Programming}},
+ year = "2013",
+ link = "\url{webyrd.net/scheme2013/papers/HemannMuKanren2013.pdf}",
+ abstract =
+ "This paper presents $\mu$Kanren, a minimalist language in the
+ miniKanren family of relational (logic) programming languages. Its
+ implementation comprises fewer than 40 lines of Scheme. We
+ motivate the need for a minimalist miniKanren language, and
+ iteratively develop a complete search strategy. Finally, we
+ demonstrate that through sufficient userlevel features one
+ regains much of the expressiveness of other miniKanren
+ languages. In our opinion its brevity and simple semantics make
+ $\mu$Kanren uniquely elegant.",
+ paper = "Hema13.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Cardelli, Luca}
+\begin{chunk}{axiom.bib}
+@techreport{Card93,
+ author = "Cardelli, Luca",
+ title = {{Typeful Programming}},
+ type = "research report",
+ year = "1993",
+ institution = "Digital Equipment Corporation",
+ number = "SRC Research Report 45",
+ abstract =
+ "There exists an identifiable programming style based on the
+ widespread use of type information handled through mechanical
+ typechecking techniques.
+
+ This typeful programming style is in a sense independent of the
+ language it is embedded in; it adapts equally well to functional,
+ imperative, objectoriented, and algebraic programming, and it is
+ not incompatible with relational and concurrent programming.
+
+ The main purpose of this paper is to show how typeful programming
+ is best supported by sophisticated type systems, and how these
+ systems can help in clarifying programming issues and in adding
+ power and regularity to languages.
+
+ We start with an introduction to the notions of types, subtypes
+ and polymorphism. Then we introduce a general framework, derived
+ in part from constructive logic, into which most of the known type
+ systems can be accommodated and extended. The main part of the
+ paper shows how this framework can be adapted systematically to
+ cope with actual programming constructs. For concreteness we
+ describe a particular programming language with advanced features;
+ the emphasis here is on the combination of subtyping and
+ polymorphism. We then discuss how typing concepts apply to large
+ programs. We also sketch how typing applies to system programming;
+ an area which by nature escapes rigid typing. In summary, we
+ compare the most common programming styles, suggesting that many
+ of them are compatible with, and benefit from, a typeful discipline.",
+ paper = "Card93.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Lamport, Leslie}
+\index{Paulson, Lawrence C.}
+\begin{chunk}{axiom.bib}
+@article{Lamp99,
+ author = "Lamport, Leslie and Paulson, Lawrence C.",
+ title = {{Should Your Specification Language Be Typed?}},
+ journal = "ACM Trans. on Programming Languages and Systems",
+ volume = "21",
+ number = "3",
+ year = "1999",
+ pages = "502526",
+ abstract =
+ "Most specification languages have a type system. Type systems are
+ hard to get right, and getting them wrong can lead to
+ inconsistencies. Set theory can serve as the basis for a
+ specification language without types. This possibility, which has
+ been widely overlooked, offers many advantages. Untyped set theory
+ is simple and is more flexible than any simple typed
+ formalism. Polymorphism, overloading, and subtyping can make a
+ type system more powerful, but at the cost of increased
+ complexity, and such refinements can never attain the flexibility
+ of having no types at all. Typed formalisms have advantages too,
+ stemming from the power of mechanical type checking. While types
+ serve little purpose in hand proofs, they do help with mechanized
+ proofs. In the absence of verification, type checking can catch
+ errors in specifications. It may be possible to have the best of
+ both worlds by adding typing annotations to an untyped
+ specification language. We consider only specification languages,
+ not programming languages",
+ paper = "Lamp99.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Appel, Andrew W.}
+\begin{chunk}{axiom.bib}
+@article{Appe98a,
+ author = "Appel, Andrew W.",
+ title = {{SSA is Functional Programming}},
+ journal = "SIGPLAN Notices",
+ year = "1998",
+ abstract =
+ "Static SingleAssignment (SSA) form is an intermediate language
+ designed to make optimization clean and efficient for imperative
+ language (Fortran, C) compilers. LambdaCalculus is an
+ intermediate language that makes optimization clean and efficient
+ for functional language (Scheme, ML, Haskell) compilers. The SSA
+ community draws pictures of graphs with basic blocks and flow
+ edges, and the functional language community writes lexically
+ nested functions, but (as Richard Kelsey recently pointed out)
+ they're both doing exactly the same thing in different notation.",
+ paper = "Appe98a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Cloostermans, Bouke}
+\begin{chunk}{axiom.bib}
+@misc{Cloo12,
+ author = "Cloostermans, Bouke",
+ title = {{QuasiLinear GCD Computation and Factoring RSA Moduli}},
+ school = "Eindhoven Univ. of Technology",
+ comment = "Bachelor Project",
+ year = "2012",
+ abstract =
+ "This bachelor project consists of two parts. First, we describe
+ two subquadratic GCD algorithms and test our implementation of
+ these algorithms. Both algorithms work in a similar recursive way
+ and have a running time of $O(n log^2n log log n)$ where $n$ is
+ the amount of digits of the input. Second, we describe and compare
+ three algorithms which compute GCD's of more than two numbers. Two
+ algorithms use a treelike structure to compute these GCD's and the
+ other algorithm builds a coprime base from the input set. The
+ algorithms which uses a treelike structure to compute these GCD's
+ is faster than building a coprime base although all algorithms are
+ quasilinear. The downside of these algorithm is that they only run
+ well if few common factors exist in the input set. These
+ algorithms have been used to factorize roughly 0.4\% of a 11.1
+ million RSA keys dataset. However, this is only possible if the
+ keys are badly generated.",
+ paper = "Cloo12.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Roessle, Ian}
+\index{Verbeek, Freek}
+\index{Ravindran, Binoy}
+\begin{chunk}{axiom.bib}
+@inproceedings{Roes19,
+ author = "Roessle, Ian and Verbeek, Freek and Ravindran, Binoy",
+ title = {{Formally Verified Big Step Semantics out of x8664 Binaries}},
+ booktitle = "CPP'19",
+ year = "2019",
+ publisher = "ACM",
+ abstract =
+ "This paper presents a methodology for generating formally proven
+ equivalence theorems between decompiled x8664 machine code and
+ big step semantics. These proofs are built on top of two
+ additional contributions. First, a robust and tested formal x8664
+ machine model containing small step semantics for 1625
+ instructions. Second, a decompilation into logic methodology
+ supporting both x8664 assembly and machine code at large
+ scale. This work enables blackbox binary verification, i.e.,
+ formal verification of a binary where source code is
+ unavailable. As such, it can be applied to safetycritical systems
+ that consist of legacy components, or components whose source code
+ is unavailable due to proprietary reasons. The methodology
+ minimizes the trusted code base by leveraging machinelearned
+ semantics to build a formal machine model. We apply the
+ methodology to several case studies, including binaries that
+ heavily rely on the SSE2 floatingpoint instruction set, and
+ binaries that are obtained by compiling code that is obtained by
+ inlining assembly into C code.",
+ paper = "Roes19.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kaliszyk, Cezary}
+\index{Urban, Josef}
+\begin{chunk}{axiom.bib}
+@misc{Kali14,
+ author = "Kaliszyk, Cezary and Urban, Josef",
+ title = {{Learning-Assisted Automated Reasoning with Flyspeck}},
+ comment = "arXiv:1211.7012v3",
+ year = "2014",
+ abstract =
+ "The considerable mathematical knowledge encoded by the Flyspeck
+ project is combined with external automated theorem provers (ATPs)
+ and machine-learning premise selection methods trained on the
+ Flyspeck proofs, producing an AI system capable of proving a wide
+ range of mathematical conjectures automatically. The performance
+ of this architecture is evaluated in a bootstrapping scenario
+ emulating the development of Flyspeck from axioms to the last
+ theorem, each time using only the previous theorems and proofs. It
+ is shown that 39\% of the 14185 theorems could be proved in a
+ push-button mode (without any high-level advice and user
+ interaction) in 30 seconds of real time on a fourteen-CPU
+ workstation.
+
+ The necessary work involves: (i) an implementation of sound
+ translations of the HOL Light logic to ATP formalisms: untyped
+ first-order, polymorphic typed first-order, and typed first-order,
+ (ii) export of the dependency information from HOL Light and ATP
+ proofs for the machine learners, and (iii) choice of suitable
+ representations and methods for learning from previous proofs, and
+ their integration as advisors with HOL Light. This work is
+ described and discussed here, and an initial analysis of the body
+ of proofs that were found fully automatically is provided.",
+ paper = "Kali14.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Mueller, Robert A.}
+\index{Page, L. Rex}
+\begin{chunk}{axiom.bib}
+@book{Muel88,
+ author = "Mueller, Robert A. and Page, L. Rex",
+ title = {{Symbolic Computing with Lisp and Prolog}},
+ publisher = "Wiley",
+ year = "1988",
+ isbn = "0471607711"
+}
+
+\end{chunk}
+
+\index{Stump, Aaron}
+\begin{chunk}{axiom.bib}
+@misc{Stum18,
+ author = "Stump, Aaron",
+ title = {{Syntax and Semantics of Cedille}},
+ year = "2018",
+ comment = "arXiv:1806.04709v1",
+ paper = "Stum18.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Geuvers, Herman}
+\begin{chunk}{axiom.bib}
+@article{Geuv00,
+ author = "Geuvers, Herman",
+ title = {{Induction Is Not Derivable in Second Order Dependent Type Theory}},
+ journal = "LNCS",
+ volume = "2044",
+ year = "2000",
+ pages = "166--181",
+ abstract =
+ "This paper proves the nonderivability of induction in second
+ order dependent type theory ($\lambda{}P2$). This is done by
+ providing a model construction for $\lambda{}P2$, based on a
+ saturated sets like interpretation of types as sets of terms of a
+ weakly extensional combinatory algebra. We give countermodels in
+ which the induction principle over natural numbers is not
+ valid. The proof does not depend on the specific encoding for
+ natural numbers that has been chosen (like e.g. polymorphic Church
+ numerals), so in fact we prove that there can not be an encoding
+ of natural numbers in $\lambda{}P2$ such that the induction
+ principle is satisfied. The method extends immediately to other
+ data types, like booleans, lists, trees, etc.
+
+ In the process of the proof we establish some general properties
+ of the models, which we think are of independent
+ interest. Moreover, we show that the Axiom of Choice is not
+ derivable in $\lambda{}P2$.",
+ paper = "Geuv00.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Sjoberg, Vilhelm}
+\begin{chunk}{axiom.bib}
+@phdthesis{Sjob15,
+ author = "Sjoberg, Vilhelm",
+ title = {{A Dependently Typed Language with Non-Termination}},
+ school = "University of Pennsylvania",
+ year = "2015",
+ abstract =
+ "We propose a full-spectrum dependently typed programming
+ language, {\sl Zombie}, which supports general recursion
+ natively. The Zombie implementation is an elaborating
+ typechecker. We prove type safety for a large subset of the Zombie
+ core language, including features such as computational
+ irrelevance, CBV-reduction, and propositional equality with a
+ heterogeneous, completely erased elimination form. Zombie does not
+ automatically $\beta$-reduce expressions, but instead uses
+ congruence closure for proof and type inference. We give a
+ specification of a subset of the surface language via a
+ bidirectional type system, which works ``up-to-congruence'', and
+ an algorithm for elaborating expressions in this language to an
+ explicitly typed core language. We prove that our elaboration
+ algorithm is complete with respect to the source type
+ system. Zombie also features an optional termination-checker,
+ allowing non-terminating programs returning proofs as well as
+ external proofs about programs.",
+ paper = "Sjob15.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Cervesato, Iliano}
+\index{Pfenning, Frank}
+\begin{chunk}{axiom.bib}
+@inproceedings{Cerv96,
+ author = "Cervesato, Iliano and Pfenning, Frank",
+ title = {{A Linear Logical Framework}},
+ booktitle = "Logic in Computer Science",
+ publisher = "IEEE",
+ year = "1996",
+ abstract =
+ "We present the linear type theory LLF as the formal basis for a
+ conservative extension of the LF logical framework. LLF combines
+ the expressive power of dependent types with linear logic to
+ permit the natural and concise representation of a whole new class
+ of deductive systems, namely those dealing with state. As an
+ example we encode a version of Mini-ML with references including
+ its type system, its operational semantics, and a proof of type
+ preservation. Another example is the encoding of a sequent
+ calculus for classical linear logic and its cut elimination
+ theorem. LLF can also be given an operational interpretation as a
+ logic programming language under which the representations above
+ can be used for type inference, evaluation and cut-elimination.",
+ paper = "Cerv96.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Cervesato, Iliano}
+\index{Pfenning, Frank}
+\begin{chunk}{axiom.bib}
+@article{Cerv00,
+ author = "Cervesato, Iliano and Pfenning, Frank",
+ title = {{A Linear Logical Framework}},
+ journal = "Information and Computation",
+ volume = "179",
+ number = "1",
+ pages = "19--75",
+ link = "\url{http://www.cs.cmu.edu/~fp/papers/llf00.pdf}",
+ year = "2002",
+ abstract =
+ "We present the linear type theory $\lambda^{\Pi\multimap\&\top}$ as the
+ formal basis for LLF, a conservative extension of the logical
+ framework LF. LLF combines the expressive power of dependent types
+ with linear logic to permit the natural and concise representation
+ of a whole new class of deductive systems, namely those dealing
+ with state. As an example we encode a version of Mini-ML with
+ mutable references including its type system and its operational
+ semantics, and describe how to take practical advantage of the
+ representation of its computations.",
+ paper = "Cerv00.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Girard, JeanYves}
+\begin{chunk}{axiom.bib}
+@book{Gira95,
+ author = "Girard, JeanYves",
+ title = {{Linear Logic: Its Syntax and Semantics}},
+ publisher = "Cambridge University Press",
+ year = "1995",
+ link = "\url{http://girard.perso.math.cnrs.fr/Synsem.pdf}",
+ isbn = "9780521559614",
+ paper = "Gira95.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Girard, JeanYves}
+\begin{chunk}{axiom.bib}
+@article{Gira87,
+ author = "Girard, Jean-Yves",
+ title = {{Linear Logic}},
+ journal = "Theoretical Computer Science",
+ volume = "50",
+ year = "1987",
+ pages = "1--102",
+ abstract =
+ "The familiar connective of negation is broken into two
+ operations: linear negation which is the purely negative part of
+ negation and the modality ``of course'' which has the meaning of a
+ reaffirmation. Following this basic discovery, a completely new
+ approach to the whole area between constructive logics and
+ programmation is initiated.",
+ paper = "Gira87.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Knill, Oliver}
+\begin{chunk}{axiom.bib}
+@misc{Knil18,
+ author = "Knill, Oliver",
+ title = {{Some Fundamental Theorems in Mathematics}},
+ link = "\url{https://arxiv.org/pdf/1807.08416.pdf}",
+ year = "2018",
+ abstract =
+ "An expository hitchhikers guide to some theorems in mathematics",
+ paper = "Knil18.pdf"
+}
+
+\end{chunk}
+
+\index{Martzloff, J.C.}
+\begin{chunk}{axiom.bib}
+@book{Mart97a,
+ author = "Martzloff, J.C.",
+ title = {{A History of Chinese Mathematics}},
+ publisher = "Springer-Verlag",
+ year = "1997"
+}
+
+\end{chunk}
+
+\index{Ding, C.}
+\index{Pei, D.}
+\index{Salomaa, A.}
+\begin{chunk}{axiom.bib}
+@book{Ding96,
+ author = "Ding, C. and Pei, D. and Salomaa, A.",
+ title = {{Chinese Remainder Theorem, Applications in Computing,
+ Coding, Cryptography}},
+ publisher = "World Scientific",
+ year = "1996"
+}
+
+\end{chunk}
+
+\index{Knill, Oliver}
+\begin{chunk}{axiom.bib}
+@misc{Knil12,
+ author = "Knill, Oliver",
+ title = {{A Multivariate Chinese Remainder Theorem}},
+ link = "\url{https://arxiv.org/abs/1206.5114}",
+ year = "2012",
+ abstract =
+ "Using an adaptation of Qin Jiushao's method from the 13th
+ century, it is possible to prove that a system of linear modular
+ equations $a(i,1)x(1)+\cdots+a(i,n)x(n)=b(i)\bmod m(i),i=1,\ldots,n$ has
+ integer solutions if $m(i)>1$ are pairwise relatively prime and in
+ each row, at least one matrix element $a(i,j)$ is relatively prime
+ to $m(i)$. The Chinese remainder theorem is a special case, where
+ $A$ has only one column.",
+ paper = "Knil12.pdf"
+}
+
+\end{chunk}
diff --git a/src/axiom-website/patches.html b/src/axiom-website/patches.html
index 450402b..ea9d912 100644
--- a/src/axiom-website/patches.html
+++ b/src/axiom-website/patches.html
@@ -5976,6 +5976,8 @@ books/bookvolbib add references
books/bookvol13 add section
20181211.02.tpd.patch
src/input/kernel.input
+20190131.01.tpd.patch
+books/bookvolbib add references
-- 
1.9.1