From 912f3927f1b95c7f3bbfd832bbe1fcd7ba13d808 Mon Sep 17 00:00:00 2001
From: Tim Daly
Date: Thu, 17 Aug 2017 04:14:07 -0400
Subject: [PATCH] books/bookvolbib Add references
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Goal: Axiom references
\index{Richardson, Dan}
\index{Fitch, John P.}
\begin{chunk}{axiom.bib}
@inproceedings{Rich94,
author = "Richardson, Dan and Fitch, John P.",
title = "The identity problem for elementary functions and constants",
booktitle = "ACM Proc. of ISSAC 94",
pages = "285-290",
isbn = "0-89791-638-7",
year = "1994",
abstract =
"A solution for a version of the identity problem is proposed for a
class of functions including the elementary functions. Given f(x),
g(x), defined at some point $\beta$ we decide whether or not
$f(x) \equiv g(x)$
in some neighbourhood of $\beta$. This problem is first reduced to a
problem about zero equivalence of elementary constants. Then a semi
algorithm is given to solve the elementary constant problem. This semi
algorithm is guaranteed to give the correct answer whenever it
terminates, and it terminates unless the problem being considered
contains a counterexample to Schanuel's conjecture.",
paper = "Rich94.pdf"
}
\end{chunk}
\index{Bertrand, Laurent}
\begin{chunk}{axiom.bib}
@inproceedings{Bert94,
author = "Bertrand, Laurent",
title = "On the Implementation of a new Algorithm for the Computation
of Hyperelliptic Integrals",
booktitle = "ISSAC 94",
isbn = "0-89791-638-7",
pages = "211-215",
year = "1994",
abstract =
"In this paper, we present an implementation in Maple of a new
algorithm for the algebraic function integration problem in the
particular case of hyperelliptic integrals. This algorithm is based
on the general algorithm of Trager [9] and on the arithmetic in the
Jacobian of hyperelliptic curves of Cantor [2].",
paper = "Bert94.pdf"
}
\end{chunk}
\index{Baddoura, Jamil}
\begin{chunk}{axiom.bib}
@inproceedings{Badd94,
author = "Baddoura, Jamil",
title = "A Conjecture On Integration in Finite Terms with Elementary
Functions and Polylogarithms",
booktitle = "ISSAC 94",
year = "1994",
pages = "158-162",
isbn = "0-89791-638-7",
abstract =
"In this abstract, we report on a conjecture that gives the form of an
integral if it can be expressed using elementary functions and
polylogarithms. The conjecture is proved by the author in the cases of
the dilogarithm and the trilogarithm [3] and consists of a
generalization of Liouville's theorem on integration in finite terms
with elementary functions. Those last structure theorems, for the
dilogarithm and the trilogarithm, are the first case of structure
theorems where logarithms can appear with non-constant
coefficients. In order to prove the conjecture for higher
polylogarithms we need to find the functional identities, for the
polylogarithms that we are using, that characterize all the possible
algebraic relations among the considered polylogarithms of functions
that are built up from the rational functions by taking the considered
polylogarithms, exponentials, logarithms and algebraics. The task of
finding those functional identities seems to be a difficult one and is
an unsolved problem for the most part to this date.",
paper = "Badd94.pdf",
}
\end{chunk}
\index{Fateman, Richard}
\begin{chunk}{axiom.bib}
@inproceedings{Fate92a,
author = "Fateman, Richard",
title = "Honest Plotting, Global Extrema, and Interval Arithmetic",
booktitle = "ISSAC 92",
year = "1992",
pages = "216-223",
isbn = "0-89791-489-9",
abstract =
"A computer program to honestly plot curves y = f(x) must locate
maxima and minima in the domain of the graph. To do so it may have to
solve a classic problem in computation -- global optimization.
Reducing an easy problem to a hard one is usually not an advantage,
but in fact there is a route to solving both problems if the function
can be evaluated using interval arithmetic. Since some computer
algebra systems supply a version of interval arithmetic, it seems we
have the ingredients for a solution.
In this paper we address a particular problem how to compute and
display ``honest'' graphs of 2-D mathematical curves. By
``honest'' we mean that no significant features (such as the
location of poles, the values at maxima or minima, or the behavior
of a curve at asymptotes) are misrepresented. By ``mathematical'' we
mean curves like those generally needed in scientific disciplines
where functions are represented by composition of common
mathematical operations: rational operations ($+, -, *, /$),
exponential and log, trigonometric functions as well as continuous
and differentiable functions from applied mathematics.",
paper = "Fate92a.pdf"
}
\end{chunk}
\index{Einwohner, T.}
\index{Fateman, Richard J.}
\begin{chunk}{axiom.bib}
@inproceedings{Einw95,
author = "Einwohner, T. and Fateman, Richard J.",
title = "Searching Techniques for Integral Tables",
booktitle = "ISSAC 95",
year = "1995",
pages = "133-139",
abstract =
"We describe the design of data structures and a computer program for
storing a table of symbolic indefinite or definite integrals and
retrieving user-requested integrals on demand. Typical times are so
short that a preliminary look-up attempt prior to any algorithmic
integration approach seems justified. In one such test for a table
with around 700 entries, matches were found requiring an average of
2.8 milliseconds per request, on a Hewlett Packard 9000/712
workstation.",
paper = "Einw95.pdf"
}
\end{chunk}
\index{G{\'o}mez-D{\'\i}az, Teresa}
\begin{chunk}{axiom.bib}
@misc{Gome94,
author = "G{\'o}mez-D{\'\i}az, Teresa",
title = "The Possible Solutions to the Control Problem",
year = "1994"
}
\end{chunk}
\index{Jeffrey, David}
\begin{chunk}{axiom.bib}
@misc{Jeffxx,
author = "Jeffrey, David",
title = "Real Integration on Domains of Maximum Extent",
abstract =
"General purpose computer algebra systems are used by people with
widely varying backgrounds. Amongst the many difficulties that face
the developer because of this, one that is particularly relevant to
the subject of this talk is the fact that different users attach
different meanings, or definitions, to the same symbols. When a user
asks a CAS to integrate a function, it is not clear which definition
of integral should be used. Some of the disagreements over the
``correct'' value of an integral reduce to the fact that the different
parties are using different definitions. This talk therefore starts by
defining my version of integration. According to this definition,
functions returned as integrals should not only differentiate to the
function supplied by the user, they should also satisfy global
continuity properties. In order to achieve these properties, the idea
of a rectifying transform is introduced. For the problem of integrating
a rational trigonometric function, a new rectifying transform is
described."
}
\end{chunk}
\index{Jeffrey, D. J.}
\begin{chunk}{axiom.bib}
@misc{Jeffxxa,
author = "Jeffrey, D. J.",
title = "The Integration of Functions Containing Fractional Powers",
abstract =
"An algorithm is developed for integrating functions that contain
fractional powers. The algorithm addresses the following points. The
integral must be valid for all possible values of the variable,
including those values of the variable that make the integrand, and
hence the integral, take complex values. The algorithm must allow for
the fact that there are two possible interpretations of the cube root
as a real number (in fact of any odd root), and produce correct
integrals for both interpretations (it is shown that it is possible
for the functional form of the integral to change with the
interpretation). Finally, all simplifications, especially of complex
quantities, must follow correct rules, what are here derived using the
concept of the unwinding number."
}
\end{chunk}
\index{Jeffrey, D.J.}
\index{Corless, R.M.}
\begin{chunk}{axiom.bib}
@misc{Jeffxxb,
author = "Jeffrey, D.J. and Corless, R.M.",
title = {Explorations of uses of the unwinding number $\mathcal{K}$},
}
\end{chunk}
\index{Ager, Tryg A.}
\index{Ravaglia, R.A.}
\index{Dooley, Sam}
\begin{chunk}{axiom.bib}
@misc{Ager88,
author = "Ager, Tryg A. and Ravaglia, R.A. and Dooley, Sam",
title = "Representation of Inference in Computer Algebra Systems with
Applications to Intelligent Tutoring",
year = "1988",
abstract =
"Presently computer algebra systems share with calculators the
property that a sequence of computations is not a unified
computational sequence, thereby allowing fallacies to occur. We argue
that if computer algebra systems operate in a framework of strict
mathematical proof, fallacies are eliminated. We show that this is
possible in a working interactive system REQD. We explain why
computational algebra, done under the strict constraints of proof, is
relevant to uses of computer algebra systems in instruction."
}
\end{chunk}
\index{Davenport, James H.}
\index{Faure, Christ\'ele}
\begin{chunk}{axiom.bib}
@misc{Faurxx,
author = {Davenport, James and Faure, Christ\'ele},
title = "Parameters in Computer Algebra",
abstract =
"One of the main strengths of computer algebra is being able to solve
a family of problems with one computation. In order to express not
only one problem but a family of problems, one introduces some symbols
which are in fact the parameters common to all the problems of the family.
The user must be able to understand in which way these parameters
affect the result when he looks at the answer. This is not the case in
most current Computer Algebra Systems we know because the form of the
answer is never explicitly conditioned by the values of the
parameters. We have introduced multi-valued expressions called
{\sl conditional expressions}, in which each potential value is associated
with a condition on some parameters. This is used, in particular, to
capture the situation in integration, where the form of the answer can
depend on whether certain quantities are positive, negative, or zero.",
keywords = "axiomref, provisos"
}
\end{chunk}
\index{Norman, Arthur C.}
\begin{chunk}{axiom.bib}
@inproceedings{Norm90,
author = "Norman, Arthur C.",
title = "A Critical-Pair/Completion based Integration Algorithm",
booktitle = "ISSAC 90",
pages = "201-205",
isbn = "0-201-54892-5",
abstract =
"In 1976 Risch [1] proposed a scheme for finding the integrals of
forms built up out of transcendental functions that viewed general
functions as rational forms in a suitable differential field and
represented the polynomial parts of those forms in a distributed
rather than recursive way. By using a data representation where all
variables were (more or less) equally important this new method seemed
to side-step some of the complications that had appeared in his
previous scheme [2] where various side-constraints had to be
propagated between the levels present in a tower of separate
extensions of differential fields, otherwise seen as levels in
recursive data structures. An initial implementation of the method was
prepared in the context of the SCRATCHPAD/1 algebra system and
demonstrated at the 1976 SYMSAC meeting at Yorktown Heights, a
subsequent version for Reduce [3][5] came after that, and made it
possible to try the method on a large range of integrals. These
practical studies showed up some problems with the method and its
implementation. The presentation given here re-expresses the 1976
Risch method in terms of rewrite rules, and thus exposes the major
problem it suffers from as a manifestation of the fact that in certain
circumstances the set of rewrites generated is not confluent. This
difficulty is then attacked using a critical-pair/completion (CPC)
approach. For very many integrands it is then easy to see that the
initial set of rewrites used in the early implementations [1] and [3]
do not need any extension, and this fact explains the high level of
competence of the programs involved despite their shaky theoretical
foundations. For a further large collection of problems even a simple
CPC scheme converges rapidly; when the techniques presented here are
applied to the REDUCE integration test suite in all applicable cases a
short computation succeeds in completing the set of rewrites and hence
gives a secure basis for testing for integrability. This paper
describes the implementation of the CPC process and discusses current
limitations to and possible future extended applications of it.",
paper = "Norm90.pdf",
keywords = "axiomref"
}
\end{chunk}
\index{Davenport, J.H.}
\begin{chunk}{axiom.bib}
@misc{Davexx,
author = "Davenport, J.H.",
title = "Computer algebra -- past, present and future",
abstract =
"Computer algebra started in 1953, and there were several systems in
existence in the 1960's. Those inspired by physical applications
largely implemented ``high school'' algebra and, from the point of
view of today's much larger machines and more sophisticated
programming languages, the miracle is that they worked at all, or as
efficiently as they did.
By the end of the 1960's it was clear that more sophisticated
algorithms were necessary, either to solve problems for which the
``high school'' algorithms were inefficient on large data (e.g. gcd or
factorization), or problems for which the ``high school'' techniques
were not really algorithms at all (e.g. integration, solution of sets
of equations).
Hence the 1970's (and indeed much of the 1980's) were the ``age of
algorithms''. It rapidly became obvious that these algorithms required
more complex data structures and mathematical objects: finite fields,
ideals, algebraic curves, divisor class groups to name but a few. This
led to the growth of new systems, such as Axiom (formerly Scratchpad),
Maple, Mathematica and Reduce 3. It is currently the case that many
more algorithms are known than are implemented, and certainly that few
systems implement even a reasonable cross-section of the known
algorithms.
At the present, there are two main trends. One is the rush to
implement, which is causing a lot of duplication of work, but there is
also a realisation that these systems need to be able to communicate,
and that it is inherently impossible to have all the best algorithms
in one system. To take an example, why implement from scratch enough
group theory to analyse blocks of imprimitivity in a permutation
group, when Cayley has all this and much more? However, parts of
integration theory require this analysis.
The other trend is the tendency to more ``structure-oriented''
algorithms, i.e. algorithms which take account of the structure of the
problem. To name two, there is Gatemann's work on polynomial equation
systems with symmetry, and Richardson's work on roots of polynomials
which can be written as $p(x,x^n)$ with $p$ of low degree.
The paper concludes with some speculations on the future of computer
algebra.",
paper = "Davexx.pdf",
keywords = "axiomref"
}
\end{chunk}
\index{Bronstein, Manuel}
\begin{chunk}{axiom.bib}
@misc{Bronxxa,
author = "Bronstein, Manuel",
title = "Symbolic Integration: towards Practical Algorithms",
abstract =
"After reviewing the Risch algorithm for the integration of elementary
functions and the underlying theory, we describe the successive
improvements in the field, and the current ``rational'' approach to
symbolic integration. We describe how a technique discovered by
Hermite a century ago can be efficiently applied to rational,
algebraic, elementary transcendental and mixed elementary functions."
}
\end{chunk}
\index{Temme, N.M.}
\begin{chunk}{axiom.bib}
@misc{Temmxx,
author = "Temme, N.M.",
title = "Uniform Asymptotic Expansions of Integrals",
abstract =
"The purpose of the paper is to give an account of several aspects of
uniform asymptotic expansions of integrals. We give examples of
standard forms, the role of critical points and methods to construct
the expansions."
}
\end{chunk}
\index{L\'opez, Jos\'e L.}
\begin{chunk}{axiom.bib}
@article{Lope99,
author = {L\'opez, Jos\'e L.},
title = "Asymptotic expansions of integrals: The term-by-term integration
method",
year = "1999",
journal = "Journal of Computational and Applied Mathematics",
volume = "102",
pages = "181-194",
abstract =
"The classical term-by-term integration technique used for obtaining
asymptotic expansions of integrals requires the integrand to have an
uniform asymptotic expansion in the integration variable. A
modification of this method is presented in which the uniformity
conditions provides the term-by-term integration technique a large
range of applicability. As a consequence of this generality, Watson's
lemma and the integration by parts technique applied to Laplace's and
a special family of Fourier's transforms become corollaries of the
term-by-term integration method.",
paper = "Lope99.pdf"
}
\end{chunk}
\index{Nordsieck, Arnold}
\begin{chunk}{axiom.bib}
@article{Nord62,
author = "Nordsieck, Arnold",
title = "On Numerical Integration of Ordinary Differential Equations",
journal = "Mathematics of Computations",
volume = "XVI",
year = "1962",
pages = "22-49",
abstract =
"A reliable efficient general-purpose method for automatic digital
computer integration of systems of ordinary differential equations is
described. The method operates with the current values of the higher
derivatives of a polynomial approximating the solution. It is
thoroughly stable under all circumstances, incorporates automatic
starting and automatic choice and revision of elementary interval
size, approximately minimizes the amount of computation for a
specified accuracy of solution, and applies to any system of
differential equations with derivatives continuous or piecewise
continuous with finite jumps. ILLIAC library subroutine F7, University
of Illinois Digital Computer Laboratory, is a digital computer program
applying this method."
}
\end{chunk}
\index{Faure, Christ\'ele}
\index{Davenport, James H.}
\index{Naciri, Hanane}
\begin{chunk}{axiom.bib}
@techreport{Faur00,
author = "Faure, Christele and Davenport, James H. and Naciri, Hanane",
title = "Multi-Valued Computer Algebra",
year = "2000",
type = "technical report",
institution = "INRIA CAFE",
number = "4001",
abstract =
"One of the main strengths of computer algebra is being able to solve
a family of problems with one computation. In order to express not
only one problem but a family of problems, one introduces some symbols
which are in fact the parameters common to all the problems of the
family. The user must be able to understand in which way these
parameters affect the result when he looks at the answer. Otherwise it
may lead to completely wrong calculations, which when used for
numerical applications bring nonsensical answers. This is the case in
most current Computer Algebra Systems we know because the form of the
answer is never explicitly conditioned by the values of the
parameters. The user is not even informed that the given answer may be
wrong in some cases then computer algebra systems can not be entirely
trustworthy. We have introduced multi-valued expressions called
conditional expressions, in which each potential value is associated
with a condition on some parameters. This is used, in particular, to
capture the situation in integration, where the form of the answer can
depend on whether certain quantities are positive, negative or
zero. We show that it is also necessary when solving modular linear
equations or deducing congruence conditions from complex expressions.",
paper = "Faur00.pdf"
}
\end{chunk}
---
books/bookvolbib.pamphlet | 521 ++++++++++++++++++++++++++++++++++++++++-
changelog | 2 +
patch | 472 ++++++++++++++++++++++++++++++++++++-
src/axiom-website/patches.html | 2 +
4 files changed, 985 insertions(+), 12 deletions(-)
diff --git a/books/bookvolbib.pamphlet b/books/bookvolbib.pamphlet
index 45fbc91..0f84aa9 100644
--- a/books/bookvolbib.pamphlet
+++ b/books/bookvolbib.pamphlet
@@ -1887,7 +1887,7 @@ paper = "Brea89.pdf"
\index{Howe, Douglas J.}
\begin{chunk}{axiom.bib}
-@techreprot{Howe87,
+@techreport{Howe87,
author = "Howe, Douglas J.",
title = "The Computational Behaviour of Girard's Paradox",
institution = "Cornell University",
@@ -4911,6 +4911,40 @@ when shown in factored form.
\index{Fateman, Richard}
\begin{chunk}{axiom.bib}
+@inproceedings{Fate92a,
+ author = "Fateman, Richard",
+ title = "Honest Plotting, Global Extrema, and Interval Arithmetic",
+ booktitle = "ISSAC 92",
+ year = "1992",
+ pages = "216-223",
+ isbn = "0-89791-489-9",
+ abstract =
+ "A computer program to honestly plot curves y = f(x) must locate
+ maxima and minima in the domain of the graph. To do so it may have to
+ solve a classic problem in computation -- global optimization.
+ Reducing an easy problem to a hard one is usually not an advantage,
+ but in fact there is a route to solving both problems if the function
+ can be evaluated using interval arithmetic. Since some computer
+ algebra systems supply a version of interval arithmetic, it seems we
+ have the ingredients for a solution.
+
+ In this paper we address a particular problem how to compute and
+ display ``honest'' graphs of 2-D mathematical curves. By
+ ``honest'' we mean that no significant features (such as the
+ location of poles, the values at maxima or minima, or the behavior
+ of a curve at asymptotes) are misrepresented. By ``mathematical'' we
+ mean curves like those generally needed in scientific disciplines
+ where functions are represented by composition of common
+ mathematical operations: rational operations ($+, -, *, /$),
+ exponential and log, trigonometric functions as well as continuous
+ and differentiable functions from applied mathematics.",
+ paper = "Fate92a.pdf"
+}
+
+\end{chunk}
+
+\index{Fateman, Richard}
+\begin{chunk}{axiom.bib}
@misc{Fate00b,
author = "Fateman, Richard",
title = "The (finite field) Fast Fourier Transform",
@@ -5768,6 +5802,29 @@ when shown in factored form.
\end{chunk}
+\index{Einwohner, T.}
+\index{Fateman, Richard J.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Einw95,
+ author = "Einwohner, T. and Fateman, Richard J.",
+ title = "Searching Techniques for Integral Tables",
+ booktitle = "ISSAC 95",
+ year = "1995",
+ pages = "133-139",
+ abstract =
+ "We describe the design of data structures and a computer program for
+ storing a table of symbolic indefinite or definite integrals and
+ retrieving user-requested integrals on demand. Typical times are so
+ short that a preliminary look-up attempt prior to any algorithmic
+ integration approach seems justified. In one such test for a table
+ with around 700 entries, matches were found requiring an average of
+ 2.8 milliseconds per request, on a Hewlett Packard 9000/712
+ workstation.",
+ paper = "Einw95.pdf"
+}
+
+\end{chunk}
+
\index{Kaltofen, Erich}
\index{Morozov, Dmitriy}
\index{Yuhasz, George}
@@ -8438,6 +8495,59 @@ when shown in factored form.
\section{Special Functions} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Baddoura, Jamil}
+\begin{chunk}{axiom.bib}
+@inproceedings{Badd94,
+ author = "Baddoura, Jamil",
+ title = "A Conjecture On Integration in Finite Terms with Elementary
+ Functions and Polylogarithms",
+ booktitle = "ISSAC 94",
+ year = "1994",
+ pages = "158-162",
+ isbn = "0-89791-638-7",
+ abstract =
+ "In this abstract, we report on a conjecture that gives the form of an
+ integral if it can be expressed using elementary functions and
+ polylogarithms. The conjecture is proved by the author in the cases of
+ the dilogarithm and the trilogarithm [3] and consists of a
+ generalization of Liouville's theorem on integration in finite terms
+ with elementary functions. Those last structure theorems, for the
+ dilogarithm and the trilogarithm, are the first case of structure
+ theorems where logarithms can appear with non-constant
+ coefficients. In order to prove the conjecture for higher
+ polylogarithms we need to find the functional identities, for the
+ polylogarithms that we are using, that characterize all the possible
+ algebraic relations among the considered polylogarithms of functions
+ that are built up from the rational functions by taking the considered
+ polylogarithms, exponentials, logarithms and algebraics. The task of
+ finding those functional identities seems to be a difficult one and is
+ an unsolved problem for the most part to this date.",
+ paper = "Badd94.pdf",
+}
+
+\end{chunk}
+
+\index{Bertrand, Laurent}
+\begin{chunk}{axiom.bib}
+@inproceedings{Bert94,
+ author = "Bertrand, Laurent",
+ title = "On the Implementation of a new Algorithm for the Computation
+ of Hyperelliptic Integrals",
+ booktitle = "ISSAC 94",
+ isbn = "0-89791-638-7",
+ pages = "211-215",
+ year = "1994",
+ abstract =
+ "In this paper, we present an implementation in Maple of a new
+ algorithm for the algebraic function integration problem in the
+ particular case of hyperelliptic integrals. This algorithm is based
+ on the general algorithm of Trager [9] and on the arithmetic in the
+ Jacobian of hyperelliptic curves of Cantor [2].",
+ paper = "Bert94.pdf"
+}
+
+\end{chunk}
+
\index{Corless, Robert M.}
\index{Jeffrey, David J.}
\index{Watt, Stephen M.}
@@ -8873,6 +8983,28 @@ Martin, U.
\end{chunk}
+\index{Ager, Tryg A.}
+\index{Ravaglia, R.A.}
+\index{Dooley, Sam}
+\begin{chunk}{axiom.bib}
+@misc{Ager88,
+ author = "Ager, Tryg A. and Ravaglia, R.A. and Dooley, Sam",
+ title = "Representation of Inference in Computer Algebra Systems with
+ Applications to Intelligent Tutoring",
+ year = "1988",
+ abstract =
+ "Presently computer algebra systems share with calculators the
+ property that a sequence of computations is not a unified
+ computational sequence, thereby allowing fallacies to occur. We argue
+ that if computer algebra systems operate in a framework of strict
+ mathematical proof, fallacies are eliminated. We show that this is
+ possible in a working interactive system REQD. We explain why
+ computational algebra, done under the strict constraints of proof, is
+ relevant to uses of computer algebra systems in instruction."
+}
+
+\end{chunk}
+
\index{Avigad, Jeremy}
\begin{chunk}{axiom.bib}
@misc{Avig14,
@@ -12717,6 +12849,32 @@ in Lecture Notes in Computer Science, Springer ISBN 978-3-540-85520-0
\end{chunk}
+\index{Nordsieck, Arnold}
+\begin{chunk}{axiom.bib}
+@article{Nord62,
+ author = "Nordsieck, Arnold",
+ title = "On Numerical Integration of Ordinary Differential Equations",
+ journal = "Mathematics of Computations",
+ volume = "XVI",
+ year = "1962",
+ pages = "22-49",
+ abstract =
+ "A reliable efficient general-purpose method for automatic digital
+ computer integration of systems of ordinary differential equations is
+ described. The method operates with the current values of the higher
+ derivatives of a polynomial approximating the solution. It is
+ thoroughly stable under all circumstances, incorporates automatic
+ starting and automatic choice and revision of elementary interval
+ size, approximately minimizes the amount of computation for a
+ specified accuracy of solution, and applies to any system of
+ differential equations with derivatives continuous or piecewise
+ continuous with finite jumps. ILLIAC library subroutine F7, University
+ of Illinois Digital Computer Laboratory, is a digital computer program
+ applying this method."
+}
+
+\end{chunk}
+
\section{Advanced Documentation} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Bostock, Mike}
@@ -13354,6 +13512,22 @@ J. Symbolic COmputations 36 pp 855-889
\section{Integration} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Bronstein, Manuel}
+\begin{chunk}{axiom.bib}
+@misc{Bronxxa,
+ author = "Bronstein, Manuel",
+ title = "Symbolic Integration: towards Practical Algorithms",
+ abstract =
+ "After reviewing the Risch algorithm for the integration of elementary
+ functions and the underlying theory, we describe the successive
+ improvements in the field, and the current ``rational'' approach to
+ symbolic integration. We describe how a technique discovered by
+ Hermite a century ago can be efficiently applied to rational,
+ algebraic, elementary transcendental and mixed elementary functions."
+}
+
+\end{chunk}
+
\index{Kaltofen, Erich}
\begin{chunk}{axiom.bib}
@TechReport{Kalt84b,
@@ -14284,6 +14458,31 @@ Vol 13(2) pp 93-118, (1835)
\end{chunk}
+\index{L\'opez, Jos\'e L.}
+\begin{chunk}{axiom.bib}
+@article{Lope99,
+ author = {L\'opez, Jos\'e L.},
+ title = "Asymptotic expansions of integrals: The term-by-term integration
+ method",
+ year = "1999",
+ journal = "Journal of Computational and Applied Mathematics",
+ volume = "102",
+ pages = "181-194",
+ abstract =
+ "The classical term-by-term integration technique used for obtaining
+ asymptotic expansions of integrals requires the integrand to have an
+ uniform asymptotic expansion in the integration variable. A
+ modification of this method is presented in which the uniformity
+ conditions provides the term-by-term integration technique a large
+ range of applicability. As a consequence of this generality, Watson's
+ lemma and the integration by parts technique applied to Laplace's and
+ a special family of Fourier's transforms become corollaries of the
+ term-by-term integration method.",
+ paper = "Lope99.pdf"
+}
+
+\end{chunk}
+
\index{Marchisotto, Elena Anne}
\index{Zakeri, Gholem-All}
\begin{chunk}{ignore}
@@ -14351,6 +14550,17 @@ CACM Aug 1971 Vol 14 No 8 pp548-560
\end{chunk}
+\index{Ng, Edward W.}
+\begin{chunk}{axiom.bib}
+@techreport{Ngxx77,
+ author = "Ng, Edward W.",
+ title = "Observations on Approximate Integrations",
+ year = "1977",
+ paper = "Ngxx77.pdf"
+}
+
+\end{chunk}
+
\index{Norman, Arthur C.}
\index{Davenport, James H.}
\begin{chunk}{ignore}
@@ -14371,6 +14581,59 @@ CACM Aug 1971 Vol 14 No 8 pp548-560
\end{chunk}
+\index{Norman, Arthur C.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Norm90,
+ author = "Norman, Arthur C.",
+ title = "A Critical-Pair/Completion based Integration Algorithm",
+ booktitle = "ISSAC 90",
+ pages = "201-205",
+ isbn = "0-201-54892-5",
+ abstract =
+ "In 1976 Risch [1] proposed a scheme for finding the integrals of
+ forms built up out of transcendental functions that viewed general
+ functions as rational forms in a suitable differential field and
+ represented the polynomial parts of those forms in a distributed
+ rather than recursive way. By using a data representation where all
+ variables were (more or less) equally important this new method seemed
+ to side-step some of the complications that had appeared in his
+ previous scheme [2] where various side-constraints had to be
+ propagated between the levels present in a tower of separate
+ extensions of differential fields, otherwise seen as levels in
+ recursive data structures.
+
+ An initial implementation of the method was
+ prepared in the context of the SCRATCHPAD/1 algebra system and
+ demonstrated at the 1976 SYMSAC meeting at Yorktown Heights, a
+ subsequent version for Reduce [3][5] came after that, and made it
+ possible to try the method on a large range of integrals. These
+ practical studies showed up some problems with the method and its
+ implementation.
+
+ The presentation given here re-expresses the 1976
+ Risch method in terms of rewrite rules, and thus exposes the major
+ problem it suffers from as a manifestation of the fact that in certain
+ circumstances the set of rewrites generated is not confluent. This
+ difficulty is then attacked using a critical-pair/completion (CPC)
+ approach. For very many integrands it is then easy to see that the
+ initial set of rewrites used in the early implementations [1] and [3]
+ do not need any extension, and this fact explains the high level of
+ competence of the programs involved despite their shaky theoretical
+ foundations. For a further large collection of problems even a simple
+ CPC scheme converges rapidly; when the techniques presented here are
+ applied to the REDUCE integration test suite in all applicable cases a
+ short computation succeeds in completing the set of rewrites and hence
+ gives a secure basis for testing for integrability.
+
+ This paper describes the implementation of the CPC process and
+ discusses current limitations to and possible future extended
+ applications of it.",
+ paper = "Norm90.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
\index{Ostrowski, A.}
\begin{chunk}{ignore}
\bibitem[Ostrowski 46]{Ost46} Ostrowski, A.
@@ -14790,6 +15053,49 @@ Ph.D Diss. MIT, May 1961; also Computers and Thought, Feigenbaum and Feldman.
\end{chunk}
+\index{Temme, N.M.}
+\begin{chunk}{axiom.bib}
+@misc{Temmxx,
+ author = "Temme, N.M.",
+ title = "Uniform Asymptotic Expansions of Integrals",
+ abstract =
+ "The purpose of the paper is to give an account of several aspects of
+ uniform asymptotic expansions of integrals. We give examples of
+ standard forms, the role of critical points and methods to construct
+  the expansions."
+}
+
+\end{chunk}
+
+\index{Temme, N.M.}
+\begin{chunk}{axiom.bib}
+@article{Temm95,
+ author = "Temme, N.M.",
+ title = "Uniform asymptotic expansions of integrals: a selection of
+ problems",
+ journal = "Journal of Computational and Applied Mathematics",
+ volume = "65",
+ number = "1-3",
+ year = "1995",
+ pages = "395-417",
+ abstract =
+ "On the occasion of the conference we mention examples of Stieltjes'
+ work on asymptotics of special functions. The remaining part of the
+ paper gives a selection of asymptotic methods for integrals, in
+  particular on uniform approximations. We discuss several ``standard''
+ problems and examples, in which known special functions (error
+ functions, Airy functions, Bessel functions, etc.) are needed to
+ construct uniform approximations. Finally, we discuss the recent
+ interest and new insights in the Stokes phenomenon. An extensive
+ bibliography on uniform asymptotic methods for integrals is given,
+ together with references to recent papers on the Stokes phenomenon for
+ integrals and related topics.",
+ paper = "Temm95.pdf"
+
+}
+
+\end{chunk}
+
\index{Terelius, Bjorn}
\begin{chunk}{axiom.bib}
@mastersthesis{Tere09,
@@ -20655,6 +20961,59 @@ Proc ISSAC 97 pp172-175 (1997)
\section{To Be Classified} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Davenport, J.H.}
+\begin{chunk}{axiom.bib}
+@misc{Davexx,
+ author = "Davenport, J.H.",
+ title = "Computer algebra -- past, present and future",
+ abstract =
+ "Computer algebra started in 1953, and there were several systems in
+ existence in the 1960's. Those inspired by physical applications
+ largely implemented ``high school'' algebra and, from the point of
+ view of today's much larger machines and more sophisticated
+ programming languages, the miracle is that they worked at all, or as
+ efficiently as they did.
+
+ By the end of the 1960's it was clear that more sophisticated
+ algorithms were necessary, either to solve problems for which the
+ ``high school'' algorithms were inefficient on large data (e.g. gcd or
+ factorization), or problems for which the ``high school'' techniques
+ were not really algorithms at all (e.g. integration, solution of sets
+ of equations).
+
+ Hence the 1970's (and indeed much of the 1980's) were the ``age of
+ algorithms''. It rapidly became obvious that these algorithms required
+ more complex data structures and mathematical objects: finite fields,
+ ideals, algebraic curves, divisor class groups to name but a few. This
+ led to the growth of new systems, such as Axiom (formerly Scratchpad),
+ Maple, Mathematica and Reduce 3. It is currently the case that many
+ more algorithms are known than are implemented, and certainly that few
+ systems implement even a reasonable cross-section of the known
+ algorithms.
+
+ At the present, there are two main trends. One is the rush to
+ implement, which is causing a lot of duplication of work, but there is
+ also a realisation that these systems need to be able to communicate,
+ and that it is inherently impossible to have all the best algorithms
+ in one system. To take an example, why implement from scratch enough
+ group theory to analyse blocks of imprimitivity in a permutation
+ group, when Cayley has all this and much more? However, parts of
+ integration theory require this analysis.
+
+ The other trend is the tendency to more ``structure-oriented''
+  algorithms, i.e. algorithms which take account of the structure of the
+  problem. To name two, there is Gatermann's work on polynomial equation
+ systems with symmetry, and Richardson's work on roots of polynomials
+ which can be written as $p(x,x^n)$ with $p$ of low degree.
+
+ The paper concludes with some speculations on the future of computer
+ algebra.",
+ paper = "Davexx.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
\index{Fateman, Richard J.}
\begin{chunk}{axiom.bib}
@phdthesis{Fate72,
@@ -21262,6 +21621,64 @@ Proc ISSAC 97 pp172-175 (1997)
\end{chunk}
+\index{Jeffrey, David}
+\begin{chunk}{axiom.bib}
+@misc{Jeffxx,
+ author = "Jeffrey, David",
+ title = "Real Integration on Domains of Maximum Extent",
+ abstract =
+ "General purpose computer algebra systems are used by people with
+ widely varying backgrounds. Amongst the many difficulties that face
+ the developer because of this, one that is particularly relevant to
+ the subject of this talk is the fact that different users attach
+ different meanings, or definitions, to the same symbols. When a user
+ asks a CAS to integrate a function, it is not clear which definition
+ of integral should be used. Some of the disagreements over the
+ ``correct'' value of an integral reduce to the fact that the different
+  parties are using different definitions. This talk therefore starts by
+ defining my version of integration. According to this definition,
+ functions returned as integrals should not only differentiate to the
+ function supplied by the user, they should also satisfy global
+ continuity properties. In order to achieve these properties, the idea
+  of a rectifying transform is introduced. For the problem of integrating
+ a rational trigonometric function, a new rectifying transform is
+ described."
+}
+
+\end{chunk}
+
+\index{Jeffrey, D. J.}
+\begin{chunk}{axiom.bib}
+@misc{Jeffxxa,
+ author = "Jeffrey, D. J.",
+ title = "The Integration of Functions Containing Fractional Powers",
+ abstract =
+ "An algorithm is developed for integrating functions that contain
+ fractional powers. The algorithm addresses the following points. The
+ integral must be valid for all possible values of the variable,
+ including those values of the variable that make the integrand, and
+ hence the integral, take complex values. The algorithm must allow for
+ the fact that there are two possible interpretations of the cube root
+ as a real number (in fact of any odd root), and produce correct
+ integrals for both interpretations (it is shown that it is possible
+  for the functional form of the integral to change with the
+  interpretation). Finally, all simplifications, especially of complex
+  quantities, must follow correct rules, which are here derived using the
+ concept of the unwinding number."
+}
+
+\end{chunk}
+
+\index{Jeffrey, D.J.}
+\index{Corless, R.M.}
+\begin{chunk}{axiom.bib}
+@misc{Jeffxxb,
+ author = "Jeffrey, D.J. and Corless, R.M.",
+  title = {Explorations of uses of the unwinding number $\mathcal{K}$},
+}
+
+\end{chunk}
+
\index{Kohlhase, Michael}
\begin{chunk}{axiom.bib}
@misc{Kohl08,
@@ -27258,13 +27675,50 @@ December 1992.
\end{chunk}
+\index{Faure, Christ\'ele}
+\index{Davenport, James H.}
+\index{Naciri, Hanane}
+\begin{chunk}{axiom.bib}
+@techreport{Faur00,
+ author = "Faure, Christele and Davenport, James H. and Naciri, Hanane",
+ title = "Multi-Valued Computer Algebra",
+ year = "2000",
+ type = "technical report",
+ institution = "INRIA CAFE",
+ number = "4001",
+ abstract =
+ "One of the main strengths of computer algebra is being able to solve
+ a family of problems with one computation. In order to express not
+ only one problem but a family of problems, one introduces some symbols
+ which are in fact the parameters common to all the problems of the
+ family. The user must be able to understand in which way these
+ parameters affect the result when he looks at the answer. Otherwise it
+ may lead to completely wrong calculations, which when used for
+ numerical applications bring nonsensical answers. This is the case in
+ most current Computer Algebra Systems we know because the form of the
+ answer is never explicitly conditioned by the values of the
+ parameters. The user is not even informed that the given answer may be
+ wrong in some cases then computer algebra systems can not be entirely
+ trustworthy. We have introduced multi-valued expressions called
+ conditional expressions, in which each potential value is associated
+ with a condition on some parameters. This is used, in particular, to
+ capture the situation in integration, where the form of the answer can
+ depend on whether certain quantities are positive, negative or
+ zero. We show that it is also necessary when solving modular linear
+ equations or deducing congruence conditions from complex expressions.",
+ paper = "Faur00.pdf"
+}
+
+\end{chunk}
+
\index{Davenport, James H.}
\index{Faure, Christ\'ele}
\begin{chunk}{axiom.bib}
-@misc{Davexx,
- author = {Davenport, James; Faure, Christ\'ele},
+@misc{Dave94,
+ author = {Davenport, James and Faure, Christ\'ele},
title = "The Unknown in Computer Algebra",
link = "\url{http://axiom-wiki.newsynthesis.org/public/refs/TheUnknownInComputerAlgebra.pdf}",
+ year = "1994",
abstract = "
Computer algebra systems have to deal with the confusion between
``programming variables'' and ``mathematical symbols''. We claim that
@@ -27272,7 +27726,7 @@ December 1992.
are unknown, but whose type is known. For examples $x^p \ne x$ if $x$
is a symbol, but $x^p = x$ if $x \in GF(p)$. We show how we have
extended Axiom to deal with this concept.",
- paper = "Davexx.pdf",
+ paper = "Dave94.pdf",
keywords = "axiomref"
}
@@ -29223,12 +29677,29 @@ TPHOLS 2001, Edinburgh
\end{chunk}
-\index{Faure, Christ\'ele}
\index{Davenport, James H.}
-\begin{chunk}{ignore}
-\bibitem[Faure 00a]{FDN00a} Faure, Christ\'ele; Davenport, James
+\index{Faure, Christ\'ele}
+\begin{chunk}{axiom.bib}
+@misc{Faurxx,
+ author = {Davenport, James and Faure, Christ\'ele},
title = "Parameters in Computer Algebra",
- keywords = "axiomref"
+ abstract =
+ "One of the main strengths of computer algebra is being able to solve
+ a family of problems with one computation. In order to express not
+ only one problem but a family of problems, one introduces some symbols
+ which are in fact the parameters common to all the problems of the family.
+
+ The user must be able to understand in which way these parameters
+ affect the result when he looks at the answer. This is not the case in
+ most current Computer Algebra Systems we know because the form of the
+ answer is never explicitly conditioned by the values of the
+ parameters. We have introduced multi-valued expressions called
+ {\sl conditional expressions}, in which each potential value is associated
+ with a condition on some parameters. This is used, in particular, to
+ capture the situation in integration, where the form of the answer can
+ depend on whether certain quantities are positive, negative, or zero.",
+ keywords = "axiomref, provisos"
+}
\end{chunk}
@@ -30106,6 +30577,16 @@ IMACS Symposium SC-1993
\index{G{\'o}mez-D{\'\i}az, Teresa}
\begin{chunk}{axiom.bib}
+@misc{Gome94,
+ author = "Gomez-Diaz, Teresa",
+ title = "The Possible Solutions to the Control Problem",
+ year = "1994"
+}
+
+\end{chunk}
+
+\index{G{\'o}mez-D{\'\i}az, Teresa}
+\begin{chunk}{axiom.bib}
@article{Gome96,
author = "Gomez-Diaz, Theresa",
title = "Examples of using dynamic constructible closure",
@@ -44762,10 +45243,28 @@ Proc. AMS Vol 117 No 4 April 1993
\index{Richardson, Dan}
\index{Fitch, John P.}
-\begin{chunk}{ignore}
-\bibitem[Richardson 94]{RF94} Richardson, Dan; Fitch, John
+\begin{chunk}{axiom.bib}
+@inproceedings{Rich94,
+ author = "Richardson, Dan and Fitch, John P.",
title = "The identity problem for elementary functions and constants",
-ACM Proc. of ISSAC 94 pp285-290 ISBN 0-89791-638-7
+ booktitle = "ACM Proc. of ISSAC 94",
+ pages = "285-290",
+ isbn = "0-89791-638-7",
+ year = "1994",
+ abstract =
+  "A solution for a version of the identity problem is proposed for a
+ class of functions including the elementary functions. Given f(x),
+ g(x), defined at some point $\beta$ we decide whether or not
+ $f(x) \equiv g(x)$
+ in some neighbourhood of $\beta$. This problem is first reduced to a
+ problem about zero equivalence of elementary constants. Then a semi
+ algorithm is given to solve the elementary constant problem. This semi
+ algorithm is guaranteed to give the correct answer whenever it
+ terminates, and it terminates unless the problem being considered
+ contains a counterexample to Schanuel's conjecture.",
+ paper = "Rich94.pdf"
+
+}
\end{chunk}
diff --git a/changelog b/changelog
index c972f2a..079f2ec 100644
--- a/changelog
+++ b/changelog
@@ -1,3 +1,5 @@
+20170817 tpd src/axiom-website/patches.html 20170817.01.tpd.patch
+20170817 tpd books/bookvolbib Add references
20170816 tpd src/axiom-website/patches.html 20170816.01.tpd.patch
20170816 tpd goals -- a newly added file to explain current goals
20170814 tpd src/axiom-website/patches.html 20170814.04.tpd.patch
diff --git a/patch b/patch
index 53691db..dbd24af 100644
--- a/patch
+++ b/patch
@@ -1,2 +1,472 @@
-goals -- a newly added file to explain current goals
+books/bookvolbib Add references
+Goal: Axiom references
+
+\index{Richardson, Dan}
+\index{Fitch, John P.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Rich94,
+ author = "Richardson, Dan and Fitch, John P.",
+ title = "The identity problem for elementary functions and constants",
+ booktitle = "ACM Proc. of ISSAC 94",
+ pages = "285-290",
+ isbn = "0-89791-638-7",
+ year = "1994",
+ abstract =
+  "A solution for a version of the identity problem is proposed for a
+ class of functions including the elementary functions. Given f(x),
+ g(x), defined at some point $\beta$ we decide whether or not
+ $f(x) \equiv g(x)$
+ in some neighbourhood of $\beta$. This problem is first reduced to a
+ problem about zero equivalence of elementary constants. Then a semi
+ algorithm is given to solve the elementary constant problem. This semi
+ algorithm is guaranteed to give the correct answer whenever it
+ terminates, and it terminates unless the problem being considered
+ contains a counterexample to Schanuel's conjecture.",
+ paper = "Rich94.pdf"
+
+}
+
+\end{chunk}
+
+\index{Bertrand, Laurent}
+\begin{chunk}{axiom.bib}
+@inproceedings{Bert94,
+ author = "Bertrand, Laurent",
+ title = "On the Implementation of a new Algorithm for the Computation
+ of Hyperelliptic Integrals",
+ booktitle = "ISSAC 94",
+ isbn = "0-89791-638-7",
+ pages = "211-215",
+ year = "1994",
+ abstract =
+ "In this paper, we present an implementation in Maple of a new
+  algorithm for the algebraic function integration problem in the
+  particular case of hyperelliptic integrals. This algorithm is based
+ on the general algorithm of Trager [9] and on the arithmetic in the
+ Jacobian of hyperelliptic curves of Cantor [2].",
+ paper = "Bert94.pdf"
+}
+
+\end{chunk}
+
+\index{Baddoura, Jamil}
+\begin{chunk}{axiom.bib}
+@inproceedings{Badd94,
+ author = "Baddoura, Jamil",
+ title = "A Conjecture On Integration in Finite Terms with Elementary
+ Functions and Polylogarithms",
+ booktitle = "ISSAC 94",
+ year = "1994",
+ pages = "158-162",
+ isbn = "0-89791-638-7",
+ abstract =
+ "In this abstract, we report on a conjecture that gives the form of an
+ integral if it can be expressed using elementary functions and
+ polylogarithms. The conjecture is proved by the author in the cases of
+ the dilogarithm and the trilogarithm [3] and consists of a
+ generalization of Liouville's theorem on integration in finite terms
+ with elementary functions. Those last structure theorems, for the
+ dilogarithm and the trilogarithm, are the first case of structure
+ theorems where logarithms can appear with non-constant
+ coefficients. In order to prove the conjecture for higher
+ polylogarithms we need to find the functional identities, for the
+ polylogarithms that we are using, that characterize all the possible
+ algebraic relations among the considered polylogarithms of functions
+ that are built up from the rational functions by taking the considered
+ polylogarithms, exponentials, logarithms and algebraics. The task of
+ finding those functional identities seems to be a difficult one and is
+ an unsolved problem for the most part to this date.",
+ paper = "Badd94.pdf",
+}
+
+\end{chunk}
+
+\index{Fateman, Richard}
+\begin{chunk}{axiom.bib}
+@inproceedings{Fate92a,
+ author = "Fateman, Richard",
+ title = "Honest Plotting, Global Extrema, and Interval Arithmetic",
+ booktitle = "ISSAC 92",
+ year = "1992",
+ pages = "216-223",
+ isbn = "0-89791-489-9",
+ abstract =
+ "A computer program to honestly plot curves y = f(x) must locate
+ maxima and minima in the domain of the graph. To do so it may have to
+  solve a classic problem in computation -- global optimization.
+  Reducing an easy problem to a hard one is usually not an advantage,
+ but in fact there is a route to solving both problems if the function
+ can be evaluated using interval arithmetic. Since some computer
+ algebra systems supply a version of interval arithmetic, it seems we
+ have the ingredients for a solution.
+
+ In this paper we address a particular problem how to compute and
+ display ``honest'' graphs of 2-D mathematical curves. By
+ ``honest'' we mean that no significant features (such as the
+ location of poles, the values at maxima or minima, or the behavior
+  of a curve at asymptotes) are misrepresented. By ``mathematical'' we
+  mean curves like those generally needed in scientific disciplines
+  where functions are represented by composition of common
+  mathematical operations: rational operations ($+, -, *, /$),
+ exponential and log, trigonometric functions as well as continuous
+ and differentiable functions from applied mathematics.",
+ paper = "Fate92a.pdf"
+}
+
+\end{chunk}
+
+\index{Einwohner, T.}
+\index{Fateman, Richard J.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Einw95,
+ author = "Einwohner, T. and Fateman, Richard J.",
+ title = "Searching Techniques for Integral Tables",
+ booktitle = "ISSAC 95",
+ year = "1995",
+ pages = "133-139",
+ abstract =
+ "We describe the design of data structures and a computer program for
+ storing a table of symbolic indefinite or definite integrals and
+ retrieving user-requested integrals on demand. Typical times are so
+ short that a preliminary look-up attempt prior to any algorithmic
+ integration approach seems justified. In one such test for a table
+ with around 700 entries, matches were found requiring an average of
+ 2.8 milliseconds per request, on a Hewlett Packard 9000/712
+ workstation.",
+  paper = "Einw95.pdf"
+}
+
+\end{chunk}
+
+\index{G{\'o}mez-D{\'\i}az, Teresa}
+\begin{chunk}{axiom.bib}
+@misc{Gome94,
+ author = "Gomez-Diaz, Teresa",
+ title = "The Possible Solutions to the Control Problem",
+ year = "1994"
+}
+
+\end{chunk}
+
+\index{Jeffrey, David}
+\begin{chunk}{axiom.bib}
+@misc{Jeffxx,
+ author = "Jeffrey, David",
+ title = "Real Integration on Domains of Maximum Extent",
+ abstract =
+ "General purpose computer algebra systems are used by people with
+ widely varying backgrounds. Amongst the many difficulties that face
+ the developer because of this, one that is particularly relevant to
+ the subject of this talk is the fact that different users attach
+ different meanings, or definitions, to the same symbols. When a user
+ asks a CAS to integrate a function, it is not clear which definition
+ of integral should be used. Some of the disagreements over the
+ ``correct'' value of an integral reduce to the fact that the different
+  parties are using different definitions. This talk therefore starts by
+  defining my version of integration. According to this definition,
+  functions returned as integrals should not only differentiate to the
+  function supplied by the user, they should also satisfy global
+  continuity properties. In order to achieve these properties, the idea
+  of a rectifying transform is introduced. For the problem of integrating
+ a rational trigonometric function, a new rectifying transform is
+ described."
+}
+
+\end{chunk}
+
+\index{Jeffrey, D. J.}
+\begin{chunk}{axiom.bib}
+@misc{Jeffxxa,
+ author = "Jeffrey, D. J.",
+ title = "The Integration of Functions Containing Fractional Powers",
+ abstract =
+ "An algorithm is developed for integrating functions that contain
+ fractional powers. The algorithm addresses the following points. The
+ integral must be valid for all possible values of the variable,
+ including those values of the variable that make the integrand, and
+ hence the integral, take complex values. The algorithm must allow for
+ the fact that there are two possible interpretations of the cube root
+ as a real number (in fact of any odd root), and produce correct
+ integrals for both interpretations (it is shown that it is possible
+  for the functional form of the integral to change with the
+  interpretation). Finally, all simplifications, especially of complex
+  quantities, must follow correct rules, which are here derived using the
+ concept of the unwinding number."
+}
+
+\end{chunk}
+
+\index{Jeffrey, D.J.}
+\index{Corless, R.M.}
+\begin{chunk}{axiom.bib}
+@misc{Jeffxxb,
+ author = "Jeffrey, D.J. and Corless, R.M.",
+  title = {Explorations of uses of the unwinding number $\mathcal{K}$},
+}
+
+\end{chunk}
+
+\index{Ager, Tryg A.}
+\index{Ravaglia, R.A.}
+\index{Dooley, Sam}
+\begin{chunk}{axiom.bib}
+@misc{Ager88,
+ author = "Ager, Tryg A. and Ravaglia, R.A. and Dooley, Sam",
+ title = "Representation of Inference in Computer Algebra Systems with
+ Applications to Intelligent Tutoring",
+ year = "1988",
+ abstract =
+ "Presently computer algebra systems share with calculators the
+ property that a sequence of computations is not a unified
+ computational sequence, thereby allowing fallacies to occur. We argue
+ that if computer algebra systems operate in a framework of strict
+ mathematical proof, fallacies are eliminated. We show that this is
+ possible in a working interactive system REQD. We explain why
+ computational algebra, done under the strict constraints of proof, is
+ relevant to uses of computer algebra systems in instruction."
+}
+
+\end{chunk}
+
+\index{Davenport, James H.}
+\index{Faure, Christ\'ele}
+\begin{chunk}{axiom.bib}
+@misc{Faurxx,
+ author = {Davenport, James and Faure, Christ\'ele},
+ title = "Parameters in Computer Algebra",
+ abstract =
+ "One of the main strengths of computer algebra is being able to solve
+ a family of problems with one computation. In order to express not
+ only one problem but a family of problems, one introduces some symbols
+ which are in fact the parameters common to all the problems of the family.
+
+ The user must be able to understand in which way these parameters
+ affect the result when he looks at the answer. This is not the case in
+ most current Computer Algebra Systems we know because the form of the
+ answer is never explicitly conditioned by the values of the
+ parameters. We have introduced multi-valued expressions called
+ {\sl conditional expressions}, in which each potential value is associated
+ with a condition on some parameters. This is used, in particular, to
+ capture the situation in integration, where the form of the answer can
+ depend on whether certain quantities are positive, negative, or zero.",
+ keywords = "axiomref, provisos"
+}
+
+\end{chunk}
+
+\index{Norman, Arthur C.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Norm90,
+ author = "Norman, Arthur C.",
+  title = "A Critical-Pair/Completion based Integration Algorithm",
+ booktitle = "ISSAC 90",
+ pages = "201-205",
+ isbn = "0-201-54892-5",
+ abstract =
+ "In 1976 Risch [1] proposed a scheme for finding the integrals of
+ forms built up out of transcendental functions that viewed general
+ functions as rational forms in a suitable differential field and
+ represented the polynomial parts of those forms in a distributed
+ rather than recursive way. By using a data representation where all
+ variables were (more or less) equally important this new method seemed
+ to side-step some of the complications that had appeared in his
+ previous scheme [2] where various side-constraints had to be
+ propagated between the levels present in a tower of separate
+ extensions of differential fields, otherwise seen as levels in
+ recursive datastructures. An initial implementation of the method was
+ prepared in the context of the SCRATCHPAD/1 algebra system and
+ demonstrated at the 1976 SYMSAC meeting at Yorktown Heights, a
+ subsequent version for Reduce [3][5] came after that, and made it
+ possible to try the method on a large range of integrals. These
+ practical studies showed up some problems with the method and its
+ implementation. The presentation given here re-expresses the 1976
+ Risch method in terms of rewrite rules, and thus exposes the major
+ problem it suffers from as a manifestation of the fact that in certain
+ circumstances the set of rewrites generated is not confluent. This
+ difficulty is then attacked using a critical-pair/completion (CPC)
+ approach. For very many integrands it is then easy to see that the
+ initial set of rewrites used in the early implementations [1] and [3]
+ do not need any extension, and this fact explains the high level of
+ competence of the programs involved despite their shaky theoretical
+ foundations. For a further large collection of problems even a simple
+ CPC scheme converges rapidly; when the techniques presented here are
+ applied to the REDUCE integration test suite in all applicable cases a
+ short computation succeeds in completing the set of rewrites and hence
+ gives a secure basis for testing for integrability. This paper
+ describes the implementation of the CPC process and discusses current
+ limitations to and possible future extended applications of it.",
+ paper = "Norm90.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Davenport, J.H.}
+\begin{chunk}{axiom.bib}
+@misc{Davexx,
+ author = "Davenport, J.H.",
+ title = "Computer algebra -- past, present and future",
+ abstract =
+ "Computer algebra started in 1953, and there were several systems in
+ existence in the 1960's. Those inspired by physical applications
+ largely implemented ``high school'' algebra and, from the point of
+ view of today's much larger machines and more sophisticated
+ programming languages, the miracle is that they worked at all, or as
+ efficiently as they did.
+
+ By the end of the 1960's it was clear that more sophisticated
+ algorithms were necessary, either to solve problems for which the
+ ``high school'' algorithms were inefficient on large data (e.g. gcd or
+ factorization), or problems for which the ``high school'' techniques
+ were not really algorithms at all (e.g. integration, solution of sets
+ of equations).
+
+ Hence the 1970's (and indeed much of the 1980's) were the ``age of
+ algorithms''. It rapidly became obvious that these algorithms required
+ more complex data structures and mathematical objects: finite fields,
+ ideals, algebraic curves, divisor class groups to name but a few. This
+ led to the growth of new systems, such as Axiom (formerly Scratchpad),
+ Maple, Mathematica and Reduce 3. It is currently the case that many
+ more algorithms are known than are implemented, and certainly that few
+ systems implement even a reasonable cross-section of the known
+ algorithms.
+
+ At the present, there are two main trends. One is the rush to
+ implement, which is causing a lot of duplication of work, but there is
+ also a realisation that these systems need to be able to communicate,
+ and that it is inherently impossible to have all the best algorithms
+ in one system. To take an example, why implement from scratch enough
+ group theory to analyse blocks of imprimitivity in a permutation
+ group, when Cayley has all this and much more? However, parts of
+ integration theory require this analysis.
+
+ The other trend is the tendency to more ``structure-oriented''
+  algorithms, i.e. algorithms which take account of the structure of the
+  problem. To name two, there is Gatermann's work on polynomial equation
+ systems with symmetry, and Richardson's work on roots of polynomials
+ which can be written as $p(x,x^n)$ with $p$ of low degree.
+
+ The paper concludes with some speculations on the future of computer
+ algebra.",
+ paper = "Davexx.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Bronstein, Manuel}
+\begin{chunk}{axiom.bib}
+@misc{Bronxxa,
+ author = "Bronstein, Manuel",
+ title = "Symbolic Integration: towards Practical Algorithms",
+ abstract =
+ "After reviewing the Risch algorithm for the integration of elementary
+ functions and the underlying theory, we descrbe the successive
+ improvements in the field, and the current ``rational'' approach to
+ symbolic integration. We describe how a technique discovered by
+ Hermite a century ago can be efficiently applied to rational,
+ algebraic, elementary transcendental and mixed elementary functions."
+}
+
+\end{chunk}
+
+\index{Temme, N.M.}
+\begin{chunk}{axiom.bib}
+@misc{Temmxx,
+ author = "Temme, N.M.",
+ title = "Uniform Asymptotic Expansions of Integrals",
+ abstract =
+ "The purpose of the paper is to give an account of several aspects of
+ uniform asymptotic expansions of integrals. We give examples of
+ standard forms, the role of critical points and methods to construct
+  the expansions."
+}
+
+\end{chunk}
+
+\index{L\'opez, Jos\'e L.}
+\begin{chunk}{axiom.bib}
+@article{Lope99,
+ author = {L\'opez, Jos\'e L.},
+ title = "Asymptotic expansions of integrals: The term-by-term integration
+ method",
+ year = "1999",
+ journal = "Journal of Computational and Applied Mathematics",
+ volume = "102",
+ pages = "181-194",
+ abstract =
+ "The classical term-by-term integration technique used for obtaining
+ asymptotic expansions of integrals requires the integrand to have an
+ uniform asymptotic expansion in the integration variable. A
+ modification of this method is presented in which the uniformity
+ conditions provides the term-by-term integration technique a large
+ range of applicability. As a consequence of this generality, Watson's
+ lemma and the integration by parts technique applied to Laplace's and
+ a special family of Fourier's transforms become corollaries of the
+  term-by-term integration method.",
+ paper = "Lope99.pdf"
+}
+
+\end{chunk}
+
+\index{Nordsieck, Arnold}
+\begin{chunk}{axiom.bib}
+@article{Nord62,
+ author = "Nordsieck, Arnold",
+ title = "On Numerical Integration of Ordinary Differential Equations",
+ journal = "Mathematics of Computations",
+ volume = "XVI",
+ year = "1962",
+ pages = "22-49",
+ abstract =
+ "A reliable efficient general-purpose method for automatic digital
+ computer integration of systems of ordinary differential equations is
+ described. The method operates with the current values of the higher
+ derivatives of a polynomial approximating the solution. It is
+ thoroughly stable under all circumstances, incorporates automatic
+ starting and automatic choice and revision of elementary interval
+ size, approximately minimizes the amount of computation for a
+ specified accuracy of solution, and applies to any system of
+ differential equations with derivatives continuous or piecewise
+ continuous with finite jumps. ILLIAC library subroutine F7, University
+ of Illinois Digital Computer Laboratory, is a digital computer program
+ applying this method."
+}
+
+\end{chunk}
+
+\index{Faure, Christ\'ele}
+\index{Davenport, James H.}
+\index{Naciri, Hanane}
+\begin{chunk}{axiom.bib}
+@techreport{Faur00,
+ author = "Faure, Christele and Davenport, James H. and Naciri, Hanane",
+  title = "Multi-Valued Computer Algebra",
+ year = "2000",
+ type = "technical report",
+ institution = "INRIA CAFE",
+ number = "4001",
+ abstract =
+ "One of the main strengths of computer algebra is being able to solve
+ a family of problems with one computation. In order to express not
+ only one problem but a family of problems, one introduces some symbols
+ which are in fact the parameters common to all the problems of the
+ family. The user must be able to understand in which way these
+ parameters affect the result when he looks at the answer. Otherwise it
+ may lead to completely wrong calculations, which when used for
+ numerical applications bring nonsensical answers. This is the case in
+ most current Computer Algebra Systems we know because the form of the
+ answer is never explicitly conditioned by the values of the
+ parameters. The user is not even informed that the given answer may be
+ wrong in some cases then computer algebra systems can not be entirely
+ trustworthy. We have introduced multi-valued expressions called
+ conditional expressions, in which each potential value is associated
+ with a condition on some parameters. This is used, in particular, to
+ capture the situation in integration, where the form of the answer can
+ depend on whether certain quantities are positive, negative or
+ zero. We show that it is also necessary when solving modular linear
+  equations or deducing congruence conditions from complex expressions.",
+  paper = "Faur00.pdf"
+}
+\end{chunk}
diff --git a/src/axiom-website/patches.html b/src/axiom-website/patches.html
index 05c5418..7cac3a0 100644
--- a/src/axiom-website/patches.html
+++ b/src/axiom-website/patches.html
@@ -5794,6 +5794,8 @@ books/bookvol13 review of Cardelli (Card85) paper

books/bookvolbib Axiom References in External Literature

20170816.01.tpd.patch
goals -- a newly added file to explain current goals

+20170817.01.tpd.patch
+books/bookvolbib Add references

--
1.9.1