From b9349c3e2db75815bba6aa89bf1c174384293469 Mon Sep 17 00:00:00 2001
From: Tim Daly
Date: Wed, 23 Aug 2017 01:59:35 -0400
Subject: [PATCH] books/bookvolbib Add proof references
Goal: Axiom references
\index{Melquiond, Guillaume}
\begin{chunk}{axiom.bib}
@article{Melq12,
author = "Melquiond, Guillaume",
title = "Floating-point arithmetic in the {Coq} system",
journal = "Information and Computation",
volume = "216",
pages = "14--23",
year = "2012",
link = "\url{https://www.lri.fr/~melquion/doc/08-mc8-article.pdf}",
abstract =
"The process of proving some mathematical theorems can be greatly
reduced by relying on numerically-intensive computations with a
certified arithmetic. This article presents a formalization of
floating-point arithmetic that makes it possible to efficiently
compute inside the proofs of the Coq system. This certified library is
a multi-radix and multi-precision implementation free from underflow
and overflow. It provides the basic arithmetic operators and a few
elementary functions.",
paper = "Melq12.pdf"
}
\end{chunk}
\index{Akers, Robert Lawrence}
\begin{chunk}{axiom.bib}
@phdthesis{Aker93,
author = "Akers, Robert Lawrence",
title = "Strong Static Type Checking for Functional {Common Lisp}",
school = "University of Texas at Austin",
year = "1995",
link = "\url{ftp://www.cs.utexas.edu/pub/boyer/cli-reports/096.pdf}",
abstract =
"This thesis presents a type system which supports the strong static
type checking of programs developed in an applicative subset of the
Common Lisp language. The Lisp subset is augmented with a guard
construct for function definitions, which allows type restrictions to
be placed on the arguments. Guards support the analysis and safe use
of partial functions, like CAR, which are well-defined only for
arguments of a certain type.

A language of type descriptors characterizes the type
domain. Descriptors are composed into function signatures which
characterize the guard and which map various combinations of actual
parameter types to possible result types. From a base of signatures
for a small collection of primitive functions, the system infers
signatures for newly submitted functions.

The system includes a type inference algorithm which handles
constructs beyond the constraints of ML-style systems. Most notable
are the free use of CONS to construct objects of undeclared type and
the use of IF forms whose two result components have unrelated types,
resulting in ad hoc polymorphism. Accordingly, the type descriptor
language accommodates disjunction, unrestricted CONS, recursive type
forms, and ad hoc polymorphic function signatures. As with traditional
type inference systems, unification is a central algorithm, but the
richness of our type language complicates many component algorithms,
including unification. Special treatment is given to recognizer
functions, which are predicates determining whether an object is of a
particular type. Type inference in this setting is undecidable, so the
algorithm is heuristic and is not complete.

The semantics of the system are in terms of a function which
determines whether values satisfy descriptors with respect to a
binding of type variables. The soundness of each signature emitted by
the system is validated by a signature checker, whose properties are
formally specified with respect to the formal semantics and proven to
hold. The checker algorithm is substantially simpler than the
inference algorithm, as it need not perform operations such as
discovering closed recursive forms. Thus, its proof is both more
straightforward to construct and easier to validate than a direct
proof of the inference algorithm would be.",
paper = "Aker93.pdf"
}
\end{chunk}
\index{Byrd, William}
\begin{chunk}{axiom.bib}
@misc{Byrd17,
author = "Byrd, William",
title = "The Most Beautiful Program Ever Written",
year = "2017",
link = "\url{https://www.youtube.com/watch?v=OyfBQmvr2Hc}",
comment = "See miniKanren and Barliman (program synthesis with proof)"
}
\end{chunk}
\begin{chunk}{axiom.bib}
@misc{Unkn13,
author = "Unknown",
title = "{Hindley-Milner} Type Inference",
link = "\url{https://www7.in.tum.de/um/courses/seminar/sove/SS2013/final/hindley-milner.slides.pdf}",
paper = "Unkn13.pdf"
}
\end{chunk}

books/bookvolbib.pamphlet  99 ++++++
changelog  2 +
patch  731 +++++
src/axiomwebsite/patches.html  2 +
4 files changed, 177 insertions(+), 657 deletions(-)
diff --git a/books/bookvolbib.pamphlet b/books/bookvolbib.pamphlet
index aa5edc0..22ad8e5 100644
--- a/books/bookvolbib.pamphlet
+++ b/books/bookvolbib.pamphlet
@@ -9230,6 +9230,59 @@ Martin, U.
\end{chunk}
+\index{Akers, Robert Lawrence}
+\begin{chunk}{axiom.bib}
+@phdthesis{Aker93,
+ author = "Akers, Robert Lawrence",
+ title = "Strong Static Type Checking for Functional Common Lisp",
+ school = "Univerity of Texas at Austin",
+ year = "1995",
+ link = "\url{ftp://www.cs.utexas.edu/pub/boyer/clireports/096.pdf}",
+ abstract =
+ "This thesis presents a type system which supports the strong static
+ type checking of programs developed in an applicative subset of the
+ Common Lisp language. The Lisp subset is augmented with a guard
+ construct for function definitions, which allows type restrictions to
+ be placed on the arguments. Guards support the analysis and safe use
+ of partial functions, like CAR, which are welldefined only for
+ arguments of a certain type.
+
+ A language of type descriptors characterizes the type
+ domain. Descriptors are composed into function signatures which
+ characterize the guard and which map various combinations of actual
+ parameter types to possible result types. From a base of signatures
+ for a small collection of primitive functions, the system infers
+ signatures for newly submitted functions.
+
+ The system includes a type inference algorithm which handles
+ constructs beyond the constraints of MLstyle systems. Most notable
+ are the free use of CONS to construct objects of undeclared type and
+ the use of IF forms whose two result components have unrelated types,
+ resulting in ad hoc polymorphism. Accordingly, the type descriptor
+ language accommodates disjunction, unrestricted CONS, recursive type
+ forms, and ad hoc polymorphic function signatures. As with traditional
+ type inference systems, unification is a central algorithm, but the
+ richness of our type language complicates many component algorithms,
+ including unification. Special treatment is given to recognizer
+ functions, which are predicates determining whether an object is of a
+ particular type. Type inference in this setting is undecidable, so the
+ algorithm is heuristic and is not complete.
+
+ The semantics of the system are in terms of a function which
+ determines whether values satisfy descriptors with respect to a
+ binding of type variables. The soundness of each signature emitted by
+ the system is validated by a signature checker, whose properties are
+ formally specified with respect to the formal semantics and proven to
+ hold. The checker algorithm is substantially simpler than the
+ inference algorithm, as it need not perform operations such as
+ discovering closed recursive forms. Thus, its proof is both more
+ straightforward to construct and easier to validate than a direct
+ proof of the inference algorithm would be.",
+ paper = "Aker93.pdf"
+}
+
+\end{chunk}
+
\index{Avigad, Jeremy}
\begin{chunk}{axiom.bib}
@misc{Avig14,
@@ -9935,6 +9988,17 @@ Martin, U.
\end{chunk}
+\index{Byrd, William}
+\begin{chunk}{axiom.bib}
+@misc{Byrd17,
+ author = "Byrd, William",
+ title = "The Most Beautiful Program Ever Written",
+ link = "\url{https://www.youtube.com/watch?v=OyfBQmvr2Hc}",
+ comment = "See miniKanren and Barliman (program synthesis with proof)"
+}
+
+\end{chunk}
+
\subsection{C} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Casteran, Pierre}
@@ -11676,6 +11740,30 @@ England, Matthew; Wilson, David
\end{chunk}
+\index{Melquiond, Guillaume}
+\begin{chunk}{axiom.bib}
+@article{Melq12,
+ author = "Melquiond, Guillaume",
+ title = "Floatingpoint arithmetic in the Coq system",
+ journal = "Information and Computation",
+ volume = "216",
+ pages = "1423",
+ year = "2012",
+ link = "\url{https://www.lri.fr/~melquion/doc/08mc8article.pdf}",
+ abstract =
+ "The process of proving some mathematical theorems can be greatly
+ reduced by relying on numericallyintensive computations with a
+ certified arithmetic. This article presents a formalization of
+ floatingpoint arithmetic that makes it possible to efficiently
+ compute inside the proofs of the Coq system. This certified library is
+ a multiradix and multiprecision implementation free from underflow
+ and overflow. It provides the basic arithmetic operators and a few
+ elementary functions.",
+ paper = "Melq12.pdf"
+}
+
+\end{chunk}
+
\index{Meshveliani, Sergei D.}
\begin{chunk}{axiom.bib}
@inproceedings{Mesh01,
@@ -40354,6 +40442,17 @@ IBM Manual, March 1988
\subsection{U} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{chunk}{axiom.bib}
+@misc{Unkn13,
+ author = "Unknown",
+ title = "HindleyMilner Type Inference",
+ link = "\url{https://www7.in.tum.de/um/courses/seminar/sove/SS2013/final/hindleymilner.slides.pdf}",
+ paper = "Unkn13.pdf"
+
+}
+
+\end{chunk}
+
+\begin{chunk}{axiom.bib}
@misc{Unkn16,
author = "Unknown",
title = "Computer Algebra Systems",
diff --git a/changelog b/changelog
index e87c31d..76bd8ef 100644
--- a/changelog
+++ b/changelog
@@ -1,3 +1,5 @@
+20170823 tpd src/axiomwebsite/patches.html 20170823.01.tpd.patch
+20170823 tpd books/bookvolbib Add proof references
20170819 tpd src/axiomwebsite/patches.html 20170819.01.tpd.patch
20170819 tpd books/bookvolbib Add references
20170817 tpd src/axiomwebsite/patches.html 20170817.01.tpd.patch
diff --git a/patch b/patch
index d5261aa..2d37cbd 100644
--- a/patch
+++ b/patch
@@ -1,686 +1,103 @@
books/bookvolbib Add references
+books/bookvolbib Add proof references
Goal: Axiom references
\index{Stoutemyer, David R.}
+\index{Melquiond, Guillaume}
\begin{chunk}{axiom.bib}
@article{Stou79,
 author = "Stoutemyer, David R.",
 title = "LISP Based Symbolic Math Systems",
 journal = "Byte Magazine",
 volume = "8",
 pages = "176192",
 year = "1979",
 link = "\url{https://ia902603.us.archive.org/30/items/bytemagazine197908/1979_08_BYTE_0408_LISP.pdf}",
 comment =
 "SCRATCHPAD is a very large computeralgebra system implemented by the
 IBM Thomas J. Watson Research Center. It is available there on an IBM
 370, and it is available from other IBM corporate sites via
 telephone. Regrettably, this fine system has not yet been released to
 the public, but it is discussed here because of its novel features.

 In its entirety, the system occupies about 1600K bytes on an IBM 370
 with virtual storage, for which an additional minimum of 100 K bytes
 is recommeded for workspace. The variety of builtin transformations
 currently lies between that of REDUCE and MACSYMA. However, each of
 the three systems has features that none of the others possess, and
 one of these features may be a decisive advantage for a particular
 application. Here are some highlights of the SCRATCHPAD system:
 \begin{itemize}
 \item The system provides singleprecision floatingpoint arithmetic
 as well as indefiniteprecision rational arithmetic
 \item The builtin unavoidable and optional algebraic transformations
 are approximately similar to those of MACSYMA.
 \item The builtin exponential, logarithmic, and trigonometric
 transformations are approximately similar to those of REDUCE.
 \item Besides builtin symbolic matrix algebra, APL like array
 operations are included, and they are even further generalized to
 permit symbolic operations of nonhomogeneous arrays and on arrays
 of indefinite or infinite size.
 \item Symbolic differentiation and integration are builtin, with
 the latter employing the powerful RischNormal algorithm.
 \item There is a particularly elegant builtin facility for
 determining Taylor series expansions.
 \item There is a builtin SOLVE function capable of determining the
 exact solution to a system of linear equations.
 \item There is a powerful patternmatching facility which serves as
 the primary mechanism for user level extensions. The associated syntax
 is at a very high level, being the closest of all computer algebra
 systems to the declarative, nonprocedural notation of mathematics.
 To implement the trigonometric multipleangle expansions, we can
 merely enter the rewrite rules:
 \[cos(n*x) == 2*cos(x)*cos((n1)*x)cos((n2)*x), n{\rm\ in\ }
 (2,3,...), x{\rm\ arb}\]
 \[sin(n*x) == 2*cos(x)*sin((n1)*x)sin((n2)*x), n{\rm\ in\ }
 (2,3,...),x{\rm\ arb}\]
 Then, whenever we subsequently enter an expression such as
 $cos(4*b)$, the response will be a corresponding expanded
 expression such as
 \[8*cos(B)  8*cos^2(B)+1\]
 Thus, programs resemble a collection of math formulae, much as they
 would appear in a book or article.
 \item SCRATCHPAD has a particularly powerful yet easily used mechanism
 for controlling the output format of expressions.. For example, the user
 can specify that an expression be displayed as a power series in x,
 with coefficients which are factored rational functions in b and c,
 etc. For large expressions, such fine control over the output may
 mean the difference between an important new discovery and an
 inconprehensible mess.
 \end{itemize}

 This generalized recursive format idea is so natural and effective
 that SCRATCHPAD is now absorbing the idea into an internal
 representation. A study of the polynomial additional algorithm in
 the previous section reveals that it is written to be applicable
 to any coefficient domain which has the algebraic properties of a
 {\sl ring}. The coefficients could be matrices, powerseries, etc.
 That coefficient domain could in turn have yet another coefficient
 domain, and so on. With a careful modular design, packages to treat
 each of these domains can be dynamically linked together so that
 code can be shared and combined in new ways without extensive
 rewriting and duplication. Then not only the output, but also the
 internal computations can be selected most suitably for a particular
 application.

 For further information about SCRATCHPAD, contact Richard Jenks
 at the IBM Thomas J. Watson Research Center, Yorktown Heights, NY
 10598",
 paper = "Stou79.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Shackell, John}
\begin{chunk}{axiom.bib}
@article{Shac90,
 author = "Shackell, John",
 title = "Growth Estimates for ExpLog Functions",
 journal = "J. Symbolic Computation",
 volume = "10",
 pages = "611632",
 abstract =
 "Explog functions are those obtained from the constant 1 and the
 variable X by means of arithmetic operations and the function symbols
 exp() and log(). This paper gives an explicit algorithm for
 determining eventual dominance of these functions modulo an oracle for
 deciding zero equivalence of constant terms. This also provides
 another proof that the dominance problem for explog functions is
 Turingreducible to the identity problem for constant terms."
}

\end{chunk}

\index{Bronstein, Manuel}
\begin{chunk}{axiom.bib}
@article{Bron88,
 author = "Bronstein, Manuel",
 title = "The Transcendental Risch Differential Equation",
 journal = "J. Symbolic Computation",
 volume = "9",
 year = "1988",
 pages = "4960",
 abstract =
 "We present a new rational algorithm for solving Risch differential
 equations in towers of transcendental elementary extensions. In
 contrast to a recent algorithm of Davenport we do not require a
 progressive reduction of the denominators involved, but use weak
 normality to obtain a formula for the denominator of a possible
 solution. Implementation timings show this approach to be faster than
 a Hermitelike reduction.",
 paper = "Bron88.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Erd\'elyi, A.}
\begin{chunk}{axiom.bib}
@book{Erde56,
 author = {Erd\'elyi, A.},
 title = "Asymptotic Expansions",
 year = "1956",
 isbn = "9780486155050",
 publisher = "Dover Publications"
}

\end{chunk}

\index{Gradshteyn, I.S.}
\index{Ryzhik, I.M.}
\begin{chunk}{axiom.bib}
@incollection{Grad80,
 author = "Gradshteyn, I.S. and Ryzhik, I.M.",
 title = "Definite Integrals of Elementary Functions",
 booktitle = "Table of Integrals, Series, and Products",
 publisher = "Academic Press",
 year = "1980",
 comment = "Chapter 34"
}

\end{chunk}

\index{Piquette, J. C.}
\begin{chunk}{axiom.bib}
@article{Piqu89,
 author = "Piquette, J. C.",
 title = "Special Function Integration",
 journal = "ACM SIGSAM Bulletin",
 volume = "23",
 number = "2",
 year = "1989",
 pages = "1121",
 abstract =
 "This article describes a method by which the integration capabilities
 of symbolicmathematics computer programs can be extended to include
 integrals that contain special functions. A summary of the theory that
 forms the basis of the method is given in Appendix A. A few integrals
 that have been evaluated using the method are presented in Appendix
 B. A more thorough development and explanation of the method is given
 in Piquette, in review (b)."
}

\end{chunk}

\index{Clarkson, M.}
\begin{chunk}{axiom.bib}
@article{Clar89,
 author = "Clarkson, M.",
 title = "MACSYMA's inverse Laplace transform",
 journal = "ACM SIGSAM Bulletin",
 volume = "23",
 number = "1",
 year = "1989"
 pages = "3338",
 abstract =
 "The inverse Laplace transform capability of MACSYMA has been improved
 and extended. It has been extended to evaluate certain limits, sums,
 derivatives and integrals of Laplace transforms. It also takes
 advantage of the inverse Laplace transform convolution theorem, and
 can deal with a wider range of symbolic parameters.",
 paper = "Clar89.pdf"
}

\end{chunk}

\index{Schou, Wayne C.}
\index{Broughan, Kevin A.}
\begin{chunk}{axiom.bib}
@article{Scho89,
 author = "Schou, Wayne C. and Broughan, Kevin A.",
 title = "The Risch Algorithms of MACSYMA and SENAC",
 journal = "ACM SIGSAM",
 volume = "23",
 number = "3",
 year = "1989",
 abstract =
 "The purpose of this paper is to report on a computer implementation
 of the Risch algorithm for the symbolic integration of rational
 functions containing nested exponential and logarithms. For the class
 of transcendental functions, the Risch algorithm [4] represents a
 practical method for symbolic integration. Because the Risch algorithm
 describes a decision procedure for transcendental integration it is an
 ideal final step in an integration package. Although the decision
 characteristic cannot be fully realised in a computer system, because
 of major algebraic problems such as factorisation, zeroequivalence
 and simplification, the potential advantages are considerable.",
 paper = "Scho89.pdf",
}

\end{chunk}

\index{Smith, Paul}
\index{Sterling, Leon}
\begin{chunk}{axiom.bib}
@article{Smit83,
 author = "Smith, Paul and Sterling, Leon",
 title = "Of Integration by Man and Machine",
 journal = "ACM SIGSAM",
 volume = "17",
 number = "34",
 year = "1983",
 abstract =
 "We describe a symbolic integration problem arising from an
 application in engineering. A solution is given and compared with the
 solution generated by the REDUCE integration package running at
 Cambridge. Nontrivial symbol manipulation, particularly
 simplification, is necessary to reconcile the answers.",
 paper = "Smit83.pdf"
}

\end{chunk}

\index{Renbao, Zhong}
\begin{chunk}{axiom.bib}
@article{Renb82,
 author = "Renbao, Zhong",
 title = "An Algorithm for Avoiding Complex Numbers in Rational Function
 Integration",
 journal = "ACM SIGSAM",
 volume = "16",
 number = "3",
 pages = "3032",
 year = "1982",
 abstract =
 "Given a proper rational function $A(x)/B(x)$ where $A(x)$ and $B(x)$
 both are in $R[x]$ with $gcd(A(x), B(x))= 1$, $B(x)$ monic and
 $deg(A(x)) < deg(B(x))$, from the Hermite algorithm for rational
 function integration in [3], we obtain
 \[\int{frac{A(x)}{B(x)}~dx = S(x)+\int{\frac{T(x)}{B^*(x)}~dx\]
 where $S(x)$ is a rational function
 which is called the rational part of the integral of $A(x)/B(x)$ in
 eq. (1), $B^*(x)$ is the greatest squarefree factor of $B(x)$, and
 $T(x)$ is in $R[x]$ with $deg(T(x)) < deg(B^*(x))$. The integral of
 $T(x)/B^*(x)$ is called the transcendental part of the integral of
 $A(x)/B(x)$ in eq. (1).",
 paper = "Renb82.pdf"
}

\end{chunk}

\index{Davenport, James H.}
\begin{chunk}{axiom.bib}
@article{Dave82a,
 author = "Davenport, Jamess H.",
 title = "The Parallel Risch Algorithm (I)",
 journal = "Lecture Notes in Computer Science",
 volume = "144",
 year = "1982",
+@article{Melq12,
+ author = "Melquiond, Guillaume",
+ title = "Floatingpoint arithmetic in the Coq system",
+ journal = "Information and Computation",
+ volume = "216",
+ pages = "1423",
+ year = "2012",
+ link = "\url{https://www.lri.fr/~melquion/doc/08mc8article.pdf}",
abstract =
 "In this paper we review the socalled ``parallel Risch'' algorithm for
 the integration of transcendental functions, and explain what the
 problems with it are. We prove a positive result in the case of
 logarithmic integrands.",
 paper = "Dave82a.pdf"
+ "The process of proving some mathematical theorems can be greatly
+ reduced by relying on numericallyintensive computations with a
+ certified arithmetic. This article presents a formalization of
+ floatingpoint arithmetic that makes it possible to efficiently
+ compute inside the proofs of the Coq system. This certified library is
+ a multiradix and multiprecision implementation free from underflow
+ and overflow. It provides the basic arithmetic operators and a few
+ elementary functions.",
+ paper = "Melq12.pdf"
}
\end{chunk}
\index{Davenport, James H.}
+\index{Akers, Robert Lawrence}
\begin{chunk}{axiom.bib}
@article{Dave85b
 author = "Davenport, Jamess H.",
 title = "The Parallel Risch Algorithm (II)",
 journal = "ACM TOMS",
 volume = "11",
 number = "4",
 pages = "356362",
 year = "1985",
 abstract =
 "It is proved that, under the usual restrictions, the denominator of
 the integral of a purely logarithmic function is the expected one,
 that is, all factors of the denominator of the integrand have their
 multiplicity decreased by one. Furthermore, it is determined which new
 logarithms may appear in the integration.",
 paper = "Dave85b.pdf"
}

\end{chunk}

\index{Davenport, James H.}
\begin{chunk}{axiom.bib}
@article{Dave82b,
 author = "Davenport, Jamess H.",
 title = "The Parallel Risch Algorithm (III): use of tangents",
 journal = "ACM SIGSAM",
 volume = "16",
 number = "3",
 pages = "36",
 year = "1982",
 abstract =
 "In this note, we look at the extension to the parallel Risch
 algorithm (see, e.g., the papers by Norman & Moore [1977], Norman &
 Davenport [1979], ffitch [1981] or Davenport [1982] for a description
 of the basic algorithm) which represents trigonometric functions in
 terms of tangents, rather than instead of complex exponentials.",
 paper = "Dave82b.pdf"
}

\end{chunk}

\index{Kempelmann, Helmut}
\begin{chunk}{axiom.bib}
@article{Kemp81,
 author = "Kempelmann, Helmut",
 title = "Recursive Algorithm for the Fast Calculation of the Limit of
 Derivatives at Points of Indeterminateness",
 journal = "ACM SIGSAM",
 volume = "15",
 number = "4",
 year = "1981",
 pages = "1011",
+@phdthesis{Aker93,
+ author = "Akers, Robert Lawrence",
+ title = "Strong Static Type Checking for Functional Common Lisp",
+ school = "Univerity of Texas at Austin",
+ year = "1995",
+ link = "\url{ftp://www.cs.utexas.edu/pub/boyer/clireports/096.pdf}",
abstract =
 "It is a common method in probability and queueing theory to gain the
 $n$th moment $E[x^n]$ of a random variable X
 with density function $f_x(x)$
 by the $n$th derivative of the corresponding Laplace transform $L(s)$ at
 the point $s = 0$
 \[E[x^n] = (1)^n\cdot L^{n}(O)\]
 Quite often we encounter indetermined
 expressions of the form $0/0$ which normally are treated by the rule of
 L'Hospital. This is a time and memory consuming task requiring
 greatest common divisor cancellations. This paper presents an
 algorithm that calculates only those derivatives of numerator and
 denominator which do not equal zero when taking the limit /1/. The
 algorithm has been implemented in REDUCE /2/. It is simpler and more
 efficient than that one proposed by /3/.",
 paper = "Kemp81.pdf"
}
+ "This thesis presents a type system which supports the strong static
+ type checking of programs developed in an applicative subset of the
+ Common Lisp language. The Lisp subset is augmented with a guard
+ construct for function definitions, which allows type restrictions to
+ be placed on the arguments. Guards support the analysis and safe use
+ of partial functions, like CAR, which are welldefined only for
+ arguments of a certain type.
\end{chunk}
+ A language of type descriptors characterizes the type
+ domain. Descriptors are composed into function signatures which
+ characterize the guard and which map various combinations of actual
+ parameter types to possible result types. From a base of signatures
+ for a small collection of primitive functions, the system infers
+ signatures for newly submitted functions.
\index{Norfolk, Timothy S.}
\begin{chunk}{axiom.bib}
@article{Norf82,
 author = "Norfolk, Timothy S.",
 title = "Symbolic Computation of Residues at Poles and Essential
 Singularities",
 journal = "ACM SIGSAM",
 volume = "16",
 number = "1",
 year = "1982",
 pages = "1723",
 abstract =
 "Although most books on the theory of complex variables include a
 classification of the types of isolated singularities, and the
 applications of residue theory, very few concern themselves with
 methods of computing residues. In this paper we derive some results on
 the calculation of residues at poles, and some special classes of
 essential singularities, with a view to implementing an algorithm in
 the VAXIMA computer algebra system.",
 paper = "Norf82.pdf"
}
+ The system includes a type inference algorithm which handles
+ constructs beyond the constraints of MLstyle systems. Most notable
+ are the free use of CONS to construct objects of undeclared type and
+ the use of IF forms whose two result components have unrelated types,
+ resulting in ad hoc polymorphism. Accordingly, the type descriptor
+ language accommodates disjunction, unrestricted CONS, recursive type
+ forms, and ad hoc polymorphic function signatures. As with traditional
+ type inference systems, unification is a central algorithm, but the
+ richness of our type language complicates many component algorithms,
+ including unification. Special treatment is given to recognizer
+ functions, which are predicates determining whether an object is of a
+ particular type. Type inference in this setting is undecidable, so the
+ algorithm is heuristic and is not complete.
\end{chunk}

\index{Belovari, G.}
\begin{chunk}{axiom.bib}
@article{Belo83,
 author = "Belovari, G.",
 title = "Complex Analysis in Symbolic Computing of some Definite Integrals",
 journal = "ACM SIGSAM",
 volume = "17",
 number = "2",
 year = "1983",
 pages = "611",
 paper = "Belo83.pdf"
+ The semantics of the system are in terms of a function which
+ determines whether values satisfy descriptors with respect to a
+ binding of type variables. The soundness of each signature emitted by
+ the system is validated by a signature checker, whose properties are
+ formally specified with respect to the formal semantics and proven to
+ hold. The checker algorithm is substantially simpler than the
+ inference algorithm, as it need not perform operations such as
+ discovering closed recursive forms. Thus, its proof is both more
+ straightforward to construct and easier to validate than a direct
+ proof of the inference algorithm would be.",
+ paper = "Aker93.pdf"
}
\end{chunk}
\index{Wang, Paul S.}
+\index{Byrd, William}
\begin{chunk}{axiom.bib}
@phdthesis{Wang71,
 author = "Wang, Paul S.",
 title = "Evaluation of Definite Integrals by Symbolic Manipulation",
 school = "MIT",
 year = "1971",
 link = "\url{http://publications.csail.mit.edu/lcs/pubs/pdf/MITLCSTR092.pdf}",
 comment = "MIT/LCS/TR92",
 abstract =
 "A heuristic computer program for the evaluation of real definite
 integrals of elementary functions is described This program, called
 WANDERER, (WANg's DEfinite integRal EvaluatoR), evaluates many proper
 and improper integrals. The improper integrals may have a finite or
 infinite range of integration. Evaluation by contour integration and
 residue theory is among the methods used. A program called DELIMITER
 (DEfinitive LIMIT EvaluatoR) is used for the limit computations needed
 in evaluating some definite integrals. DELIMITER is a heuristic
 program written for computing limits of real or complex analytic
 functions. For real functions of a real variable, onesided as well
 been implmented in the MACSYMA system, a symbolic and algebraic
 manipulation system being developed at Project MAC, MIT. A typical
 problem in applied mathematics, namely asymptotic analysis of a
 definite integral, is solved using MACSYMA to demonstrate the
 usefulness of such a system and the facilities provided by WANDERER."
 paper = "Wang71.pdf"
+@misc{Byrd17,
+ author = "Byrd, William",
+ title = "The Most Beautiful Program Ever Written",
+ link = "\url{https://www.youtube.com/watch?v=OyfBQmvr2Hc}",
+ comment = "See miniKanren and Barliman (program synthesis with proof)"
}
\end{chunk}
\index{Harrington, Steven J.}
\begin{chunk}{axiom.bib}
@article{Harr79,
 author = "Harrington, Steven J.",
 title = "A Symbolic Limit Evaluation Program in REDUCE",
 journal = "ACM SIGSAM",
 volume = "13",
 number = "1",
 year = "1979",
 pages = "2731",
 abstract =
 "A method for the automatic evaluation of algebraic limits is
 described. It combines many of the techniques previously employed,
 including topdown recursive evaluation, power series expansion, and
 L'Hôpital's rule. It introduces the concept of a special algebraic
 form for limits. The method has been implemented in MODEREDUCE.",
 paper = "Harr79.pdf"
}

\end{chunk}
+@misc{Unkn13,
+ author = "Unknown",
+ title = "HindleyMilner Type Inference",
+ link = "\url{https://www7.in.tum.de/um/courses/seminar/sove/SS2013/final/hindleymilner.slides.pdf}",
+ paper = "Unkn13.pdf"
\index{Norton, Lewis M.}
\begin{chunk}{axiom.bib}
@article{Nort80,
 author = "Norton, Lewis M.",
 title = "A Note about Laplace Transform Tables for Computer use",
 journal = "ACM SIGSAM",
 volume = "14",
 number = "2",
 year = "1980",
 pages = "3031",
 abstract =
 "The purpose of this note is to give another illustration of the fact
 that the best way for a human being to represent or process
 information is not necessarily the best way for a computer. The
 example concerns the use of a table of inverse Laplace transforms
 within a program, written in the REDUCE language [1] for symbolic
 algebraic manipulation, which solves linear ordinary differential
 equations with constant coefficients using Laplace transform
 methods. (See [2] for discussion of an earlier program which solved
 such equations.)",
 paper = "Nort80.pdf"
}
\end{chunk}
\index{Stoutemyer, David R.}
\begin{chunk}{axiom.bib}
@article{Stou76,
 author = "Stoutemyer, David R.",
 title = "Automatic Simplification for the Absolutevalue Function and its
 Relatives",
 journal = "ACM SIGSAM",
 volume = "10",
 number = "4",
 year = "1976",
 pages = "4849",
 abstract =
 "Computer symbolic mathematics has made impressive progress for the
 automatic simplification of rational expressions, algebraic
 expressions, and elementary transcendental expressions. However,
 existing computeralgebra systems tend to provide little or no
 simplification for the absolutevalue function or for its relatives
 such as the signum, unit ramp, unit step, max, min, modulo, and Dirac
 delta functions. Although these functions lack certain desireable
 properties that are helpful for canonical simplification, there are
 opportunities for some ad hoc simplification. Moreover, a perusal of
 most mathematics, engineering, and scientific journals or texts
 reveals that these functions are too prevalent to be ignored.This
 article describes specific simplification rules implemented in a
 program that supplements the builtin rules for the MACSYMA ABS and
 SIGNUM functions.",
 paper = "Stou76.pdf"
}

\end{chunk}

\index{Kanoui, Henry}
\begin{chunk}{axiom.bib}
@article{Kano76,
 author = "Kanoui, Henry",
 title = "Some Aspects of Symbolic Integration via Predicate Logic
 Programming",
 journal = "ACM SIGSAM",
 volume = "10",
 number = "4",
 year = "1976",
 pages = "29--42",
 abstract =
 "During the past years, various algebraic manipulations systems have
 been described in the literature. Most of them are implemented via
 ``classic'' programming languages like Fortran, Lisp, PL1 ... We propose
 an alternative approach: the use of Predicate Logic as a programming
 language.",
 paper = "Kano76.pdf"
}

\end{chunk}

\index{Gentleman, W. Morven}
\begin{chunk}{axiom.bib}
@article{Gent74,
 author = "Gentleman, W. Morven",
 title = "Experience with Truncated Power Series",
 journal = "ACM SIGSAM",
 volume = "8",
 number = "3",
 year = "1974",
 pages = "61--62",
 abstract =
 "The truncated power series package in ALTRAN has been available for
 over a year now, and it has proved itself to be a useful and exciting
 addition to the armoury of symbolic algebra. A wide variety of
 problems have been attacked with this tool: moreover, through use in
 the classroom, we have had the opportunity to observe how a large
 number of people react to the availability of this tool.",
 paper = "Gent74.pdf"
}

\end{chunk}

\index{Loos, Rudiger}
\begin{chunk}{axiom.bib}
@article{Loos72a,
 author = "Loos, Rudiger",
 title = "Analytic Treatment of Three Similar Fredholm Integral Equations
 of the second kind with REDUCE 2",
 journal = "ACM SIGSAM",
 volume = "21",
 year = "1972",
 pages = "32--40"
}

\end{chunk}

\index{Collins, George E.}
\begin{chunk}{axiom.bib}
@article{Coll69,
 author = "Collins, George E.",
 title = "Algorithmic Approaches to Symbolic Integration and Simplification",
 journal = "ACM SIGSAM",
 volume = "12",
 year = "1969",
 pages = "5016",
 abstract =
 "This panel session followed the format announced by SIGSAM Chairman
 Carl Engelman in the announcement published in SIGSAM Bulletin No. 10
 (October 1968). Carl gave a brief (five or ten minutes) introduction
 to the subject and introduced Professor Joel Moses (M. I. T.). Joel
 presented an excellent exposition of the recent research
 accomplishments of the other panel members, synthesizing their work
 into a single large comprehensible picture. His presentation was
 greatly enhanced by a series of 27 carefully prepared slides
 containing critical examples and basic formulas, and was certainly the
 feature of the show. A panel discussion followed, with some audience
 participation. Panel members were Dr. W. S. Brown (Bell Telephone
 Laboratories), Professor B. F. Caviness (Duke University), Dr. Daniel
 Richardson and Dr. R. H. Risch (IBM).",
 paper = "Coll69.pdf"
}

\end{chunk}

\index{Kasper, Toni}
\begin{chunk}{axiom.bib}
@article{Kasp80,
 author = "Kasper, Toni",
 title = "Integration in Finite Terms: The Liouville Theory",
 journal = "ACM SIGSAM",
 volume = "14",
 number = "4",
 year = "1980",
 pages = "2--8",
 abstract =
 "The search for elementary antiderivatives leads from classical
 analysis through modern algebra to contemporary research in computer
 algorithms.",
 paper = "Kasp80.pdf"
}

\end{chunk}

\index{Munroe, M.E.}
\begin{chunk}{axiom.bib}
@book{Munr53,
 author = "Munroe, M.E.",
 title = "Introduction to Measure and Integration",
 publisher = "Addison-Wesley",
 year = "1953"
}

\end{chunk}

\index{Hardy, G.}
\index{Littlewood, J.E.}
\index{Polya, G.}
\begin{chunk}{axiom.bib}
@book{Hard64,
 author = "Hardy, G. and Littlewood, J.E. and Polya, G.",
 title = "Inequalities",
 publisher = "Cambridge University Press",
 year = "1964"
}

\end{chunk}

\index{Baddoura, Jamil}
\begin{chunk}{axiom.bib}
@article{Badd06,
 author = "Baddoura, Jamil",
 title = "Integration in Finite Terms with Elementary Functions and
 Dilogarithms",
 journal = "J. Symbolic Computation",
 volume = "41",
 number = "8",
 year = "2006",
 pages = "909--942",
 abstract =
 "In this paper, we report on a new theorem that generalizes
 Liouville's theorem on integration in finite terms. The new theorem
 allows dilogarithms to occur in the integral in addition to
 transcendental elementary functions. The proof is based on two
 identities for the dilogarithm, that characterize all the possible
 algebraic relations among dilogarithms of functions that are built up
 from the rational functions by taking transcendental exponentials,
 dilogarithms, and logarithms. This means that we assume the integral
 lies in a transcendental tower.",
 paper = "Badd06.pdf"
}

\end{chunk}

\index{Kahan, William}
\begin{chunk}{axiom.bib}
@misc{Kaha90,
 author = "Kahan, William",
 title = "The Persistence of Irrationals in Some Integrals",
 year = "1990",
 abstract =
 "Computer algebra systems are expected to simplify formulas they
 obtain for symbolic integrals whenever they can, and often they
 succeed. However, the formulas so obtained may then produce incorrect
 results for symbolic definite integrals."
}

\end{chunk}
diff --git a/src/axiom-website/patches.html b/src/axiom-website/patches.html
index e054731..8fddb5a 100644
--- a/src/axiom-website/patches.html
+++ b/src/axiom-website/patches.html
@@ -5798,6 +5798,8 @@ goals - a newly added file to explain current goals
books/bookvolbib Add references
20170819.01.tpd.patch
books/bookvolbib Add references
+20170823.01.tpd.patch
+books/bookvolbib Add proof references

1.9.1