From 0a626d280d6a54ffcfa7c4871d618382227c92f0 Mon Sep 17 00:00:00 2001
From: Tim Daly
Date: Wed, 16 May 2018 03:09:17 -0400
Subject: [PATCH] books/bookvolbib add references
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Goal: Proving Axiom Correct
\index{Walther, J.S.}
\begin{chunk}{axiom.bib}
@misc{Walt71,
author = "Walther, J.S.",
title = {{A Unified Algorithm for Elementary Functions}},
link = "\url{}",
year = "1971",
abstract =
"This paper describes a single unified algorithm for the calculation
of elementary functions including multiplication, division, sin,
cos, tan, arctan, sinh, cosh, tanh, arctanh, ln, exp and square root.
The basis for the algorithm is coordinate rotation in a linear,
circular, or hyperbolic coordinate system depending on which function
is to be calculated. The only operations required are shifting,
adding, subtracting and the recall of prestored constants. The
limited domain of convergence of the algorithm is calculated,
leading to a discussion of the modifications required to extend the
domain for floating point calculations.
A hardware floating point processor using the algorithm was built at
Hewlett-Packard Laboratories. The block diagram of the processor, the
microprogram control used for the algorithm, and measures of actual
performance are shown.",
paper = "Walt71.pdf"
}
\end{chunk}
\index{Friedman, Daniel P.}
\index{Wise, David S.}
\begin{chunk}{axiom.bib}
@techreport{Frie76,
author = "Friedman, Daniel P. and Wise, David S.",
title = {{CONS should not Evaluate its Arguments}},
institution = "Indiana University",
number = "TR 44",
year = "1976",
abstract =
"The constructor function which allocates and fills records in
recursive, side-effect-free procedural languages is redefined to be a
non-strict (Vuillemin 1974) elementary operation. Instead of
evaluating its arguments, it builds suspensions of them which are not
coerced until the suspension is accessed by a strict elementary
function. The resulting evaluation procedures are strictly more
powerful than existing schemes for languages such as LISP. The main
results are that Landin's streams are subsumed into McCarthy's LISP
merely by the redefinition of elementary functions, that invocations of
LISP's evaluator can be minimized by redefining the elementary
functions without redefining the interpreter, and as a strong
conjecture, that redefining the elementary functions yields the least
fixed-point semantics for McCarthy's evaluation scheme. This new
insight into the role of construction functions will do much to ease
the interface between recursive programmers and iterative programmers,
as well as the interface between programmers and data structure
designers.",
paper = "Frie76.pdf",
keywords = "printed"
}
\end{chunk}
\index{Sarma, Gopal}
\index{Hay, Nick J.}
\begin{chunk}{axiom.bib}
@article{Sarm17,
author = "Sarma, Gopal and Hay, Nick J.",
title = {{Robust Computer Algebra, Theorem Proving, and Oracle AI}},
journal = "Informatica",
volume = "41",
number = "3",
link = "\url{https://arxiv.org/pdf/1708.02553.pdf}",
year = "2017",
abstract =
"In the context of superintelligent AI systems, the term 'oracle' has
two meanings. One refers to modular systems queried for
domain-specific tasks. Another usage, referring to a class of systems
which may be useful for addressing the value alignment and AI control
problems, is a superintelligent AI system that only answers questions.
The aim of this manuscript is to survey contemporary research problems
related to oracles which align with long-term research goals of AI
safety. We examine existing question answering systems and argue that
their high degree of architectural heterogeneity makes them poor
candidates for rigorous analysis as oracles. On the other hand, we
identify computer algebra systems (CASs) as being primitive examples
of domain-specific oracles for mathematics and argue that efforts to
integrate computer algebra systems with theorem provers, systems which
have largely been developed independent of one another, provide a
concrete set of problems related to the notion of provable safety that
has emerged in the AI safety community. We review approaches to
interfacing CASs with theorem provers, describe well-defined
architectural deficiencies that have been identified with CASs, and
suggest possible lines of research and practical software projects for
scientists interested in AI safety.",
paper = "Sarm17.pdf",
keywords = "printed, axiomref"
}
\end{chunk}
\index{Keller, Chantal}
\begin{chunk}{axiom.bib}
@phdthesis{Kell13,
author = "Keller, Chantal",
title = {{A Matter of Trust: Skeptical Communication Between Coq and
External Provers}},
school = "Ecole Polytechnique",
year = "2013",
link =
"\url{https://www.lri.fr/~keller/Documentsrecherche/Publications/thesis13.pdf}",
abstract =
"This thesis studies the cooperation between the Coq proof assistant
and external provers through proof witnesses. We concentrate on two
different kinds of provers that can return certificates: first, answers
coming from SAT and SMT solvers can be checked in Coq to increase both
the confidence in these solvers and Coq's automation; second,
theorems established in interactive provers based on HigherOrder
Logic can be exported to Coq and checked again, in order to offer the
possibility to produce formal developments which mix these two
different logical paradigms. It ended up in two software : SMTCoq, a
bidirectional cooperation between Coq and SAT/SMT solvers, and
HOLLIGHTCOQ, a tool importing HOL Light theorems into Coq.
For both tools, we took great care to define a modular and efficient
architecture, based on three clearly separated ingredients: an
embedding of the formalism of the external tool inside Coq which is
carefully translated into Coq terms, a certified checker to establish
the proofs using the certificates and an OCaml preprocessor to transform
proof witnesses coming from different provers into a generic
certificate. This division allows that a change in the format of proof
witnesses only affects the preprocessor, but no proved Coq code.
Another fundamental component for efficiency and modularity is
computational reflection, which exploits the computational power of
Coq to establish generic and small proofs based on the certificates.",
paper = "Kell13.pdf"
}
\end{chunk}
\index{Farmer, William M.}
\begin{chunk}{axiom.bib}
@article{Farm07,
author = "Farmer, William M.",
title = {{Biform Theories in Chiron}},
journal = "LNCS",
volume = "4573",
pages = "66--79",
year = "2007",
abstract =
"An axiomatic theory represents mathematical knowledge declaratively
as a set of axioms. An algorithmic theory represents mathematical
knowledge procedurally as a set of algorithms. A biform theory is
simultaneously an axiomatic theory and an algorithmic theory. It
represents mathematical knowledge both declaratively and procedurally.
Since the algorithms of algorithmic theories manipulate the syntax of
expressions, biform theories — as well as algorithmic theories — are
difficult to formalize in a traditional logic without the means to
reason about syntax. Chiron is a derivative of
von Neumann--Bernays--G{\"o}del (NBG) set theory that is intended to be a
practical, general-purpose logic for mechanizing mathematics. It
includes elements of type theory, a scheme for handling undefinedness,
and a facility for reasoning about the syntax of expressions. It is an
exceptionally well-suited logic for formalizing biform theories. This
paper defines the notion of a biform theory, gives an overview of
Chiron, and illustrates how biform theories can be formalized in Chiron.",
paper = "Farm07.pdf",
keywords = "printed"
}
\end{chunk}
\index{Carette, Jacques}
\index{Farmer, William M.}
\index{Sorge, Volker}
\begin{chunk}{axiom.bib}
@inproceedings{Care07,
author = "Carette, Jacques and Farmer, William M. and Sorge, Volker",
title = {{A Rational Reconstruction of a System for Experimental
Mathematics}},
booktitle = "14th Workshop on Automated Reasoning",
publisher = "unknown",
year = "2007",
abstract =
"In previous papers we described the implementation of a system
which combines mathematical object generation, transformation and
filtering, conjecture generation, proving and disproving for
mathematical discovery in non-associative algebra. While the system
has generated novel, fully verified theorems, their construction
involved a lot of ad hoc communication between disparate systems. In
this paper we carefully reconstruct a specification of a subprocess
of the original system in a framework for trustable communication
between mathematics systems put forth by us. It employs the concept
of biform theories that enables the combined formalisation of the
axiomatic and algorithmic theories behind the generation
process. This allows us to gain a much better understanding of the
original system, and exposes clear generalisation opportunities.",
paper = "Care07.pdf",
keywords = "printed"
}
\end{chunk}
\index{Farmer, William M.}
\begin{chunk}{axiom.bib}
@inproceedings{Farm00,
author = "Farmer, William M.",
title = {{A Proposal for the Development of an Interactive
Mathematics Laboratory for Mathematics Education}},
booktitle = "Workshop on Deduction Systems for Mathematics Education",
pages = "20--25",
year = "2000",
paper = "Farm00.pdf",
keywords = "axiomref, printed"
}
\end{chunk}
\index{Farmer, William M.}
\begin{chunk}{axiom.bib}
@article{Farm04,
author = "Farmer, William M.",
title = {{MKM: A New Interdisciplinary Field of Research}},
journal = "SIGSAM",
volume = "38",
pages = "47--52",
year = "2004",
abstract =
"Mathematical Knowledge Management (MKM) is a new interdisciplinary
field of research in the intersection of mathematics, computer
science, library science, and scientific publishing. Its objective is
to develop new and better ways of managing mathematical knowledge
using sophisticated software tools. Its grand challenge is to create
a universal digital mathematics library (UDML), accessible via the
World Wide Web, that contains essentially all mathematical knowledge
(intended for the public). The challenges facing MKM are daunting,
but a UDML, even just partially constructed, would transform how
mathematics is learned and practiced.",
paper = "Farm04.pdf",
keywords = "printed, axiomref"
}
\end{chunk}
\index{Farmer, William M.}
\index{Mohrenschildt, Martin v.}
\begin{chunk}{axiom.bib}
@inproceedings{Farm00a,
author = "Farmer, William M. and Mohrenschildt, Martin v.",
title = {{Transformers for Symbolic Computation and Formal Deduction}},
booktitle = "CADE-17",
pages = "36--45",
year = "2000",
abstract =
"A transformer is a function that maps expressions to expressions.
Many transformational operators — such as expression evaluators and
simplifiers, rewrite rules, rules of inference, and decision
procedures — can be represented by transformers. Computations and
deductions can be formed by applying sound transformers in
sequence. This paper introduces machinery for defining sound
transformers in the context of an axiomatic theory in a formal
logic. The paper is intended to be a first step in a development of an
integrated framework for symbolic computation and formal deduction.",
paper = "Farm00a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Farmer, William M.}
\index{Larjani, Pouya}
\begin{chunk}{axiom.bib}
@misc{Farm14,
author = "Farmer, William M. and Larjani, Pouya",
title = {{Frameworks for Reasoning about Syntax that Utilize
Quotation and Evaluation}},
link = "\url{http://imps.mcmaster.ca/doc/syntax.pdf}",
year = "2014",
abstract =
"It is often useful, if not necessary, to reason about the syntactic
structure of an expression in an interpreted language (i.e., a
language with a semantics). This paper introduces a mathematical
structure called a syntax framework that is intended to be an abstract
model of a system for reasoning about the syntax of an interpreted
language. Like many concrete systems for reasoning about syntax, a
syntax framework contains a mapping of expressions in the
interpreted language to syntactic values that represent the syntactic
structures of the expressions; a language for reasoning about the
syntactic values; a mechanism called quotation to refer to the
syntactic value of an expression; and a mechanism called evaluation to
refer to the value of the expression represented by a syntactic value.
A syntax framework provides a basis for integrating reasoning about
the syntax of the expressions with reasoning about what the
expressions mean. The notion of a syntax framework is used to discuss
how quotation and evaluation can be built into a language and to
define what quasiquotation is. Several examples of syntax frameworks
are presented.",
paper = "Farm14.pdf",
keywords = "printed"
}
\end{chunk}
\index{Carette, Jacques}
\index{Farmer, William M.}
\index{O'Connor, Russell}
\begin{chunk}{axiom.bib}
@misc{Care11c,
author = "Carette, Jacques and Farmer, William M. and O'Connor, Russell",
title = {{MathScheme: Project Description}},
year = "2011",
link = "\url{http://imps.mcmaster.ca/doc/cicm2011projdesc.pdf}",
paper = "Care11c.pdf",
keywords = "printed"
}
\end{chunk}
\index{Carette, Jacques}
\index{Farmer, William M.}
\begin{chunk}{axiom.bib}
@article{Care08,
author = "Carette, Jacques and Farmer, William M.",
title = {{High-Level Theories}},
journal = "LNCS",
volume = "5144",
pages = "232--245",
year = "2008",
abstract =
"We introduce high-level theories in analogy with high-level
programming languages. The basic point is that even though one can
define many theories via simple, low-level axiomatizations, that is
neither an effective nor a comfortable way to work with such theories.
We present an approach which is closer to what users of mathematics
employ, while still being based on formal structures.",
paper = "Care08.pdf",
keywords = "printed"
}
\end{chunk}
\index{Carette, Jacques}
\index{O'Connor, Russell}
\begin{chunk}{axiom.bib}
@article{Care12,
author = "Carette, Jacques and O'Connor, Russell",
title = {{Theory Presentation Combinators}},
journal = "LNCS",
volume = "7362",
year = "2012",
abstract =
"We motivate and give semantics to theory presentation combinators
as the foundational building blocks for a scalable library of
theories. The key observation is that the category of contexts and
fibered categories are the ideal theoretical tools for this
purpose.",
paper = "Care12.pdf",
keywords = "printed"
}
\end{chunk}
\index{Musser, David R.}
\index{Kapur, Deepak}
\begin{chunk}{axiom.bib}
@article{Muss82,
author = "Musser, David R. and Kapur, Deepak",
title = {{Rewrite Rule Theory and Abstract Data Type Analysis}},
journal = "Lecture Notes in Computer Science",
volume = "144",
pages = "77--90",
year = "1982",
paper = "Muss82.pdf",
keywords = "axiomref"
}
\end{chunk}
\index{Lazard, Daniel}
\begin{chunk}{axiom.bib}
@article{Laza82,
author = "Lazard, Daniel",
title = {{Commutative Algebra and Computer Algebra}},
journal = "Lecture Notes in Computer Science",
volume = "144",
pages = "40--48",
year = "1982",
abstract =
"It is well known that computer algebra deals with commutative rings
such as the integers, the rationals, the modular integers and
polynomials over such rings.
On the other hand, commutative algebra is that part of mathematics
which studies commutative rings.
Our aim is to make this link more precise. It will appear that most of
the constructions which appear in classical commutative algebra can be
done explicitly in finite time. However, there are
exceptions. Moreover, most of the known algorithms are intractable:
The problems are generally exponential by themselves, but many
algorithms are over-exponential. It needs a lot of work to find
better methods, and to implement them, with the final hope to open a
computation domain larger than this one which is possible by hand.
To illustrate the limits and the possibilities of computerizing
commutative algebra, we describe an algorithm which tests the
primality of a polynomial ideal and we give an example of a single
algebraic extension of fields $K\subset L$ such that there exists an
algorithm of factorization for the polynomials over $K$, but not for
the polynomials over $L$.",
paper = "Laza82.pdf"
}
\end{chunk}
\index{Hearn, Anthony C.}
\begin{chunk}{axiom.bib}
@article{Hear82,
author = "Hearn, Anthony C.",
title = {{REDUCE -- A Case Study in Algebra System Development}},
journal = "Lecture Notes in Computer Science",
volume = "144",
pages = "263--272",
year = "1982",
paper = "Hear82.pdf",
keywords = "axiomref"
}
\end{chunk}
\index{Padget, J.A.}
\begin{chunk}{axiom.bib}
@article{Padg82,
author = "Padget, J.A.",
title = {{Escaping from Intermediate Expression Swell: A Continuing Saga}},
journal = "Lecture Notes in Computer Science",
volume = "144",
pages = "256--262",
year = "1982",
abstract =
"The notion of a closed continuation is introduced, is presented,
coroutines using function call and return based on this concept, are
applications and a functional dialect of LISP shown to be merely a
more general form of for coroutines in algebraic simplification and
are suggested, by extension function. expression Potential evaluation
and a specific example of their use is given in a novel attack on the
phenomenon of intermediate expression swell in polynomial
multiplication.",
paper = "Padg82.pdf"
}
\end{chunk}
\index{Norman, Arthur}
\begin{chunk}{axiom.bib}
@article{Norm82,
author = "Norman, Arthur",
title = {{The Development of a Vector-Based Algebra System}},
journal = "Lecture Notes in Computer Science",
volume = "144",
pages = "237--248",
year = "1982",
paper = "Norm82.pdf"
}
\end{chunk}
\index{Bordoni, L.}
\index{Colagrossi, A.}
\index{Miola, A.}
\begin{chunk}{axiom.bib}
@article{Bord82,
author = "Bordoni, L. and Colagrossi, A. and Miola, A.",
title = {{Linear Algebraic Approach for Computing Polynomial Resultant}},
journal = "Lecture Notes in Computer Science",
volume = "144",
pages = "231--236",
year = "1982",
abstract =
"This paper presents a linear algebraic method for computing the
resultant of two polynomials. This method is based on the
computation of a determinant of order equal to the minimum of the
degrees of the two given polynomials. This method turns out to be
preferable to other known linear algebraic methods both from a
computational point of view and for a total generality respect to
the class of the given polynomials. Some relationships of this
method with the polynomial pseudoremainder operation are discussed.",
paper = "Bord82.pdf"
}
\end{chunk}
\index{Arnon, Dennis S.}
\index{McCallum, Scott}
\begin{chunk}{axiom.bib}
@article{Arno82a,
author = "Arnon, Dennis S. and McCallum, Scott",
title = {{Cylindrical Algebraic Decomposition by Quantifier Eliminations}},
journal = "Lecture Notes in Computer Science",
volume = "144",
pages = "215--222",
year = "1982",
abstract =
"Cylindrical algebraic decompositions were introduced as a major
component of a new quantifier elimination algorithm for elementary
algebra and geometry (G. Collins, 1973). In the present paper we turn
the tables and show that one can use quantifier elimination for
elementary algebra and geometry to obtain a new version of the
cylindrical algebraic decomposition algorithm. A key part of our
result is a theorem, of interest in its own right, that relates the
multiplicities of the roots of a polynomial to their continuity.",
paper = "Arno82a.pdf"
}
\end{chunk}
\index{Collins, George E.}
\begin{chunk}{axiom.bib}
@article{Coll82a,
author = "Collins, George E.",
title = {{Factorization in Cylindrical Algebraic Decomposition -- Abstract}},
journal = "Lecture Notes in Computer Science",
volume = "144",
pages = "212--214",
year = "1982",
paper = "Coll82a.pdf"
}
\end{chunk}
\index{Lazard, Daniel}
\begin{chunk}{axiom.bib}
@article{Laza82a,
author = "Lazard, Daniel",
title = {{On Polynomial Factorization}},
journal = "Lecture Notes in Computer Science",
volume = "144",
pages = "144--157",
year = "1982",
abstract =
"We present new algorithms for factoring univariate polynomials
over finite fields. They are variants of the algorithms of Camion
and Cantor--Zassenhaus and differ from them essentially by
computing the primitive idempotents of the algebra $K[X]/f$ before
factoring $f$.
These algorithms are probabilistic in the following sense. The
time of computation depends on random choices, but the validity of
the result does not depend on them. So, worst case complexity,
being infinite, is meaningless and we compute average
complexity. It appears that our and Cantor--Zassenhaus algorithms
have the same asymptotic complexity and they are the best
algorithms actually known; with elementary multiplication and GCD
computation, Cantor--Zassenhaus algorithm is always better than
ours; with fast multiplication and GCD, it seems that ours is
better, but this fact is not yet proven.
Finally, for factoring polynomials over the integers, it seems
that the best strategy consists in choosing prime moduli as big as
possible in the range where the time of the multiplication does
not depend on the size of the factors (machine word size). An
accurate computation of the involved constants would be needed for
proving this estimation.",
paper = "Laza82a.pdf"
}
\end{chunk}
\index{Strachey, Christopher}
\index{Wadsworth, Christopher}
\begin{chunk}{axiom.bib}
@article{Stra00a,
author = "Strachey, Christopher and Wadsworth, Christopher",
title = {{Continuations: A Mathematical Semantics for Handling Full Jumps}},
journal = "Higher-Order and Symbolic Computation",
volume = "13",
pages = "135--152",
year = "2000",
abstract =
"This paper describes a method of giving the mathematical
semantics of programming languages which include the most general
form of jumps.",
paper = "Stra00a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Kaes, Stefan}
\begin{chunk}{axiom.bib}
@article{Kaes88,
author = "Kaes, Stefan",
title = {{Parametric Overloading in Polymorphic Programming Languages}},
journal = "LNCS",
volume = "300",
pages = "131--144",
year = "1988",
abstract =
"The introduction of unrestricted overloading in languages with type
systems based on implicit parametric polymorphism generally destroys
the principal type property: namely that the type of every expression
can uniformly be represented by a single type expression over some set
of type variables. As a consequence, type inference in the presence
of unrestricted overloading can become an NP-complete problem. In
this paper we define the concept of parametric overloading as a
restricted form of overloading which is easily combined with
parametric polymorphism. Parametric overloading preserves the
principal type property, thereby allowing the design of efficient type
inference algorithms. We present sound type deduction systems, both
for predefined and programmer defined overloading. Finally we state
that parametric overloading can be resolved either statically, at
compile time, or dynamically, during program execution.",
paper = "Kaes88.pdf",
keywords = "printed"
}
\end{chunk}
\index{Kaes, Stefan}
\begin{chunk}{axiom.bib}
@article{Kaes92,
author = "Kaes, Stefan",
title = {{Type Inference in the Presence of Overloading, Subtyping and
Recursive Types}},
journal = "ACM Lisp Pointers",
volume = "5",
number = "1",
year = "1992",
pages = "193--204",
abstract =
"We present a unified approach to type inference in the presence of
overloading and coercions based on the concept of {\sl constrained
types}. We define a generic inference system, show that subtyping and
overloading can be treated as a special instance of this system and
develop a simple algorithm to compute principal types. We prove the
decidability of type inference for the class of {\sl decomposable
predicates} and develop a canonical representation for principal types
based on {\sl most accurate simplifications} of constraint
sets. Finally, we investigate the extension of our techniques to
{\sl recursive types}.",
paper = "Kaes92.pdf",
keywords = "printed"
}
\end{chunk}
\index{Hall, Cordelia V.}
\index{Hammond, Kevin}
\index{Jones, Simon L. Peyton}
\index{Wadler, Philip L.}
\begin{chunk}{axiom.bib}
@article{Hall96,
author = "Hall, Cordelia V. and Hammond, Kevin and Jones, Simon L. Peyton
and Wadler, Philip L.",
title = {{Type Classes in Haskell}},
journal = "Trans. on Programming Languages and Systems",
volume = "18",
number = "2",
pages = "109--138",
year = "1996",
abstract =
"This article defines a set of type inference rules for resolving
overloading introduced by type classes, as used in the functional
programming language Haskell. Programs including type classes are
transformed into ones which may be typed by standard Hindley--Milner
inference rules. In contrast to other work on type classes, the rules
presented here relate directly to Haskell programs. An innovative
aspect of this work is the use of second-order lambda calculus to
record type information in the transformed program.",
paper = "Hall96.pdf",
keywords = "printed"
}
\end{chunk}
\index{Dreyer, Derek}
\index{Harper, Robert}
\index{Chakravarty, Manuel M.T.}
\index{Keller, Gabriele}
\begin{chunk}{axiom.bib}
@inproceedings{Drey07,
author = "Dreyer, Derek and Harper, Robert and Chakravarty, Manuel M.T.
and Keller, Gabriele",
title = {{Modular Type Classes}},
booktitle = "Proc. POPL'07",
pages = "63--70",
year = "2007",
abstract =
"ML modules and Haskell type classes have proven to be highly
effective tools for program structuring. Modules emphasize explicit
configuration of program components and the use of data abstraction.
Type classes emphasize implicit program construction and ad hoc
polymorphism. In this paper, we show how the implicitly-typed
style of type class programming may be supported within the framework
of an explicitly-typed module language by viewing type classes as a
particular mode of use of modules. This view offers a harmonious
integration of modules and type classes, where type class features,
such as class hierarchies and associated types, arise naturally as
uses of existing module-language constructs, such as module
hierarchies and type components. In addition, programmers have
explicit control over which type class instances are available for
use by type inference in a given scope. We formalize our approach as
a Harper--Stone-style elaboration relation, and provide a sound type
inference algorithm as a guide to implementation.",
paper = "Drey07.pdf",
keywords = "printed"
}
\end{chunk}
\index{Wehr, Stefan}
\index{Chakravarty, Manuel M.T.}
\begin{chunk}{axiom.bib}
@article{Wehr08,
author = "Wehr, Stefan and Chakravarty, Manuel M.T.",
title = {{ML Modules and Haskell Type Classes: A Constructive
Comparison}},
journal = "LNCS",
volume = "5356",
pages = "188--204",
year = "2008",
abstract =
"Researchers repeatedly observed that the module system of ML and the
type class mechanism of Haskell are related. So far, this relationship
has received little formal investigation. The work at hand fills this
gap: It introduces type-preserving translations from modules to type
classes and vice versa, which enable a thorough comparison of the two
concepts.",
paper = "Wehr08.pdf",
keywords = "printed"
}
\end{chunk}
\index{Stuckey, Peter J.}
\index{Sulzmann, Martin}
\begin{chunk}{axiom.bib}
@article{Stuc05,
author = "Stuckey, Peter J. and Sulzmann, Martin",
title = {{A Theory of Overloading}},
journal = "ACM Trans. on Programming Languages and Systems",
volume = "27",
number = "6",
pages = "1--54",
year = "2005",
abstract =
"We present a novel approach to allow for overloading of
identifiers in the spirit of type classes. Our approach relies on
the combination of the HM(X) type system framework with Constraint
Handling Rules (CHRs). CHRs are a declarative language for writing
incremental constraint solvers, that provide our scheme with a
form of programmable type language. CHRs allow us to precisely
describe the relationships among overloaded identifiers. Under
some sufficient conditions on the CHRs we achieve decidable type
inference and the semantic meaning of programs is unambiguous. Our
approach provides a common formal basis for many type class
extensions such as multiparameter type classes and functional
dependencies.",
paper = "Stuc05.pdf",
keywords = "printed"
}
\end{chunk}
\index{Reynolds, J.C.}
\begin{chunk}{axiom.bib}
@article{Reyn85,
author = "Reynolds, J.C.",
title = {{Three Approaches to Type Structure}},
journal = "LNCS",
volume = "185",
year = "1985",
abstract =
"We examine three disparate views of the type structure of
programming languages: Milner's type deduction system and polymorphic
let construct, the theory of subtypes and generic operators, and
the polymorphic or second-order typed lambda calculus. These
approaches are illustrated with a functional language including
product, sum and list constructors. The syntactic behavior of types
is formalized with type inference rules, but their semantics is
treated intuitively.",
paper = "Reyn85.pdf",
keywords = "printed"
}
\end{chunk}
\index{Balsters, Herman}
\index{Fokkinga, Maarten M.}
\begin{chunk}{axiom.bib}
@article{Bals91,
author = "Balsters, Herman and Fokkinga, Maarten M.",
title = {{Subtyping can have a simple semantics}},
journal = "Theoretical Computer Science",
volume = "87",
pages = "81--96",
year = "1991",
abstract =
"Consider a first order typed language, with semantics
$\llbracket~\rrbracket$ for expressions and types. Adding
subtyping means that a partial order $\le$ on types is defined
and that the typing rules are extended to the effect that
expression $e$ has type $\tau$ whenever $e$ has type $\sigma$ and
$\sigma \le \tau$. We show how to adapt the semantics
$\llbracket~\rrbracket$ in a {\sl simple set theoretic way},
obtaining a semantics $\llbracket~\rrbracket$ that satisfies, in
addition to some obvious requirements, also the property
$\llbracket\sigma\rrbracket \subseteq \llbracket\tau\rrbracket$,
whenever $\sigma \le \tau$.",
paper = "Bals91.pdf",
keywords = "printed"
}
\end{chunk}
\index{Cardelli, Luca}
\begin{chunk}{axiom.bib}
@article{Card84,
author = "Cardelli, Luca",
title = {{A Semantics of Multiple Inheritance}},
journal = "LNCS",
volume = "173",
year = "1984",
paper = "Card84.pdf",
keywords = "printed"
}
\end{chunk}
\index{Mosses, Peter}
\begin{chunk}{axiom.bib}
@article{Moss84,
author = "Mosses, Peter",
title = {{A Basic Abstract Semantics Algebra}},
journal = "LNCS",
volume = "173",
year = "1984",
abstract =
"It seems that there are some pragmatic advantages in using Abstract
Semantic Algebras (ASAs) instead of $\lambda$-notation in denotational
semantics. The values of ASAs correspond to 'actions' (or
'processes'), and the operators correspond to primitive ways of
combining actions. There are simple ASAs for the various independent
'facets' of actions: a functional ASA for dataflow, an imperative ASA
for assignments, a declarative ASA for bindings, etc. The aim is to
obtain general ASAs by systematic combination of these simple ASAs.
Here we specify a basic ASA that captures the common features of the
functional, imperative and declarative ASAs -- and highlights their
differences. We discuss the correctness of ASA specifications, and
sketch the proof of the consistency and (limiting) completeness of the
functional ASA, relative to a simple model.
Some familiarity with denotational semantics and algebraic
specifications is assumed.",
paper = "Moss84.pdf"
}
\end{chunk}
\index{Gross, Jason}
\index{Chlipala, Adam}
\begin{chunk}{axiom.bib}
@misc{Gros15,
author = "Gross, Jason and Chlipala, Adam",
title = {{Parsing Parses}},
link = "\url{https://people.csail.mit.edu/jgross/personal-website/papers/2015-parsing-parse-trees.pdf}",
year = "2015",
abstract =
"We present a functional parser for arbitrary context-free grammars,
together with soundness and completeness proofs, all inside Coq. By
exposing the parser in the right way with parametric polymorphism and
dependent types, we are able to use the parser to prove its own
soundness, and, with a little help from relational parametricity,
prove its own completeness, too. Of particular interest is one strange
instantiation of the type and value parameters: by parsing parse trees
instead of strings, we convince the parser to generate its own
completeness proof. We conclude with highlights of our experiences
iterating through several versions of the Coq development, and some
general lessons about dependently typed programming.",
paper = "Gros15.pdf",
keywords = "printed"
}
\end{chunk}
\index{Wehr, Stefan}
\begin{chunk}{axiom.bib}
@phdthesis{Wehr05,
author = "Wehr, Stefan",
title = {{ML Modules and Haskell Type Classes: A Constructive
Comparison}},
school = "Albert Ludwigs Universitat",
year = "2005",
abstract =
"Researchers repeatedly observed that the module system of ML and the
type class mechanism of Haskell are related. So far, this relationship
has received little formal investigation. The work at hand fills this
gap: It introduces typepreserving translations from modules to type
classes and vice versa, which enable a thorough comparison of the two
concepts.
The source language of the first translation is a subset of
Standard ML. The target language is Haskell with common extensions
and one new feature, which was developed as part of this work. The
second translation maps a subset of Haskell 98 to ML, with
wellestablished extensions. I prove that the translations
preserve type correctness and provide implementations for both.
Building on the insights obtained from the translations, I present
a thorough comparison between ML modules and Haskell type
classes. Moreover, I evaluate to what extent the techniques used
in the translations can be exploited for modular programming in
Haskell and for programming with adhoc polymorphism in ML.",
paper = "Wehr05.pdf"
}
\end{chunk}
\index{Dreyer, Derek}
\index{Crary, Karl}
\index{Harper, Robert}
\begin{chunk}{axiom.bib}
@article{Drey03,
author = "Dreyer, Derek and Crary, Karl and Harper, Robert",
title = {{A Type System for HigherOrder Modules}},
journal = "ACM SIGPLAN Notices",
volume = "38",
number = "1",
year = "2003",
link = "\url{https://www.cs.cmu.edu/~crary/papers/2003/thoms/thoms.pdf}",
abstract =
"We present a type theory for higherorder modules that accounts for
many central issues in module system design, including translucency,
applicativity , generativity , and modules as firstclass values.
Our type system harmonizes design elements from previous work,
resulting in a simple, economical account of modular programming. The
main unifying principle is the treatment of abstraction mechanisms
as computational effects. Our language is the first to provide a
complete and practical formalization of all of these critical issues
in module system design.",
paper = "Drey03.pdf",
keywords = "printed"
}
\end{chunk}
\index{Crary, Karl}
\index{Harper, Robert}
\index{Puri, Sidd}
\begin{chunk}{axiom.bib}
@inproceedings{Crar99,
author = "Crary, Karl and Harper, Robert and Puri, Sidd",
title = {{What is a Recursive Module}},
booktitle = "Conf. on Programming Language Design and Implementation",
year = "1999",
link = "\url{https://www.cs.cmu.edu/~crary/papers/1999/recmod/recmod.dvi}",
abstract =
"A hierarchical module system is an effective tool for structuring
large programs. Strictly hierarchical module systems impose an
acyclic ordering on import dependencies among program units. This
can impede modular programming by forcing mutuallydependent
components to be consolidated into a single module. Recently there
have been several proposals for module systems that admit cyclic
dependencies, but it is not clear how these proposals relate to
one another, nor how one might integrate them into an expressive
module system such as that of ML.
To address this question we provide a typetheoretic analysis of
the notion of a recursive module in the context of a
``phasedistinction'' formalism for higherorder module
systems. We extend this calculus with a recursive module mechanism
and a new form of signature, called a {\sl recursively dependent
signature}, to support the definition of recursive modules. These
extensions are justified by an interpretation in terms of more
primitive language constructs. This interpretation may also serve
as a guide for implementation.",
paper = "Crar99.pdf",
keywords = "printed"
}
\end{chunk}
\index{Dreyer, Derek}
\index{Crary, Karl}
\index{Harper, Robert}
\begin{chunk}{axiom.bib}
@techreport{Drey02,
author = "Dreyer, Derek and Crary, Karl and Harper, Robert",
title = {{A Type System for HigherOrder Modules (Expanded Version)}},
type = "technical report",
institution = "Carnegie Mellon University",
number = "CMUCS02122R",
year = "2002",
link = "\url{https://www.cs.cmu.edu/~crary/papers/2003/thoms/thomstr.pdf}",
abstract =
"We present a type theory for higherorder modules that accounts for
many central issues in module system design, including translucency,
applicativity , generativity , and modules as firstclass values.
Our type system harmonizes design elements from previous work,
resulting in a simple, economical account of modular programming. The
main unifying principle is the treatment of abstraction mechanisms
as computational effects. Our language is the first to provide a
complete and practical formalization of all of these critical issues
in module system design.",
paper = "Drey02.pdf"
}
\end{chunk}
\index{Crary, Karl}
\begin{chunk}{axiom.bib}
@techreport{Crar02,
author = "Crary, Karl",
title = {{Toward a Foundational Typed Assembly Language}},
institution = "Carnegie Mellon University",
number = "CMUCS02196",
year = "2002",
link = "\url{https://www.cs.cmu.edu/~crary/papers/2003/talt/talttr.pdf}",
abstract =
"We present the design of a typed assembly language called TALT that
supports heterogeneous tuples, disjoint sums, arrays, and a general
account of addressing modes. TALT also implements the von Neumann
model in which programs are stored in memory, and supports relative
addressing. Type safety for execution and for garbage collection are
shown by machinecheckable proofs. TALT is the first formalized typed
assembly language to provide any of these features.",
paper = "Crar02.pdf"
}
\end{chunk}
\index{Mili, Ali}
\index{Aharon, Shir}
\index{Nadkarni, Chaitanya}
\begin{chunk}{axiom.bib}
@article{Mili09,
author = "Mili, Ali and Aharon, Shir and Nadkarni, Chaitanya",
title = {{Mathematics for Reasoning about Loop Functions}},
journal = "Science of Computer Programming",
volume = "79",
year = "2009",
pages = "9891020",
abstract =
"The criticality of modern software applications, the pervasiveness of
malicious code concerns, the emergence of thirdparty software
development, and the preponderance of program inspection as a quality
assurance method all place a great premium on the ability to analyze
programs and derive their function in all circumstances of use and all
its functional detail. For Clike programming languages, one of the
most challenging tasks in this endeavor is the derivation of loop
functions. In this paper, we outline the premises of our approach to
this problem, present some mathematical results, and discuss how these
results can be used as a basis for building an automated tool that
derives the function of while loops under some conditions.",
paper = "Mili09.pdf",
keywords = "printed"
}
\end{chunk}
\index{Piroi, Florina}
\index{Buchberger, Bruno}
\index{Rosenkranz, Camelia}
\begin{chunk}{axiom.bib}
@misc{Piro08,
author = "Piroi, Florina and Buchberger, Bruno and Rosenkranz, Camelia",
title = {{Mathematical Journals as Reasoning Agents: Literature Review}},
year = "2008",
link = "\url{http://www.risc.jku.at/publications/download/risc_3442/MathAgentsforSFB2008031012h00.pdf}",
abstract =
"This report reviews the literature relevant for the research project
'Math-Agents: Mathematical Journals as Reasoning Agents' proposed by
Bruno Buchberger as a technology transfer project based on the results
of the SFB Project 'Scientific Computing', in particular the project
SFB 1302, 'Theorema'. The project aims at computer-supporting the
refereeing process of mathematical journals by tools that are mainly
based on automated reasoning and also at building up the knowledge
archived in mathematical journals in such a way that they can act as
interactive and active reasoning agents later on. In this report,
we review current mathematical software systems with a focus on the
availability of tools that can contribute to the goals of the Math-
Agents project.",
paper = "Piro08.pdf",
keywords = "printed"
}
\end{chunk}
\index{Sokolowski, Stefan}
\begin{chunk}{axiom.bib}
@article{Soko87,
author = "Sokolowski, Stefan",
title = {{Soundness of Hoare's Logic: An Automated Proof Using LCF}},
journal = "Trans. on Programming Languages and Systems",
volume = "9",
number = "1",
pages = "100120",
year = "1987",
abstract =
"This paper presents a natural deduction proof of Hoare's logic
carried out by the Edinburgh LCF theorem prover. The emphasis is
on the way Hoare's theory is presented to the LCF, which looks
very much like an exposition of syntax and semantics to human
readers; and on the programmable heuristics (tactics). We also
discuss some problems and possible improvements to the LCF.",
paper = "Soko87.pdf",
keywords = "printed"
}
\end{chunk}
\index{Wadler, Philip}
\begin{chunk}{axiom.bib}
@article{Wadl95,
author = "Wadler, Philip",
title = {{Monads for Functional Programming}},
journal = "LNCS",
volume = "925",
year = "1995",
abstract =
"The use of monads to structure functional programs is
described. Monads provide a convenient framework for simulating
effects found in other languages, such as global state, exception
handling, output, or nondeterminism. Three case studies are
looked at in detail: how monads ease the modification of a simple
evaluator; how monads act as the basis of a datatype of arrays
subject to inplace update; and how monads can be used to build
parsers.",
paper = "Wadl95.pdf",
keywords = "printed"
}
\end{chunk}
\index{Freeman, Tim}
\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@inproceedings{Free91,
author = "Freeman, Tim and Pfenning, Frank",
title = {{Refinement Types for ML}},
booktitle = "ACM SIGPLAN PLDI'91",
year = "1991",
link = "\url{http://www.cs.cmu.edu/~fp/papers/pldi91.pdf}",
abstract =
"We describe a refinement of ML's type system allowing the
specification of recursively defined subtypes of userdefined
datatypes. The resulting system of {\sl refinement types}
preserves desirable properties of ML such as decidability of type
inference, while at the same time allowing more errors to be
detected at compiletime. The type system combines abstract
interpretation with ideas from the intersection type discipline,
but remains closely tied to ML in that refinement types are given
only to programs which are already welltyped in ML.",
paper = "Free91.pdf",
keywords = "printed"
}
\end{chunk}
\index{Zeilberger, Noam}
\begin{chunk}{axiom.bib}
@misc{Zeil16,
author = "Zeilberger, Noam",
title = {{Towards a Mathematical Science of Programming}},
year = "2016"
}
\end{chunk}
\index{Zeilberger, Noam}
\begin{chunk}{axiom.bib}
@inproceedings{Zeil16a,
author = "Zeilberger, Noam",
title = {{Principles of Type Refinement}},
booktitle = "OPLSS 2016",
link = "\url{http://noamz.org/oplss16/refinementsnotes.pdf}",
year = "2016",
paper = "Zeil16a.pdf"
}
\end{chunk}
\index{McCarthy, John}
\begin{chunk}{axiom.bib}
@incollection{Mcca63,
author = "McCarthy, John",
title = {{A Basis for a Mathematical Theory of Computation}},
booktitle = "Computer Programming and Formal Systems",
year = "1963",
paper = "Mcca63.pdf"
}
\end{chunk}
\index{Scott, Dana S.}
\index{Strachey, Christopher}
\begin{chunk}{axiom.bib}
@article{Scot71,
author = "Scott, Dana S. and Strachey, Christopher",
title = {{Towards a Mathematical Semantics for Computer Languages}},
journal = "Proc. Symp. on Computers and Automata",
volume = "21",
year = "1971",
abstract =
"Compilers for highlevel languages are generally constructed to
give a complete translation of the programs into machine
language. As machines merely juggle bit patterns, the concepts of
the original language may be lost or at least obscured during this
passage. The purpose of a mathematical semantics is to give a
correct and meaningful correspondence between programs and
mathematical entities in a way that is entirely independent of an
implementation. This plan is illustrated in a very elementary
method with the usual idea of state transformations. The next
section shows why the mathematics of functions has to be modified
to accommodate recursive commands. Section 3 explains the
modification. Section 4 introduces the environments for handling
variables and identifiers and shows how the semantical equations
define equivalence of programs. Section 5 gives an exposition of
the new type of mathematical function spaces that are required for
the semantics of procedures when these are allowed in assignment
statements. The conclusion traces some of the background of the
project and points the way to future work.",
paper = "Scot71.pdf"
}
\end{chunk}
\index{Mellies, PaulAndre}
\index{Zeilberger, Noam}
\begin{chunk}{axiom.bib}
@inproceedings{Mell15,
author = "Mellies, PaulAndre and Zeilberger, Noam",
title = {{Functors are Type Refinement Systems}},
booktitle = "POPL'15",
publisher = "ACM",
year = "2015",
abstract =
"The standard reading of type theory through the lens of category
theory is based on the idea of viewing a type system as a category
of welltyped terms. We propose a basic revision of this reading;
rather than interpreting type systems as categories, we describe
them as {\sl functors} from a category of typing derivations to a
category of underlying terms. Then, turning this around, we
explain how in fact {\sl any} functor gives rise to a generalized
type system, with an abstract notion of type judgment, typing
derivations and typing rules. This leads to a purely categorical
reformulation of various natural classes of type systems as
natural classes of functors.
The main purpose of this paper is to describe the general
framework (which can also be seen as providing a categorical
analysis of {\sl refinement types}), and to present a few
applications. As a larger case study, we revisit Reynold's paper
on ``The Meaning of Types'' (2000), showing how the paper's main
results may be reconstructed along these lines.",
paper = "Mell15.pdf",
keywords = "printed"
}
\end{chunk}
\index{Shields, Mark}
\index{Jones, Simon Peyton}
\begin{chunk}{axiom.bib}
@inproceedings{Shie02,
author = "Shields, Mark and Jones, Simon Peyton",
title = {{FirstClass Modules for Haskell}},
booktitle = "9th Int. Conf. on Foundations of ObjectOriented Languages",
pages = "2840",
year = "2002",
link = "\url{http://www.microsoft.com/enus/research/wpcontent/uploads/2016/02/first_class_modules.pdf}",
abstract =
"Though Haskell's module language is quite weak, its core language
is highly expressive. Indeed, it is tantalisingly close to being
able to express much of the structure traditionally delegated to a
separate module language. However, the encoding are awkward, and
some situations can't be encoded at all.
In this paper we refine Haskell's core language to support
{\sl firstclass modules} with many of the features of MLstyle
modules. Our proposal cleanly encodes signatures, structures and
functors with the appropriate type abstraction and type sharing,
and supports recursive modules. All of these features work across
compilation units, and interact harmoniously with Haskell's class
system. Coupled with support for staged computation, we believe
our proposal would be an elegant approach to runtime dynamic
linking of structured code.
Our work builds directly upon Jones' work on parameterised
signatures, Odersky and L\"aufer's system of higherranked type
annotations, Russo's semantics of ML modules using ordinary
existential and universal quantifications, and Odersky and
Zenger's work on nested types. We motivate the system by examples,
and include a more formal presentation in the appendix.",
paper = "Shie02.pdf",
keywords = "printed"
}
\end{chunk}
\index{Dijkstra, E.W.}
\begin{chunk}{axiom.bib}
@misc{Dijk71,
author = "Dijkstra, E.W.",
title = {{A Short Introduction to the Art of Programming}},
comment = "EWD316",
year = "1971",
paper = "Dijk71.pdf"
}
\end{chunk}
\index{Steele, Guy Lewis}
\index{Sussman, Gerald Jay}
\begin{chunk}{axiom.bib}
@techreport{Stee78a,
author = "Steele, Guy Lewis and Sussman, Gerald Jay",
title = {{The Art of the Interpreter or, The Modularity Complex (Parts
Zero, One, and Two)}},
type = "technical report",
institution = "MIT AI Lab",
number = "AIM453",
year = "1978",
abstract =
"We examine the effects of various language design decisions on
the programming styles available to a user of the language, with
particular emphasis on the ability to incrementally construct
modular systems. At each step we exhibit an interactive
metacircular interpreter for the language under consideration.
We explore the consequences of various variable binding
disciplines and the introduction of side effects. We find that
dynamic scoping is unsuitable for constructing procedural
abstractions, but has another role as an agent of modularity,
being a structured form of side effect. More general side effects
are also found to be necessary to promote modular style. We find
that the notion of side effect and the notion of equality (object
identity) are mutually constraining: to define one is to define
the other.
The interpreters we exhibit are all written in a simple dialect of
LISP, and all implement LISPlike languages. A subset of these
interpreters constitute a partial historical reconstruction of the
actual evolution of LISP.",
paper = "Stee78a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Wadler, Philip}
\index{Findler, Robert Bruce}
\begin{chunk}{axiom.bib}
@inproceedings{Wadl07a,
author = "Wadler, Philip and Findler, Robert Bruce",
title = {{WellTyped Programs Can't Be Blamed}},
booktitle = "Workshop on Scheme and Functional Programming",
year = "2007",
pages = "1526",
abstract =
"We show how {\sl contracts} with blame fit naturally with recent
work on {\sl hybrid types} and {\sl gradual types}. Unlike hybrid
types or gradual types, we require casts in the source code, in
order to indicate where type errors may occur. Two (perhaps
surprising) aspects of our approach are that refined types can
provide useful static guarantees even in the absence of a theorem
prover, and that type {\sl dynamic} should not be regarded as a
supertype of all other types. We factor the wellknown notion of
subtyping into new notions of positive and negative subtyping, and
use these to characterise where positive and negative blame may
arise. Our approach sharpens and clarifies some recent results in
the literature.",
paper = "Wadl07a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Michaelson, Greg}
\begin{chunk}{axiom.bib}
@book{Mich11,
author = "Michaelson, Greg",
title = {{Functional Programming Through Lambda Calculus}},
year = "2011",
publisher = "Dover",
isbn = "9780486478838"
}
\end{chunk}

books/axiom.bib  1167 +++++++++++++++++++++++++++++++
books/bookvolbib.pamphlet  1435 +++++++++++++++++++++++++++++++++++++++
changelog  2 +
patch  1392 +++++++++++++++++++++++++++++++++++++
src/axiomwebsite/patches.html  2 +
5 files changed, 3948 insertions(+), 50 deletions()
diff git a/books/axiom.bib b/books/axiom.bib
index c12b08c..6ffadf6 100644
 a/books/axiom.bib
+++ b/books/axiom.bib
@@ 1298,7 +1298,8 @@ paper = "Brea89.pdf"
volume = "146",
year = "1969",
pages = "2960",
 paper = "Hind69.pdf, printed"
+ paper = "Hind69.pdf",
+ keywords = "printed"
}
@article{Hodg95,
@@ 1480,18 +1481,6 @@ paper = "Brea89.pdf"
paper = "Joua91.pdf"
}
@article{Kaes92,
 author = "Kaes, Stefan",
 title = {{Type Inference in the Presence of Overloading, Subtyping, and
 Recursive Types}},
 journal = "LISP Pointers",
 volume = "V",
 number = "1",
 pages = "193204",
 year = "1992",
 paper = "Kaes92.pdf"
}

@incollection{Kalt83a,
author = "Kaltofen, E.",
title = {{Factorization of Polynomials}},
@@ 2329,7 +2318,8 @@ paper = "Brea89.pdf"
can be defined. In this paper that the standard settheoretic model of
the ordinary typed lambda calculus cannot be extended to model this
language extension.",
 paper = "Reyn84.pdf"
+ paper = "Reyn84.pdf",
+ keywords = "printed"
}
@inproceedings{Reyn91,
@@ 2529,6 +2519,21 @@ paper = "Brea89.pdf"
paper = "Stra00.pdf"
}
+@article{Stra00a,
+ author = "Strachey, Christopher and Wadsworth, Christopher",
+ title = {{Continuations: A Mathematical Semantics for Handling Full Jumps}},
+ journal = "HigherOrder and Symbolic Computation",
+ volume = "13",
+ pages = "135152",
+ year = "2000",
+ abstract =
+ "This paper describes a method of giving the mathematical
+ semantics of programming languages which include the most general
+ form of jumps.",
+ paper = "Stra00a.pdf",
+ keywords = "printed"
+}
+
@book{Stro95,
author = "Stroustrup, Bjarne",
title = {{The C++ Programming Language (2nd Edition)}},
@@ 3530,6 +3535,25 @@ paper = "Brea89.pdf"
paper = "Wied86.pdf"
}
+@article{Bord82,
+ author = "Bordoni, L. and Colagrossi, A. and Miola, A.",
+ title = {{Linear Algebraic Approach for Computing Polynomial Resultant}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "231236",
+ year = "1982",
+ abstract =
+ "This paper presents a linear algebraic method for computing the
+ resultant of two polynomials. This method is based on the
+ computation of a determinant of order equal to the minimum of the
+ degrees of the two given polynomials. This method turns out to be
+ preferable to other known linear algebraic methods both from a
+ computational point of view and for a total generality respect to
+ the class of the given polynomials. Some relationships of this
+ method with the polynomial pseudoremainder operation are discussed.",
+ paper = "Bord82.pdf"
+}
+
@article{Belo83,
author = "Belovari, G.",
title =
@@ 3869,6 +3893,73 @@ paper = "Brea89.pdf"
paper = "Kemp81.pdf"
}
+@article{Laza82,
+ author = "Lazard, Daniel",
+ title = {{Commutative Algebra and Computer Algebra}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "4048",
+ year = "1982",
+ abstract =
+ "It is well known that computer algebra deals with commutative rings
+ such as the integers, the rationals, the modular integers and
+ polynomials over such rings.
+
+ On the other hand, commutative algebra is that part of mathematics
+ which studies commutative rings.
+
+ Our aim is to make this link more precise. It will appear that most of
+ the constructions which appear in classical commutative algebra can be
+ done explicitly in finite time. However, there are
+ exceptions. Moreover, most of the known algorithms are intractable :
+ The problems are generally exponential by themselves, but many
+ algorithms are overoverexponential. It needs a lot of work to find
+ better methods, and to implement them, with the final hope to open a
+ computation domain larger than this one which is possible by hand.
+
+ To illustrate the limits and the possibilities of computerizing
+ commutative algebra, we describe an algorithm which tests the
+ primality of a polynomial ideal and we give an example of a single
+ algebraic extension of fields $K\subset L$ such that there exists an
+ algorithm of factorization for the polynomials over $K$, but not for
+ the polynomials over $L$.",
+ paper = "Laza82.pdf"
+}
+
+@article{Laza82a,
+ author = "Lazard, Daniel",
+ title = {{On Polynomial Factorization}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "144157",
+ year = "1982",
+ abstract =
+ "We present new algorithms for factoring univariate polynomials
+ over finite fields. They are variants of the algorithms of Camion
+ and CantorZassenhaus and differ from them essentially by
+ computing the primitive idempotents of the algebra $K[X]/f$ before
+ factoring $f$.
+
+ These algorithms are probabilistic in the following sense. The
+ time of computation depends on random choices, but the validity of
+ the result does not depend on them. So, worst case complexity,
+ being infinite, is meaningless and we compute average
+ complexity. It appears that our and CantorZassenhaus algorithms
+ have the same asymptotic complexity and they are the best
+ algorithms actuall known; with elementary multiplication and GCD
+ computation, CantorZassenhaus algorithm is always better than
+ ours; with fast multiplication and GCD, it seems that ours is
+ better, but this fact is not yet proven.
+
+ Finally, for factoring polynomials over the integers, it seems
+ that the best strategy consists in choosing prime moduli as big as
+ possible in the range where the time of the multiplication does
+ not depend on the size of the factors (machine word size). An
+ accurate computation of the involved constants would be needed for
+ proving this estimation.",
+ paper = "Laza82a.pdf"
+}
+
@article{Loos72a,
author = "Loos, Rudiger",
title = {{Analytic Treatment of Three Similar Fredholm Integral Equations
@@ 3899,6 +3990,25 @@ paper = "Brea89.pdf"
paper = "Norf82.pdf"
}
+@article{Padg82,
+ author = "Padget, J.A.",
+ title = {{Escaping from Intermediate Expression Swell: A Continuing Saga}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "256262",
+ year = "1982",
+ abstract =
+ "The notion of a closed continuation is introduced, is presented,
+ coroutines using function call and return based on this concept, are
+ applications and a functional dialect of LISP shown to be merely a
+ more general form of for coroutines in algebraic simplification and
+ are suggested, by extension function. expression Potential evaluation
+ and a specific example of their use is given in a novel attack on the
+ phenomenon of intermediate expression swell in polynomial
+ multiplication.",
+ paper = "Padg82.pdf"
+}
+
@article{Plat09,
author = "Platzer, Andre and Quesel, JanDavid and Rummer, Philipp",
title = {{Real World Verification}},
@@ 4606,6 +4716,16 @@ paper = "Brea89.pdf"
paper = "Free88.pdf"
}
+@article{Norm82,
+ author = "Norman, Arthur",
+ title = {{The Development of a VectorBased Algebra System}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "237248",
+ year = "1982",
+ paper = "Norm82.pdf"
+}
+
@InCollection{Kalt10a,
author = "Kaltofen, Erich L.",
title = {{The ``Seven Dwarfs'' of Symbolic Computation}},
@@ 7676,7 +7796,8 @@ paper = "Brea89.pdf"
proof assistant, a tool for machinechecked mathematical theorem
proving; and formal logical reasoning about the correctness of
programs.",
 paper = "Chli17.pdf, printed"
+ paper = "Chli17.pdf",
+ keywords = "printed"
}
@article{Coen04,
@@ 8981,7 +9102,8 @@ paper = "Brea89.pdf"
several principles (called search principles) which are applicable to
the design of efficient proofprocedures employing resolution as the
basic logical process.",
 paper = "Robi65.pdf, printed"
+ paper = "Robi65.pdf",
+ keywords = "printed"
}
@article{Ruzi89,
@@ 9295,6 +9417,29 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@article{Bals91,
+ author = "Balsters, Herman and Fokkinga, Maarten M.",
+ title = {{Subtyping can have a simple semantics}},
+ journal = "Theoretical Computer Science",
+ volume = "87",
+ pages = "8196",
+ year = "1991",
+ abstract =
+ "Consider a first order typed language, with semantics
+ $\llbracket~\rrbracket$ for expressions and types. Adding
+ subtyping means that a partial order $\le$ on types is defined
+ and that the typing rules are extended to the effect that
+ expression $e$ has type $\tau$ whenever $e$ has type $\sigma$ and
+ $\sigma \le \tau$. We show how to adapt the semantics
+ $\llbracket~\rrbracket$ in a {\sl simple set theoretic way},
+ obtaining a semantics $\llbracket~\rrbracket$ that satisfies, in
+ addition to some obvious requirements, also the property
+ $\llbracket\sigma\rrbracket \subseteq \llbracket\tau\rrbracket$,
+ whenever $\sigma \le \tau$.",
+ paper = "Bals91.pdf",
+ keywords = "printed"
+}
+
@techreport{Barr96a,
author = "Barras, Bruno",
title = {{Coq en Coq}},
@@ 9386,6 +9531,15 @@ paper = "Brea89.pdf"
paper = "Boye75.pdf"
}
+@book{Brod94,
+ author = "Broda, Krysia and Eisenbach, Susan and Khoshnevisan, Hessam
+ and Vickers, Steve",
+ title = {{Reasoned Programming}},
+ publisher = "Imperial College",
+ year = "1994",
+ paper = "Brod94.pdf"
+}
+
@inproceedings{Cald97,
author = "Caldwell, James L.",
title = {{Moving proofsasprograms into practice}},
@@ 9410,6 +9564,41 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@article{Card84,
+ author = "Cardelli, Luca",
+ title = {{A Semantics of Multiple Inheritance}},
+ journal = "LNCS",
+ volume = "173",
+ year = "1984",
+ paper = "Card84.pdf",
+ keywords = "printed"
+}
+
+@inproceedings{Care07,
+ author = "Carette, Jacques and Farmer, William M. and Sorge, Volker",
+ title = {{A Rational Reconstruction of a System for Experimental
+ Mathematics}},
+ booktitle = "14th Workshop on Automated Reasoning",
+ publisher = "unknown",
+ year = "2007",
+ abstract =
+ "In previous papers we described the implementation of a system
+ which combines mathematical object generation, transformation and
+ filtering, conjecture generation, proving and disproving for
+ mathematical discovery in nonassociative algebra. While the system
+ has generated novel, fully verified theorems, their construction
+ involved a lot of ad hoc communication between disparate systems. In
+ this paper we carefully reconstruct a specification of a subprocess
+ of the original system in a framework for trustable communication
+ between mathematics systems put forth by us. It employs the concept
+ of biform theories that enables the combined formalisation of the
+ axiomatic and algorithmic theories behind the generation
+ process. This allows us to gain a much better understanding of the
+ original system, and exposes clear generalisation opportunities.",
+ paper = "Care07.pdf",
+ keywords = "printed"
+}
+
@phdthesis{Cart76,
author = "Cartwright, Robert",
title = {{A Practical Formal Semantic Definition and Verification
@@ 9501,6 +9690,54 @@ paper = "Brea89.pdf"
paper = "Clin72.pdf"
}
+@inproceedings{Crar99,
+ author = "Crary, Karl and Harper, Robert and Puri, Sidd",
+ title = {{What is a Recursive Module}},
+ booktitle = "Conf. on Programming Language Design and Implementation",
+ year = "1999",
+ link = "\url{https://www.cs.cmu.edu/~crary/papers/1999/recmod/recmod.dvi}",
+ abstract =
+ "A hierarchical module system is an effective tool for structuring
+ large programs. Strictly hierarchical module systems impose an
+ acyclic ordering on import dependencies among program units. This
+ can impede modular programming by forcing mutuallydependent
+ components to be consolidated into a single module. Recently there
+ have been several proposals for module systems that admit cyclic
+ dependencies, but it is not clear how these proposals relate to
+ one another, nor how one might integrate them into an expressive
+ module system such as that of ML.
+
+ To address this question we provide a typetheoretic analysis of
+ the notion of a recursive module in the context of a
+ ``phasedistinction'' formalism for higherorder module
+ systems. We extend this calculus with a recursive module mechanism
+ and a new form of signature, called a {\sl recursively dependent
+ signature}, to support the definition of recursive modules. These
+ extensions are justified by an interpretation in terms of more
+ primitive language constructs. This interpretation may also serve
+ as a guide for implementation.",
+ paper = "Crar99.pdf",
+ keywords = "printed"
+}
+
+@techreport{Crar02,
+ author = "Crary, Karl",
+ title = {{Toward a Foundational Typed Assembly Language}},
+ institution = "Carnegie Mellon University",
+ number = "CMUCS02196",
+ year = "2002",
+ link = "\url{https://www.cs.cmu.edu/~crary/papers/2003/talt/talttr.pdf}",
+ abstract =
+ "We present the design of a typed assembly language called TALT that
+ supports heterogeneous tuples, disjoint sums, arrays, and a general
+ account of addressing modes. TALT also implements the von Neumann
+ model in which programs are stored in memory, and supports relative
+ addressing. Type safety for execution and for garbage collection are
+ shown by machinecheckable proofs. TALT is the first formalized typed
+ assembly language to provide any of these features.",
+ paper = "Crar02.pdf"
+}
+
@misc{Crar08,
author = "Crary, Karl and Sarkar, Susmit",
title = {{Foundational Certified Code in a Metalogical Framework}},
@@ 9629,6 +9866,14 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@misc{Dijk71,
+ author = "Dijkstra, E.W.",
+ title = {{A Short Introduction to the Art of Programming}},
+ comment = "EWD316",
+ year = "1971",
+ paper = "Dijk71.pdf"
+}
+
@incollection{Dijk72a,
author = "Dijkstra, E.W.",
title = {{Notes on Structured Programming}},
@@ 9726,6 +9971,77 @@ paper = "Brea89.pdf"
paper = "Down16.pdf"
}
+@techreport{Drey02,
+ author = "Dreyer, Derek and Crary, Karl and Harper, Robert",
+ title = {{A Type System for HigherOrder Modules (Expanded Version)}},
+ type = "technical report",
+ institution = "Carnegie Mellon University",
+ number = "CMUCS02122R",
+ year = "2002",
+ link = "\url{https://www.cs.cmu.edu/~crary/papers/2003/thoms/thomstr.pdf}",
+ abstract =
+ "We present a type theory for higherorder modules that accounts for
+ many central issues in module system design, including translucency,
+ applicativity, generativity, and modules as firstclass values.
+ Our type system harmonizes design elements from previous work,
+ resulting in a simple, economical account of modular programming. The
+ main unifying principle is the treatment of abstraction mechanisms
+ as computational effects. Our language is the first to provide a
+ complete and practical formalization of all of these critical issues
+ in module system design.",
+ paper = "Drey02.pdf"
+}
+
+@article{Drey03,
+ author = "Dreyer, Derek and Crary, Karl and Harper, Robert",
+ title = {{A Type System for HigherOrder Modules}},
+ journal = "ACM SIGPLAN Notices",
+ volume = "38",
+ number = "1",
+ year = "2003",
+ link = "\url{https://www.cs.cmu.edu/~crary/papers/2003/thoms/thoms.pdf}",
+ abstract =
+ "We present a type theory for higherorder modules that accounts for
+ many central issues in module system design, including translucency,
+ applicativity, generativity, and modules as firstclass values.
+ Our type system harmonizes design elements from previous work,
+ resulting in a simple, economical account of modular programming. The
+ main unifying principle is the treatment of abstraction mechanisms
+ as computational effects. Our language is the first to provide a
+ complete and practical formalization of all of these critical issues
+ in module system design.",
+ paper = "Drey03.pdf",
+ keywords = "printed"
+}
+
+@inproceedings{Drey07,
+ author = "Dreyer, Derek and Harper, Robert and Chakravarty, Manuel M.T.
+ and Keller, Gabriele",
+ title = {{Modular Type Classes}},
+ booktitle = "Proc. POPL'07",
+ pages = "6370",
+ year = "2007",
+ abstract =
+ "ML modules and Haskell type classes have proven to be highly
+ effective tools for program structuring. Modules emphasize explicit
+ configuration of program components and the use of data abstraction.
+ Type classes emphasize implicit program construction and ad hoc
+ polymorphism. In this paper , we show how the implicitlytyped
+ style of type class programming may be supported within the framework
+ of an explicitlytyped module language by viewing type classes as a
+ particular mode of use of modules. This view offers a harmonious
+ integration of modules and type classes, where type class features,
+ such as class hierarchies and associated types, arise naturally as
+ uses of existing modulelanguage constructs, such as module
+ hierarchies and type components. In addition, programmers have
+ explicit control over which type class instances are available for
+ use by type inference in a given scope. We formalize our approach as
+ a HarperStonestyle elaboration relation, and provide a sound type
+ inference algorithm as a guide to implementation.",
+ paper = "Drey07.pdf",
+ keywords = "printed"
+}
+
@article{Dura14,
author = "Duran, Antonio J. and Perez, Mario and Varona, Juan L.",
title = {{The Misfortunes of a Trio of Mathematicians Using Computer
@@ 9740,6 +10056,116 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@inproceedings{Farm00,
+ author = "Farmer, William M.",
+ title = {{A Proposal for the Development of an Interactive
+ Mathematics Laboratory for Mathematics Education}},
+ booktitle = "Workshop on Deduction Systems for Mathematics Education",
+ pages = "2025",
+ year = "2000",
+ paper = "Farm00.pdf",
+ keywords = "axiomref, printed"
+}
+
+@inproceedings{Farm00a,
+ author = "Farmer, William M. and Mohrenschildt, Martin v.",
+ title = {{Transformers for Symbolic Computation and Formal Deduction}},
+ booktitle = "CADE17",
+ pages = "3645",
+ year = "2000",
+ abstract =
+ "A transformer is a function that maps expressions to expressions.
+ Many transformational operators — such as expression evaluators and
+ simplifiers, rewrite rules, rules of inference, and decision
+ procedures — can be represented by transformers. Computations and
+ deductions can be formed by applying sound transformers in
+ sequence. This paper introduces machinery for defining sound
+ transformers in the context of an axiomatic theory in a formal
+ logic. The paper is intended to be a first step in a development of an
+ integrated framework for symbolic computation and formal deduction.",
+ paper = "Farm00a.pdf",
+ keywords = "printed"
+}
+
+@article{Farm04,
+ author = "Farmer, William M.",
+ title = {{MKM A New Interdisciplinary Field of Research}},
+ journal = "SIGSAM",
+ volume = "38",
+ pages = "4752",
+ year = "2004",
+ abstract =
+ "Mathematical Knowledge Management (MKM) is a new interdisciplinary
+ field of research in the intersection of mathematics, computer
+ science, library science, and scientific publishing. Its objective is
+ to develop new and better ways of managing mathematical knowledge
+ using sophisticated software tools. Its grand challenge is to create
+ a universal digital mathematics library (UDML), accessible via the
+ WorldWide Web, that contains essentially all mathematical knowledge
+ (intended for the public). The challenges facing MKM are daunting,
+ but a UDML, even just partially constructed, would transform how
+ mathematics is learned and practiced.",
+ paper = "Farm04.pdf",
+ keywords = "printed, axiomref"
+}
+
+@article{Farm07,
+ author = "Farmer, William M.",
+ title = {{Biform Theories in Chiron}},
+ journal = "LNCS",
+ volume = "4573",
+ pages = "6679",
+ year = "2007",
+ abstract =
+ "An axiomatic theory represents mathematical knowledge declaratively
+ as a set of axioms. An algorithmic theory represents mathematical
+ knowledge procedurally as a set of algorithms. A biform theory is
+ simultaneously an axiomatic theory and an algorithmic theory. It
+ represents mathematical knowledge both declaratively and procedurally.
+ Since the algorithms of algorithmic theories manipulate the syntax of
+ expressions, biform theories — as well as algorithmic theories — are
+ difficult to formalize in a traditional logic without the means to
+ reason about syntax. Chiron is a derivative of
+ von Neumann-Bernays-G{\"o}del (NBG) set theory that is intended to be a
+ practical, generalpurpose logic for mechanizing mathematics. It
+ includes elements of type theory, a scheme for handling undefinedness,
+ and a facility for reasoning about the syntax of expressions. It is an
+ exceptionally wellsuited logic for formalizing biform theories. This
+ paper defines the notion of a biform theory, gives an overview of
+ Chiron, and illustrates how biform theories can be formalized in Chiron.",
+ paper = "Farm07.pdf",
+ keywords = "printed"
+}
+
+@misc{Farm14,
+ author = "Farmer, William M. and Larjani, Pouya",
+ title = {{Frameworks for Reasoning about Syntax that Utilize
+ Quotation and Evaluation}},
+ link = "\url{http://imps.mcmaster.ca/doc/syntax.pdf}",
+ year = "2014",
+ abstract =
+ "It is often useful, if not necessary, to reason about the syntactic
+ structure of an expression in an interpreted language (i.e., a
+ language with a semantics). This paper introduces a mathematical
+ structure called a syntax framework that is intended to be an abstract
+ model of a system for reasoning about the syntax of an interpreted
+ language. Like many concrete systems for reasoning about syntax, a
+ syntax framework contains a mapping of expressions in the
+ interpreted language to syntactic values that represent the syntactic
+ structures of the expressions; a language for reasoning about the
+ syntactic values; a mechanism called quotation to refer to the
+ syntactic value of an expression; and a mechanism called evaluation to
+ refer to the value of the expression represented by a syntactic value.
+ A syntax framework provides a basis for integrating reasoning about
+ the syntax of the expressions with reasoning about what the
+ expressions mean. The notion of a syntax framework is used to discuss
+ how quotation and evaluation can be built into a language and to
+ define what quasiquotation is. Several examples of syntax frameworks
+ are presented.",
+ paper = "Farm14.pdf",
+ keywords = "printed"
+}
+
@article{Fill03,
author = "Filliatre, JeanChristophe",
title = {{Verification of NonFunctional Programs using Interpretations
@@ 9831,6 +10257,26 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@inproceedings{Free91,
+ author = "Freeman, Tim and Pfenning, Frank",
+ title = {{Refinement Types for ML}},
+ booktitle = "ACM SIGPLAN PLDI'91",
+ year = "1991",
+ link = "\url{http://www.cs.cmu.edu/~fp/papers/pldi91.pdf}",
+ abstract =
+ "We describe a refinement of ML's type system allowing the
+ specification of recursively defined subtypes of userdefined
+ datatypes. The resulting system of {\sl refinement types}
+ preserves desirable properties of ML such as decidability of type
+ inference, while at the same time allowing more errors to be
+ detected at compiletime. The type system combines abstract
+ interpretation with ideas from the intersection type discipline,
+ but remains closely tied to ML in that refinement types are given
+ only to programs which are already welltyped in ML.",
+ paper = "Free91.pdf",
+ keywords = "printed"
+}
+
@article{Foxx03,
author = "Fox, Anthony",
title = {{Formal Specification and Verification of ARM6}},
@@ 9858,6 +10304,34 @@ paper = "Brea89.pdf"
isbn = "0201416085"
}
+@techreport{Frie76,
+ author = "Friedman, Daniel P. and Wise, David S.",
+ title = {{CONS should not Evaluate its Arguments}},
+ institution = "Indiana University",
+ number = "TR44",
+ year = "1976",
+ abstract =
+ "The constructor function which allocates and fills records in
+ recursive, sideeffectfree procedural languages is redefined to be a
+ nonstrict (Vuillemin 1974) elementary operation. Instead of
+ evaluating its arguments, it builds suspensions of them which are not
+ coerced until the suspension is accessed by a strict elementary
+ function. The resulting evaluation procedures are strictly more
+ powerful than existing schemes for languages such as LISP. The main
+ results are that Landin's streams are subsumed into McCarthy's LISP
+ merely by the redefinition of elementary functions, that invocations of
+ LISP's evaluator can be minimized by redefining the elementary
+ functions without redefining the interpreter, and as a strong
+ conjecture, that redefining the elementary functions yields the least
+ fixedpoint semantics for McCarthy's evaluation scheme. This new
+ insight into the role of construction functions will do much to ease
+ the interface between recursive programmers and iterative programmers,
+ as well as the interface between programmers and data structure
+ designers.",
+ paper = "Frie76.pdf",
+ keywords = "printed"
+}
+
@misc{Giar95,
author = "Girard, JeanYves",
title = {{Linear Logic: Its Syntax and Semantics}},
@@ 9969,6 +10443,27 @@ paper = "Brea89.pdf"
paper = "Gord06.pdf"
}
+@misc{Gros15,
+ author = "Gross, Jason and Chlipala, Adam",
+ title = {{Parsing Parses}},
+ link = "\url{https://people.csail.mit.edu/jgross/personalwebsite/papers/2015parsingparsetrees.pdf}",
+ year = "2015",
+ abstract =
+ "We present a functional parser for arbitrary contextfree grammars,
+ together with soundness and completeness proofs, all inside Coq. By
+ exposing the parser in the right way with parametric polymorphism and
+ dependent types, we are able to use the parser to prove its own
+ soundness, and, with a little help from relational parametricity,
+ prove its own completeness, too. Of particular interest is one strange
+ instantiation of the type and value parameters: by parsing parse trees
+ instead of strings, we convince the parser to generate its own
+ completeness proof. We conclude with highlights of our experiences
+ iterating through several versions of the Coq development, and some
+ general lessons about dependently typed programming.",
+ paper = "Gros15.pdf",
+ keywords = "printed"
+}
+
@article{Gutt95,
author = "Guttman, Joshua D. and Ramsdell, John D. and Wand, Mitchell",
title = {{VLISP: A Verified Implementation of Scheme}},
@@ 10099,6 +10594,56 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@article{Kaes88,
+ author = "Kaes, Stefan",
+ title = {{Parametric Overloading in Polymorphic Programming Languages}},
+ journal = "LNCS",
+ volume = "300",
+ pages = "131144",
+ year = "1988",
+ abstract =
+ "The introduction of unrestricted overloading in languages with type
+ systems based on implicit parametric polymorphism generally destroys
+ the principal type property: namely that the type of every expression
+ can uniformly be represented by a single type expression over some set
+ of type variables. As a consequence, type inference in the presence
+ of unrestricted overloading can become a NPcomplete problem. In
+ this paper we define the concept of parametric overloading as a
+ restricted form of overloading which is easily combined with
+ parametric polymorphism. Parametric overloading preserves the
+ principal type property, thereby allowing the design of efficient type
+ inference algorithms. We present sound type deduction systems, both
+ for predefined and programmer defined overloading. Finally we state
+ that parametric overloading can be resolved either statically, at
+ compile time, or dynamically, during program execution.",
+ paper = "Kaes88.pdf",
+ keywords = "printed"
+}
+
+@article{Kaes92,
+ author = "Kaes, Stefan",
+ title = {{Type Inference in the Presence of Overloading, Subtyping and
+ Recursive Types}},
+ journal = "ACM Lisp Pointers",
+ volume = "5",
+ number = "1",
+ year = "1992",
+ pages = "193204",
+ abstract =
+ "We present a unified approach to type inference in the presence of
+ overloading and coercions based on the concept of {\sl constrained
+ types}. We define a generic inference system, show that subtyping and
+ overloading can be treated as a special instance of this system and
+ develop a simple algorithm to compute principal types. We prove the
+ decidability of type inference for the class of {\sl decomposable
+ predicates} and develop a canonical representation for principal types
+ based on {\sl most accurate simplifications} of constraint
+ sets. Finally, we investigate the extension of our techniques to
+ {\sl recursive types}.",
+ paper = "Kaes92.pdf",
+ keywords = "printed"
+}
+
@article{Keim09,
author = "Keimel, Klaus and Plotkin, Gordon D.",
title = {{Predicate Transformers for Extended Probability and
@@ 10130,6 +10675,41 @@ paper = "Brea89.pdf"
paper = "Keim09.pdf"
}
+@phdthesis{Kell13,
+ author = "Keller, C.",
+ title = {{A Matter of Trust: Skeptical Communication Between Coq and
+ External Provers}},
+ school = "Ecole Polytechnique",
+ year = "2013",
+ link =
+"\url{https://www.lri.fr/~keller/Documentsrecherche/Publications/thesis13.pdf}",
+ abstract =
+ "This thesis studies the cooperation between the Coq proof assistant
+ and external provers through proof witnesses. We concentrate on two
+ different kinds of provers that can return certificates: first, answers
+ coming from SAT and SMT solvers can be checked in Coq to increase both
+ the confidence in these solvers and Coq's automation; second,
+ theorems established in interactive provers based on HigherOrder
+ Logic can be exported to Coq and checked again, in order to offer the
+ possibility to produce formal developments which mix these two
+ different logical paradigms. It ended up in two software tools: SMTCoq, a
+ bidirectional cooperation between Coq and SAT/SMT solvers, and
+ HOLLIGHTCOQ, a tool importing HOL Light theorems into Coq.
+
+ For both tools, we took great care to define a modular and efficient
+ architecture, based on three clearly separated ingredients: an
+ embedding of the formalism of the external tool inside Coq which is
+ carefully translated into Coq terms, a certified checker to establish
+ the proofs using the certificates and an OCaml preprocessor to transform
+ proof witnesses coming from different provers into a generic
+ certificate. This division allows that a change in the format of proof
+ witnesses only affects the preprocessor, but no proved Coq code.
+ Another fundamental component for efficiency and modularity is
+ computational reflection, which exploits the computational power of
+ Coq to establish generic and small proofs based on the certificates.",
+ paper = "Kell13.pdf"
+}
+
@inproceedings{Kran86,
author = "Kranz, David and Kelsey, Richard and Rees, Jonathan and
Hudak, Paul and Philbin, James and Adams, Norman",
@@ 10207,6 +10787,14 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@incollection{Mcca63,
+ author = "McCarthy, John",
+ title = {{A Basis for a Mathematical Theory of Computation}},
+ booktitle = "Computer Programming and Formal Systems",
+ year = "1963",
+ paper = "Mcca63.pdf"
+}
+
@inproceedings{Mcdo97,
author = "McDowell, Raymond and Miller, Dale",
title = {{A Logic for Reasoning with HigherOrder Abstract Syntax}},
@@ 10287,6 +10875,66 @@ paper = "Brea89.pdf"
paper = "Mcdo02.pdf"
}
+@book{Mich11,
+ author = "Michaelson, Greg",
+ title = {{Functional Programming Through Lambda Calculus}},
+ year = "2011",
+ publisher = "Dover",
+ isbn = "9780486478838"
+}
+
+@article{Mili09,
+ author = "Mili, Ali and Aharon, Shir and Nadkarni, Chaitanya",
+ title = {{Mathematics for Reasoning about Loop Functions}},
+ journal = "Science of Computer Programming",
+ volume = "79",
+ year = "2009",
+ pages = "9891020",
+ abstract =
+ "The criticality of modern software applications, the pervasiveness of
+ malicious code concerns, the emergence of thirdparty software
+ development, and the preponderance of program inspection as a quality
+ assurance method all place a great premium on the ability to analyze
+ programs and derive their function in all circumstances of use and all
+ its functional detail. For Clike programming languages, one of the
+ most challenging tasks in this endeavor is the derivation of loop
+ functions. In this paper, we outline the premises of our approach to
+ this problem, present some mathematical results, and discuss how these
+ results can be used as a basis for building an automated tool that
+ derives the function of while loops under some conditions.",
+ paper = "Mili09.pdf",
+ keywords = "printed"
+}
+
+@inproceedings{Mell15,
+ author = "Mellies, PaulAndre and Zeilberger, Noam",
+ title = {{Functors are Type Refinement Systems}},
+ booktitle = "POPL'15",
+ publisher = "ACM",
+ year = "2015",
+ abstract =
+ "The standard reading of type theory through the lens of category
+ theory is based on the idea of viewing a type system as a category
+ of welltyped terms. We propose a basic revision of this reading;
+ rather than interpreting type systems as categories, we describe
+ them as {\sl functors} from a category of typing derivations to a
+ category of underlying terms. Then, turning this around, we
+ explain how in fact {\sl any} functor gives rise to a generalized
+ type system, with an abstract notion of type judgment, typing
+ derivations and typing rules. This leads to a purely categorical
+ reformulation of various natural classes of type systems as
+ natural classes of functors.
+
+ The main purpose of this paper is to describe the general
+ framework (which can also be seen as providing a categorical
+ analysis of {\sl refinement types}, and to present a few
+ applications. As a larger case study, we revisit Reynold's paper
+ on ``The Meaning of Types'' (2000), showing how the paper's main
+ results may be reconstructed along these lines.",
+ paper = "Mell15.pdf",
+ keywords = "printed"
+}
+
@book{Morg98,
author = "Morgan, Carroll",
title = {{Programming from Specifcations, 2nd Ed.}},
@@ 10327,6 +10975,33 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@article{Moss84,
+ author = "Mosses, Peter",
+ title = {{A Basic Abstract Semantics Algebra}},
+ journal = "LNCS",
+ volume = "173",
+ year = "1984",
+ abstract =
+ "It seems that there are some pragmatic advantages in using Abstract
+ Semantic Algebras (ASAs) instead of lambda-notation in denotational
+ semantics. The values of ASAs correspond to 'actions' (or
+ 'processes'), and the operators correspond to primitive ways of
+ combining actions. There are simple ASAs for the various independent
+ 'facets' of actions: a functional ASA for dataflow, an imperative ASA
+ for assignments, a declarative ASA for bindings, etc. The aim is to
+ obtain general ASAs by systematic combination of these simple ASAs.
+
+ Here we specify a basic ASA that captures the common features of the
+ functional, imperative and declarative ASAs  and highlights their
+ differences. We discuss the correctness of ASA specifications, and
+ sketch the proof of the consistency and (limiting) completeness of the
+ functional ASA, relative to a simple model.
+
+ Some familiarity with denotational semantics and algebraic
+ specifications is assumed.",
+ paper = "Moss84.pdf"
+}
+
@article{Myre14,
author = "Myreen, Magnus O. and Davis, Jared",
title = {{The Reflective Milawa Theorem Prover is Sound}},
@@ 10581,6 +11256,28 @@ paper = "Brea89.pdf"
paper = "Phil92.pdf"
}
+@misc{Piro08,
+ author = "Piroi, Florina and Buchberger, Bruno and Rosenkranz, Camelia",
+ title = {{Mathematical Journals as Reasoning Agents: Literature Review}},
+ year = "2008",
+ link = "\url{http://www.risc.jku.at/publications/download/risc_3442/MathAgentsforSFB2008031012h00.pdf}",
+ abstract =
+ "This report reviews the literature relevant for the research project
+ 'Math-Agents: Mathematical Journals as Reasoning Agents' proposed by
+ Bruno Buchberger as a technology transfer project based on the results
+ of the SFB Project 'Scientific Computing', in particular the project
+ SFB 1302, 'Theorema'. The project aims at computer-supporting the
+ refereeing process of mathematical journals by tools that are mainly
+ based on automated reasoning and also at building up the knowledge
+ archived in mathematical journals in such a way that they can act as
+ interactive and active reasoning agents later on. In this report,
+ we review current mathematical software systems with a focus on the
+ availability of tools that can contribute to the goals of the
+ Math-Agents project.",
+ paper = "Piro08.pdf",
+ keywords = "printed"
+}
+
@phdthesis{Poll94,
author = "Pollack, Robert",
title = {{The Theory of LEGO  A Proof Checker for the Extended Calculus
@@ 10659,6 +11356,25 @@ paper = "Brea89.pdf"
paper = "Remy17.tgz"
}
+@article{Reyn85,
+ author = "Reynolds, J.C.",
+ title = {{Three Approaches to Type Structure}},
+ journal = "LNCS",
+ volume = "185",
+ year = "1985",
+ abstract =
+ "We examine three disparate views of the type structure of
+ programming languages: Milner's type deduction system and polymorphic
+ let construct, the theory of subtypes and generic operators, and
+ the polymorphic or secondorder typed lambda calculus. These
+ approaches are illustrated with a functional language including
+ product, sum and list constructors. The syntactic behavior of types
+ is formalized with type inference rules, but their semantics is
+ treated intuitively.",
+ paper = "Reyn85.pdf",
+ keywords = "printed"
+}
+
@techreport{Sabr92,
author = "Sabry, Amr and Felleisen, Matthias",
title = {{Reasoning About Programs in ContinuationPassing Style}},
@@ 10682,6 +11398,120 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@article{Sarm17,
+ author = "Sarma, Gopal and Hay, Nick J.",
+ title = {{Robust Computer Algebra, Theorem Proving, and Oracle AI}},
+ journal = "Informatica",
+ volume = "41",
+ number = "3",
+ link = "\url{https://arxiv.org/pdf/1708.02553.pdf}",
+ year = "2017",
+ abstract =
+ "In the context of superintelligent AI systems, the term 'oracle' has
+ two meanings. One refers to modular systems queried for
+ domainspecific tasks. Another usage, referring to a class of systems
+ which may be useful for addressing the value alignment and AI control
+ problems, is a superintelligent AI system that only answers questions.
+ The aim of this manuscript is to survey contemporary research problems
+ related to oracles which align with longterm research goals of AI
+ safety. We examine existing question answering systems and argue that
+ their high degree of architectural heterogeneity makes them poor
+ candidates for rigorous analysis as oracles. On the other hand, we
+ identify computer algebra systems (CASs) as being primitive examples
+ of domainspecific oracles for mathematics and argue that efforts to
+ integrate computer algebra systems with theorem provers, systems which
+ have largely been developed independent of one another, provide a
+ concrete set of problems related to the notion of provable safety that
+ has emerged in the AI safety community. We review approaches to
+ interfacing CASs with theorem provers, describe welldefined
+ architectural deficiencies that have been identified with CASs, and
+ suggest possible lines of research and practical software projects for
+ scientists interested in AI safety.",
+ paper = "Sarm17.pdf",
+ keywords = "printed, axiomref"
+}
+
+@article{Scot71,
+ author = "Scott, Dana S. and Strachey, C.",
+ title = {{Towards a Mathematical Semantics for Computer Languages}},
+ journal = "Proc. Symp. on Computers and Automata",
+ volume = "21",
+ year = "1971",
+ abstract =
+ "Compilers for highlevel languages are generally constructed to
+ give a complete translation of the programs into machine
+ language. As machines merely juggle bit patterns, the concepts of
+ the original language may be lost or at least obscured during this
+ passage. The purpose of a mathematical semantics is to give a
+ correct and meaningful correspondence between programs and
+ mathematical entities in a way that is entirely independent of an
+ implementation. This plan is illustrated in a very elementary
+ method with the usual idea of state transformations. The next
+ section shows why the mathematics of functions has to be modified
+ to accommodate recursive commands. Section 3 explains the
+ modification. Section 4 introduces the environments for handling
+ variables and identifiers and shows how the semantical equations
+ define equivalence of programs. Section 5 gives an exposition of
+ the new type of mathematical function spaces that are required for
+ the semantics of procedures when these are allowed in assignment
+ statements. The conclusion traces some of the background of the
+ project and points the way to future work.",
+ paper = "Scot71.pdf"
+}
+
+@inproceedings{Shie02,
+ author = "Shields, Mark and Jones, Simon Peyton",
+ title = {{FirstClass Modules for Haskell}},
+ booktitle = "9th Int. Conf. on Foundations of ObjectOriented Languages",
+ pages = "2840",
+ year = "2002",
+ link = "\url{http://www.microsoft.com/enus/research/wpcontent/uploads/2016/02/first_class_modules.pdf}",
+ abstract =
+ "Though Haskell's module language is quite weak, its core language
+ is highly expressive. Indeed, it is tantalisingly close to being
+ able to express much of the structure traditionally delegated to a
+ separate module language. However, the encoding are awkward, and
+ some situations can't be encoded at all.
+
+ In this paper we refine Haskell's core language to support
+ {\sl firstclass modules} with many of the features of MLstyle
+ modules. Our proposal cleanly encodes signatures, structures and
+ functors with the appropriate type abstraction and type sharing,
+ and supports recursive modules. All of these features work across
+ compilation units, and interact harmoniously with Haskell's class
+ system. Coupled with support for staged computation, we believe
+ our proposal would be an elegant approach to runtime dynamic
+ linking of structured code.
+
+ Our work builds directly upon Jones' work on parameterised
+ signatures, Odersky and Laufer's system of higherranked type
+ annotations, Russo's semantics of ML modules using ordinary
+ existential and universal quantifications, and Odersky and
+ Zenger's work on nested types. We motivate the system by examples,
+ and include a more formal presentation in the appendix.",
+ paper = "Shie02.pdf",
+ keywords = "printed"
+}
+
+@article{Soko87,
+ author = "Sokolowski, Stefan",
+ title = {{Soundness of Hoare's Logic: An Automated Proof Using LCF}},
+ journal = "Trans. on Programming Languages and Systems",
+ volume = "9",
+ number = "1",
+ pages = "100120",
+ year = "1987",
+ abstract =
+ "This paper presents a natural deduction proof of Hoare's logic
+ carried out by the Edinburgh LCF theorem prover. The emphasis is
+ on the way Hoare's theory is presented to the LCF, which looks
+ very much like an exposition of syntax and semantics to human
+ readers; and on the programmable heuristics (tactics). We also
+ discuss some problems and possible improvements to the LCF.",
+ paper = "Soko87.pdf",
+ keywords = "printed"
+}
+
@techreport{Stee78,
author = "Steele, Guy Lewis",
title = {{RABBIT: A Compiler for SCHEME}},
@@ 10735,6 +11565,31 @@ paper = "Brea89.pdf"
paper = "Stee78.pdf"
}
+@article{Stuc05,
+ author = "Stuckey, Peter J. and Sulzmann, Martin",
+ title = {{A Theory of Overloading}},
+ journal = "ACM Trans. on Programming Languages and Systems",
+ volume = "27",
+ number = "6",
+ pages = "154",
+ year = "2005",
+ abstract =
+ "We present a novel approach to allow for overloading of
+ identifiers in the spirit of type classes. Our approach relies on
+ the combination of the HM(X) type system framework with Constraint
+ Handling Rules (CHRs). CHRs are a declarative language for writing
+ incremental constraint solvers, that provide our scheme with a
+ form of programmable type language. CHRs allow us to precisely
+ describe the relationships among overloaded identifiers. Under
+ some sufficient conditions on the CHRs we achieve decidable type
+ inference and the semantic meaning of programs is unambiguous. Our
+ approach provides a common formal basis for many type class
+ extensions such as multiparameter type classes and functional
+ dependencies.",
+ paper = "Stuc05.pdf",
+ keywords = "printed"
+}
+
@inproceedings{Talp92,
author = "Talpin, JeanPierre and Jouvelot, Pierre",
title = {{The Type and Effect Discipline}},
@@ 10811,6 +11666,24 @@ paper = "Brea89.pdf"
\index{Wadler, Philip}
\begin{chunk}{axiom.bib}
+@article{Wadl95,
+ author = "Wadler, Philip",
+ title = {{Monads for Functional Programming}},
+ journal = "LNCS",
+ volume = "925",
+ year = "1995",
+ abstract =
+ "The use of monads to structure functional programs is
+ described. Monads provide a convenient framework for simulating
+ effects found in other languages, such as global state, exception
+ handling, output, or nondeterminism. Three case studies are
+ looked at in detail: how monads ease the modification of a simple
+ evaluator; how monads act as the basis of a datatype of arrays
+ subject to inplace update; and how monads can be used to build
+ parsers.",
+ paper = "Wadl95.pdf",
+ keywords = "printed"
+}
+
@article{Wadl92,
author = "Wadler, Philip",
title = {{Comprehending Monads}},
@@ 10866,6 +11739,77 @@ paper = "Brea89.pdf"
paper = "Wadl07.pdf"
}
+@inproceedings{Wadl07a,
+ author = "Wadler, Philip and Findler, Robert Bruce",
+ title = {{WellTyped Programs Can't Be Blamed}},
+ booktitle = "Workshop on Scheme and Functional Programming",
+ year = "2007",
+ pages = "1526",
+ abstract =
+ "We show how {\sl contracts} with blame fit naturally with recent
+ work on {\sl hybrid types} and {\sl gradual types}. Unlike hybrid
+ types or gradual types, we require casts in the source code, in
+ order to indicate where type errors may occur. Two (perhaps
+ surprising) aspects of our approach are that refined types can
+ provide useful static guarantees even in the absence of a theorem
+ prover, and that type {\sl dynamic} should not be regarded as a
+ supertype of all other types. We factor the wellknown notion of
+ subtyping into new notions of positive and negative subtyping, and
+ use these to characterise where positive and negative blame may
+ arise. Our approach sharpens and clarifies some recent results in
+ the literature.",
+ paper = "Wadl07a.pdf",
+ keywords = "printed"
+}
+
+@phdthesis{Wehr05,
+ author = "Wehr, Stefan",
+ title = {{ML Modules and Haskell Type Classes: A Constructive
+ Comparison}},
+ school = "Albert-Ludwigs-Universit{\"a}t",
+ year = "2005",
+ abstract =
+ "Researchers repeatedly observed that the module system of ML and the
+ type class mechanism of Haskell are related. So far, this relationship
+ has received little formal investigation. The work at hand fills this
+ gap: It introduces typepreserving translations from modules to type
+ classes and vice versa, which enable a thorough comparison of the two
+ concepts.
+
+ The source language of the first translation is a subset of
+ Standard ML. The target language is Haskell with common extensions
+ and one new feature, which was developed as part of this work. The
+ second translation maps a subset of Haskell 98 to ML, with
+ wellestablished extensions. I prove that the translations
+ preserve type correctness and provide implementations for both.
+
+ Building on the insights obtained from the translations, I present
+ a thorough comparison between ML modules and Haskell type
+ classes. Moreover, I evaluate to what extent the techniques used
+ in the translations can be exploited for modular programming in
+ Haskell and for programming with adhoc polymorphism in ML.",
+ paper = "Wehr05.pdf"
+}
+
+@article{Wehr08,
+ author = "Wehr, Stefan and Chakravarty, Manuel M.T.",
+ title = {{ML Modules and Haskell Type Classes: A Constructive
+ Comparison}},
+ journal = "LNCS",
+ volume = "5356",
+ pages = "188204",
+ year = "2008",
+ abstract =
+ "Researchers repeatedly observed that the module system of ML and the
+ type class mechanism of Haskell are related. So far, this relationship
+ has received little formal investigation. The work at hand fills this
+ gap: It introduces typepreserving translations from modules to type
+ classes and vice versa, which enable a thorough comparison of the two
+ concepts.",
+ paper = "Wehr08.pdf",
+ keywords = "printed"
+}
+
@phdthesis{Zeil09,
author = "Zeilberger, Noam",
title = {{The Logical Basis of Evaluation Order and PatternMatching}},
@@ 10913,6 +11857,21 @@ paper = "Brea89.pdf"
paper = "Zeil09.pdf"
}
+@misc{Zeil16,
+ author = "Zeilberger, Noam",
+ title = {{Towards a Mathematical Science of Programming}},
+ year = "2016"
+}
+
+@inproceedings{Zeil16a,
+ author = "Zeilberger, Noam",
+ title = {{Principles of Type Refinement}},
+ booktitle = "OPLSS 2016",
+ link = "\url{http://noamz.org/oplss16/refinementsnotes.pdf}",
+ year = "2016",
+ paper = "Zeil16a.pdf"
+}
+
@incollection{Soze06,
author = "Sozeau, Matthieu",
title = {{Subset Coercions in Coq}},
@@ 11371,7 +12330,7 @@ paper = "Brea89.pdf"
which enables the communication between both systems illustrated by
some examples that can be solved by theorems and algorithms",
paper = "Ball95.pdf",
 keywords = "axiomref, CASProof"
+ keywords = "axiomref, CASProof, printed"
}
@misc{Ball98,
@@ 12472,6 +13431,46 @@ paper = "Brea89.pdf"
paper = "Cast16.pdf"
}
+@article{Care08,
+ author = "Carette, Jacques and Farmer, William M.",
+ title = {{HighLevel Theories}},
+ journal = "LNCS",
+ volume = "5144",
+ pages = "232245",
+ year = "2008",
+ abstract =
+ "We introduce highlevel theories in analogy with highlevel
+ programming languages. The basic point is that even though one can
+ define many theories via simple, lowlevel axiomatizations, that is
+ neither an effective nor a comfortable way to work with such theories.
+ We present an approach which is closer to what users of mathematics
+ employ, while still being based on formal structures.",
+ paper = "Care08.pdf",
+ keywords = "printed"
+}
+
+@article{Care09,
+ author = "Carette, Jacques and Farmer, William M.",
+ title = {{A Review of Mathematical Knowledge Management}},
+ journal = "LNCS",
+ volume = "5625",
+ pages = "233246",
+ year = "2009",
+ abstract =
+ "Mathematical Knowledge Management (MKM), as a field, has seen
+ tremendous growth in the last few years. This period was one where
+ many research threads were started and the field was defining
+ itself. We believe that we are now in a position to use the MKM body
+ of knowledge as a means to define what MKM is, what it worries about,
+ etc. In this paper, we review the literature of MKM and gather various
+ metadata from these papers. After offering some definitions
+ surrounding MKM, we analyze the metadata we have gathered from these
+ papers, in an effort to cast more light on the field of MKM and its
+ evolution",
+ paper = "Care09.pdf",
+ keywords = "printed"
+}
+
@misc{Care11a,
author = "Carette, Jacques and Farmer, William M. and Jeremic, Filip and
Maccio, Vincent and O'Connor, Russell and Tran, Quang M.",
@@ 12485,7 +13484,7 @@ paper = "Brea89.pdf"
and reflect, as much as possible, the mathematical structure present
in the objects which populate the library.",
paper = "Care11a.pdf",
 keywords = "axiomref"
+ keywords = "axiomref, printed"
}
@misc{Care11b,
@@ 12509,6 +13508,31 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@misc{Care11c,
+ author = "Carette, Jacques and Farmer, William M. and O'Connor, Russell",
+ title = {{MathScheme: Project Description}},
+ year = "2011",
+ link = "\url{http://imps.mcmaster.ca/doc/cicm2011projdesc.pdf}",
+ paper = "Care11c.pdf",
+ keywords = "printed"
+}
+
+@article{Care12,
+ author = "Carette, Jacques and O'Connor, Russell",
+ title = {{Theory Presentation Combinators}},
+ journal = "LNCS",
+ volume = "7362",
+ year = "2012",
+ abstract =
+ "We motivate and give semantics to theory presentation combinators
+ as the foundational building blocks for a scalable library of
+ theories. The key observation is that the category of contexts and
+ fibered categories are the ideal theoretical tools for this
+ purpose.",
+ paper = "Care12.pdf",
+ keywords = "printed"
+}
+
@article{Chli10,
author = "Chlipala, Adam",
title = {{An Introduction to Programming and Proving with Dependent Types
@@ 16381,7 +17405,8 @@ paper = "Brea89.pdf"
programming, bounded type quantification, and abstract data types. This
paper provides an informal introduction to type classes, and defines them
formally by means of type inference rules",
 paper = "Wadl88.pdf"
+ paper = "Wadl88.pdf",
+ keywords = "printed"
}
@inproceedings{Wadl89,
@@ 16737,6 +17762,30 @@ paper = "Brea89.pdf"
applying this method."
}
+@misc{Walt71,
+ author = "Walther, J.S.",
+ title = {{A Unified Algorithm for Elementary Functions}},
+ link = "\url{}",
+ year = "1971",
+ abstract =
+ "This paper describes a single unified algorithm for the calculation
+ of elementary functions including multiplication, division, sin,
+ cos, tan, arctan, sinh, cosh, tanh, arctanh, ln, exp and square-root.
+ The basis for the algorithm is coordinate rotation in a linear,
+ circular, or hyperbolic coordinate system depending on which function
+ is to be calculated. The only operations required are shifting,
+ adding, subtracting and the recall of prestored constants. The
+ limited domain of convergence of the algorithm is calculated,
+ leading to a discussion of the modifications required to extend the
+ domain for floating point calculations.
+
+ A hardware floating point processor using the algorithm was built at
+ HewlettPackard Laboratories. The block diagram of the processor, the
+ microprogram control used for the algorithm, and measures of actual
+ performance are shown.",
+ paper = "Walt71.pdf"
+}
+
@misc{Kama15,
author = "Kamareddine, Fairouz and Wells, Joe and Zengler, Christoph and
Barendregt, Henk",
@@ 20413,6 +21462,25 @@ paper = "Brea89.pdf"
paper = "Arno82.pdf"
}
+@article{Arno82a,
+ author = "Arnon, Dennis S. and McCallum, Scott",
+ title = {{Cylindrical Algebraic Decomposition by Quantifier Elimination}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "215222",
+ year = "1982",
+ abstract =
+ "Cylindrical algebraic decompositions were introduced as a major
+ component of a new quantifier elimination algorithm for elementary
+ algebra and geometry (G. Collins, 1973). In the present paper we turn
+ the tables and show that one can use quantifier elimination for
+ elementary algebra and geometry to obtain a new version of the
+ cylindrical algebraic decomposition algorithm. A key part of our
+ result is a theorem, of interest in its own right, that relates the
+ multiplicities of the roots of a polynomial to their continuity.",
+ paper = "Arno82a.pdf"
+}
+
@article{Arno84,
author = "Arnon, Dennis S. and Collins, George E. and McCallum, Scott",
title = {{Cylindrical Algebraic Decomposition II: An Adjacency Algorithm
@@ 21030,6 +22098,16 @@ paper = "Brea89.pdf"
paper = "Coll75.pdf"
}
+@article{Coll82a,
+ author = "Collins, George E.",
+ title = {{Factorization in Cylindrical Algebraic Decomposition  Abstract}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "212214",
+ year = "1982",
+ paper = "Coll82a.pdf"
+}
+
@article{Coll91,
author = "Collins, George E. and Hong, Hoon",
title = {{Partial Cylindrical Algebraic Decomposition for Quantifier
@@ 28441,7 +29519,7 @@ paper = "Brea89.pdf"
@article{Farm03,
author = "Farmer, William M. and von Mohrenschildt, Martin",
 title = {{An overview of a formal framework for managing mathematics}},
+ title = {{An Overview of a Formal Framework for Managing Mathematics}},
journal = "Ann. Math. Artif. Intell.",
volume = "38",
number = "13",
@@ 29802,6 +30880,28 @@ paper = "Brea89.pdf"
keywords = "axiomref"
}
+@article{Hall96,
+ author = "Hall, Cordelia V. and Hammond, Kevin and Jones, Simon L. Peyton
+ and Wadler, Philip L.",
+ title = {{Type Classes in Haskell}},
+ journal = "ACM Trans. on Programming Languages and Systems",
+ volume = "18",
+ number = "2",
+ pages = "109138",
+ year = "1996",
+ abstract =
+ "This article defines a set of type inference rules for resolving
+ overloading introduced by type classes, as used in the functional
+ programming language Haskell. Programs including type classes are
+ transformed into ones which may be typed by standard HindleyMilner
+ inference rules. In contrast to other work on type classes, the rules
+ presented here relate directly to Haskell programs. An innovative
+ aspect of this work is the use of secondorder lambda calculus to
+ record type information in the transformed program.",
+ paper = "Hall96.pdf",
+ keywords = "printed"
+}
+
@article{Harr98,
author = "Harrison, J. and Thery, L.",
title = {{A Skeptic's approach to combining HOL and Maple}},
@@ 29949,6 +31049,17 @@ paper = "Brea89.pdf"
paper = "Hear80.pdf"
}
+@article{Hear82,
+ author = "Hearn, Anthony C.",
+ title = {{REDUCE  A Case Study in Algebra System Development}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "263272",
+ year = "1982",
+ paper = "Hear82.pdf",
+ keywords = "axiomref"
+}
+
@article{Hear95,
author = "Hearn, Anthony C. and Eberhard, Schrufer",
title = {{A computer algebra system based on ordersorted algebra}},
@@ 32339,7 +33450,7 @@ paper = "Brea89.pdf"
certain Mathematica computations, so that the rigor of the proof
assistant is not compromised.",
paper = "Lewi17.pdf",
 keywords = "axiomref,printed"
+ keywords = "axiomref, printed"
}
@inproceedings{Liao95,
@@ 33165,6 +34276,17 @@ paper = "Brea89.pdf"
}
+@article{Muss82,
+ author = "Musser, David R. and Kapur, Deepak",
+ title = {{Rewrite Rule Theory and Abstract Data Type Analysis}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "7790",
+ year = "1982",
+ paper = "Muss82.pdf",
+ keywords = "axiomref"
+}
+
@InProceedings{Nayl06,
author = "Naylor, William and Padget, Julian",
title = {{From Untyped to Polymorphically Typed Objects in Mathematical
@@ 36218,7 +37340,8 @@ paper = "Brea89.pdf"
simple registration/subscription mechanism, and a translation
mechanism which ensures the transparent and provably sound exchange of
logical services.",
 paper = "Arma00.pdf"
+ paper = "Arma00.pdf",
+ keywords = "printed"
}
@article{Aste02,
diff git a/books/bookvolbib.pamphlet b/books/bookvolbib.pamphlet
index 909e1bb..4458978 100644
 a/books/bookvolbib.pamphlet
+++ b/books/bookvolbib.pamphlet
@@ 1932,7 +1932,8 @@ paper = "Brea89.pdf"
volume = "146",
year = "1969",
pages = "2960",
 paper = "Hind69.pdf, printed"
+ paper = "Hind69.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 2184,22 +2185,6 @@ paper = "Brea89.pdf"
\subsection{K} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Kaes, Stefan}
\begin{chunk}{axiom.bib}
@article{Kaes92,
 author = "Kaes, Stefan",
 title = {{Type Inference in the Presence of Overloading, Subtyping, and
 Recursive Types}},
 journal = "LISP Pointers",
 volume = "V",
 number = "1",
 pages = "193204",
 year = "1992",
 paper = "Kaes92.pdf"
}

\end{chunk}

\index{Kaltofen, E.}
\begin{chunk}{axiom.bib}
@incollection{Kalt83a,
@@ 3264,7 +3249,8 @@ paper = "Brea89.pdf"
can be defined. In this paper that the standard settheoretic model of
the ordinary typed lambda calculus cannot be extended to model this
language extension.",
 paper = "Reyn84.pdf"
+ paper = "Reyn84.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 3531,6 +3517,26 @@ paper = "Brea89.pdf"
\end{chunk}
+\index{Strachey, Christopher}
+\index{Wadsworth, Christopher}
+\begin{chunk}{axiom.bib}
+@article{Stra00a,
+ author = "Strachey, Christopher and Wadsworth, Christopher",
+ title = {{Continuations: A Mathematical Semantics for Handling Full Jumps}},
+ journal = "HigherOrder and Symbolic Computation",
+ volume = "13",
+ pages = "135152",
+ year = "2000",
+ abstract =
+ "This paper describes a method of giving the mathematical
+ semantics of programming languages which include the most general
+ form of jumps.",
+ paper = "Stra00a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Stroustrup, Bjarne}
\begin{chunk}{axiom.bib}
@book{Stro95,
@@ 4973,6 +4979,31 @@ when shown in factored form.
\end{chunk}
+\index{Bordoni, L.}
+\index{Colagrossi, A.}
+\index{Miola, A.}
+\begin{chunk}{axiom.bib}
+@article{Bord82,
+ author = "Bordoni, L. and Colagrossi, A. and Miola, A.",
+ title = {{Linear Algebraic Approach for Computing Polynomial Resultant}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "231236",
+ year = "1982",
+ abstract =
+ "This paper presents a linear algebraic method for computing the
+ resultant of two polynomials. This method is based on the
+ computation of a determinant of order equal to the minimum of the
+ degrees of the two given polynomials. This method turns out to be
+ preferable to other known linear algebraic methods both from a
+ computational point of view and for a total generality respect to
+ the class of the given polynomials. Some relationships of this
+ method with the polynomial pseudoremainder operation are discussed.",
+ paper = "Bord82.pdf"
+}
+
+\end{chunk}
+
\section{Algebraic Algorithms} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Belovari, G.}
@@ 5421,6 +5452,81 @@ when shown in factored form.
\end{chunk}
+\index{Lazard, Daniel}
+\begin{chunk}{axiom.bib}
+@article{Laza82,
+ author = "Lazard, Daniel",
+ title = {{Commutative Algebra and Computer Algebra}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "4048",
+ year = "1982",
+ abstract =
+ "It is well known that computer algebra deals with commutative rings
+ such as the integers, the rationals, the modular integers and
+ polynomials over such rings.
+
+ On the other hand, commutative algebra is that part of mathematics
+ which studies commutative rings.
+
+ Our aim is to make this link more precise. It will appear that most of
+ the constructions which appear in classical commutative algebra can be
+ done explicitly in finite time. However, there are
+ exceptions. Moreover, most of the known algorithms are intractable :
+ The problems are generally exponential by themselves, but many
+ algorithms are overoverexponential. It needs a lot of work to find
+ better methods, and to implement them, with the final hope to open a
+ computation domain larger than this one which is possible by hand.
+
+ To illustrate the limits and the possibilities of computerizing
+ commutative algebra, we describe an algorithm which tests the
+ primality of a polynomial ideal and we give an example of a single
+ algebraic extension of fields $K\subset L$ such that there exists an
+ algorithm of factorization for the polynomials over $K$, but not for
+ the polynomials over $L$.",
+ paper = "Laza82.pdf"
+}
+
+\end{chunk}
+
+\index{Lazard, Daniel}
+\begin{chunk}{axiom.bib}
+@article{Laza82a,
+ author = "Lazard, Daniel",
+ title = {{On Polynomial Factorization}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "144157",
+ year = "1982",
+ abstract =
+ "We present new algorithms for factoring univariate polynomials
+ over finite fields. They are variants of the algorithms of Camion
+ and CantorZassenhaus and differ from them essentially by
+ computing the primitive idempotents of the algebra $K[X]/f$ before
+ factoring $f$.
+
+ These algorithms are probabilistic in the following sense. The
+ time of computation depends on random choices, but the validity of
+ the result does not depend on them. So, worst case complexity,
+ being infinite, is meaningless and we compute average
+ complexity. It appears that our and CantorZassenhaus algorithms
+ have the same asymptotic complexity and they are the best
+ algorithms actually known; with elementary multiplication and GCD
+ computation, CantorZassenhaus algorithm is always better than
+ ours; with fast multiplication and GCD, it seems that ours is
+ better, but this fact is not yet proven.
+
+ Finally, for factoring polynomials over the integers, it seems
+ that the best strategy consists in choosing prime moduli as big as
+ possible in the range where the time of the multiplication does
+ not depend on the size of the factors (machine word size). An
+ accurate computation of the involved constants would be needed for
+ proving this estimation.",
+ paper = "Laza82a.pdf"
+}
+
+\end{chunk}
+
\index{Loos, Rudiger}
\begin{chunk}{axiom.bib}
@article{Loos72a,
@@ 5459,6 +5565,29 @@ when shown in factored form.
\end{chunk}
+\index{Padget, J.A.}
+\begin{chunk}{axiom.bib}
+@article{Padg82,
+ author = "Padget, J.A.",
+ title = {{Escaping from Intermediate Expression Swell: A Continuing Saga}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "256262",
+ year = "1982",
+ abstract =
+ "The notion of a closed continuation is introduced, and a functional
+ dialect of LISP based on this concept is presented. Coroutines using
+ function call and return are shown to be merely a more general form
+ of expression evaluation. Potential applications for coroutines in
+ algebraic simplification are suggested, and a specific example of
+ their use is given in a novel attack on the phenomenon of
+ intermediate expression swell in polynomial multiplication.",
+ paper = "Padg82.pdf"
+}
+
+\end{chunk}
+
\index{Platzer, Andre}
\index{Quesel, JanDavid}
\index{Rummer, Philipp}
@@ 6365,6 +6494,20 @@ when shown in factored form.
\end{chunk}
+\index{Norman, Arthur}
+\begin{chunk}{axiom.bib}
+@article{Norm82,
+ author = "Norman, Arthur",
+ title = {{The Development of a VectorBased Algebra System}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "237248",
+ year = "1982",
+ paper = "Norm82.pdf"
+}
+
+\end{chunk}
+
\section{The Seven Dwarfs} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Kaltofen, Erich}
@@ 10336,7 +10479,8 @@ when shown in factored form.
proof assistant, a tool for machinechecked mathematical theorem
proving; and formal logical reasoning about the correctness of
programs.",
 paper = "Chli17.pdf, printed"
+ paper = "Chli17.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 11935,7 +12079,8 @@ when shown in factored form.
several principles (called search principles) which are applicable to
the design of efficient proofprocedures employing resolution as the
basic logical process.",
 paper = "Robi65.pdf, printed"
+ paper = "Robi65.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 12338,6 +12483,34 @@ when shown in factored form.
\end{chunk}
+\index{Balsters, Herman}
+\index{Fokkinga, Maarten M.}
+\begin{chunk}{axiom.bib}
+@article{Bals91,
+ author = "Balsters, Herman and Fokkinga, Maarten M.",
+ title = {{Subtyping can have a simple semantics}},
+ journal = "Theoretical Computer Science",
+ volume = "87",
+ pages = "8196",
+ year = "1991",
+ abstract =
+ "Consider a first order typed language, with semantics
+ $\llbracket~\rrbracket$ for expressions and types. Adding
+ subtyping means that a partial order $\le$ on types is defined
+ and that the typing rules are extended to the effect that
+ expression $e$ has type $\tau$ whenever $e$ has type $\sigma$ and
+ $\sigma \le \tau$. We show how to adapt the semantics
+ $\llbracket~\rrbracket$ in a {\sl simple set theoretic way},
+ obtaining a semantics $\llbracket~\rrbracket$ that satisfies, in
+ addition to some obvious requirements, also the property
+ $\llbracket\sigma\rrbracket \subseteq \llbracket\tau\rrbracket$,
+ whenever $\sigma \le \tau$.",
+ paper = "Bals91.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Barras, Bruno}
\begin{chunk}{axiom.bib}
@techreport{Barr96a,
@@ 12452,6 +12625,22 @@ when shown in factored form.
\end{chunk}
+\index{Broda, Krysia}
+\index{Eisenbach, Susan}
+\index{Khoshnevisan, Hessam}
+\index{Vickers, Steve}
+\begin{chunk}{axiom.bib}
+@book{Brod94,
+ author = "Broda, Krysia and Eisenbach, Susan and Khoshnevisan, Hessam
+ and Vickers, Steve",
+ title = {{Reasoned Programming}},
+ publisher = "Imperial College",
+ year = "1994",
+ paper = "Brod94.pdf"
+}
+
+\end{chunk}
+
\subsection{C} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Caldwell, James L.}
@@ 12482,6 +12671,51 @@ when shown in factored form.
\end{chunk}
+\index{Cardelli, Luca}
+\begin{chunk}{axiom.bib}
+@article{Card84,
+ author = "Cardelli, Luca",
+ title = {{A Semantics of Multiple Inheritance}},
+ journal = "LNCS",
+ volume = "173",
+ year = "1984",
+ paper = "Card84.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Carette, Jacques}
+\index{Farmer, William M.}
+\index{Sorge, Volker}
+\begin{chunk}{axiom.bib}
+@inproceedings{Care07,
+ author = "Carette, Jacques and Farmer, William M. and Sorge, Volker",
+ title = {{A Rational Reconstruction of a System for Experimental
+ Mathematics}},
+ booktitle = "14th Workshop on Automated Reasoning",
+ publisher = "unknown",
+ year = "2007",
+ abstract =
+ "In previous papers we described the implementation of a system
+ which combines mathematical object generation, transformation and
+ filtering, conjecture generation, proving and disproving for
+ mathematical discovery in nonassociative algebra. While the system
+ has generated novel, fully verified theorems, their construction
+ involved a lot of ad hoc communication between disparate systems. In
+ this paper we carefully reconstruct a specification of a subprocess
+ of the original system in a framework for trustable communication
+ between mathematics systems put forth by us. It employs the concept
+ of biform theories that enables the combined formalisation of the
+ axiomatic and algorithmic theories behind the generation
+ process. This allows us to gain a much better understanding of the
+ original system, and exposes clear generalisation opportunities.",
+ paper = "Care07.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Cartwright, Robert}
\begin{chunk}{axiom.bib}
@phdthesis{Cart76,
@@ 12592,6 +12826,64 @@ when shown in factored form.
\end{chunk}
\index{Crary, Karl}
+\index{Harper, Robert}
+\index{Puri, Sidd}
+\begin{chunk}{axiom.bib}
+@inproceedings{Crar99,
+ author = "Crary, Karl and Harper, Robert and Puri, Sidd",
+ title = {{What is a Recursive Module}},
+ booktitle = "Conf. on Programming Language Design and Implementation",
+ year = "1999",
+ link = "\url{https://www.cs.cmu.edu/~crary/papers/1999/recmod/recmod.dvi}",
+ abstract =
+ "A hierarchical module system is an effective tool for structuring
+ large programs. Strictly hierarchical module systems impose an
+ acyclic ordering on import dependencies among program units. This
+ can impede modular programming by forcing mutuallydependent
+ components to be consolidated into a single module. Recently there
+ have been several proposals for module systems that admit cyclic
+ dependencies, but it is not clear how these proposals relate to
+ one another, nor how one might integrate them into an expressive
+ module system such as that of ML.
+
+ To address this question we provide a typetheoretic analysis of
+ the notion of a recursive module in the context of a
+ ``phasedistinction'' formalism for higherorder module
+ systems. We extend this calculus with a recursive module mechanism
+ and a new form of signature, called a {\sl recursively dependent
+ signature}, to support the definition of recursive modules. These
+ extensions are justified by an interpretation in terms of more
+ primitive language constructs. This interpretation may also serve
+ as a guide for implementation.",
+ paper = "Crar99.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Crary, Karl}
+\begin{chunk}{axiom.bib}
+@techreport{Crar02,
+ author = "Crary, Karl",
+ title = {{Toward a Foundational Typed Assembly Language}},
+ institution = "Carnegie Mellon University",
+ number = "CMUCS02196",
+ year = "2002",
+ link = "\url{https://www.cs.cmu.edu/~crary/papers/2003/talt/talttr.pdf}",
+ abstract =
+ "We present the design of a typed assembly language called TALT that
+ supports heterogeneous tuples, disjoint sums, arrays, and a general
+ account of addressing modes. TALT also implements the von Neumann
+ model in which programs are stored in memory, and supports relative
+ addressing. Type safety for execution and for garbage collection are
+ shown by machinecheckable proofs. TALT is the first formalized typed
+ assembly language to provide any of these features.",
+ paper = "Crar02.pdf"
+}
+
+\end{chunk}
+
+\index{Crary, Karl}
\index{Sarkar, Susmit}
\begin{chunk}{axiom.bib}
@misc{Crar08,
@@ 12747,6 +13039,18 @@ when shown in factored form.
\index{Dijkstra, E.W.}
\begin{chunk}{axiom.bib}
+@misc{Dijk71,
+ author = "Dijkstra, E.W.",
+ title = {{A Short Introduction to the Art of Programming}},
+ comment = "EWD316",
+ year = "1971",
+ paper = "Dijk71.pdf"
+}
+
+\end{chunk}
+
+\index{Dijkstra, E.W.}
+\begin{chunk}{axiom.bib}
@incollection{Dijk72a,
author = "Dijkstra, E.W.",
title = {{Notes on Structured Programming}},
@@ 12864,6 +13168,96 @@ when shown in factored form.
\end{chunk}
+\index{Dreyer, Derek}
+\index{Crary, Karl}
+\index{Harper, Robert}
+\begin{chunk}{axiom.bib}
+@techreport{Drey02,
+ author = "Dreyer, Derek and Crary, Karl and Harper, Robert",
+ title = {{A Type System for HigherOrder Modules (Expanded Version)}},
+ type = "technical report",
+ institution = "Carnegie Mellon University",
+ number = "CMUCS02122R",
+ year = "2002",
+ link = "\url{https://www.cs.cmu.edu/~crary/papers/2003/thoms/thomstr.pdf}",
+ abstract =
+ "We present a type theory for higherorder modules that accounts for
+ many central issues in module system design, including translucency,
+ applicativity, generativity, and modules as firstclass values.
+ Our type system harmonizes design elements from previous work,
+ resulting in a simple, economical account of modular programming. The
+ main unifying principle is the treatment of abstraction mechanisms
+ as computational effects. Our language is the first to provide a
+ complete and practical formalization of all of these critical issues
+ in module system design.",
+ paper = "Drey02.pdf"
+}
+
+\end{chunk}
+
+\index{Dreyer, Derek}
+\index{Crary, Karl}
+\index{Harper, Robert}
+\begin{chunk}{axiom.bib}
+@article{Drey03,
+ author = "Dreyer, Derek and Crary, Karl and Harper, Robert",
+ title = {{A Type System for HigherOrder Modules}},
+ journal = "ACM SIGPLAN Notices",
+ volume = "38",
+ number = "1",
+ year = "2003",
+ link = "\url{https://www.cs.cmu.edu/~crary/papers/2003/thoms/thoms.pdf}",
+ abstract =
+ "We present a type theory for higherorder modules that accounts for
+ many central issues in module system design, including translucency,
+ applicativity, generativity, and modules as firstclass values.
+ Our type system harmonizes design elements from previous work,
+ resulting in a simple, economical account of modular programming. The
+ main unifying principle is the treatment of abstraction mechanisms
+ as computational effects. Our language is the first to provide a
+ complete and practical formalization of all of these critical issues
+ in module system design.",
+ paper = "Drey03.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Dreyer, Derek}
+\index{Harper, Robert}
+\index{Chakravarty, Manuel M.T.}
+\index{Keller, Gabriele}
+\begin{chunk}{axiom.bib}
+@inproceedings{Drey07,
+ author = "Dreyer, Derek and Harper, Robert and Chakravarty, Manuel M.T.
+ and Keller, Gabriele",
+ title = {{Modular Type Classes}},
+ booktitle = "Proc. POPL'07",
+ pages = "6370",
+ year = "2007",
+ abstract =
+ "ML modules and Haskell type classes have proven to be highly
+ effective tools for program structuring. Modules emphasize explicit
+ configuration of program components and the use of data abstraction.
+ Type classes emphasize implicit program construction and ad hoc
+ polymorphism. In this paper , we show how the implicitlytyped
+ style of type class programming may be supported within the framework
+ of an explicitlytyped module language by viewing type classes as a
+ particular mode of use of modules. This view offers a harmonious
+ integration of modules and type classes, where type class features,
+ such as class hierarchies and associated types, arise naturally as
+ uses of existing modulelanguage constructs, such as module
+ hierarchies and type components. In addition, programmers have
+ explicit control over which type class instances are available for
+ use by type inference in a given scope. We formalize our approach as
+ a HarperStonestyle elaboration relation, and provide a sound type
+ inference algorithm as a guide to implementation.",
+ paper = "Drey07.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Duran, Antonio J.}
\index{Perez, Mario}
\index{Varona, Juan L.}
@@ 12888,6 +13282,138 @@ when shown in factored form.
\subsection{F} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Farm00,
+ author = "Farmer, William M.",
+ title = {{A Proposal for the Development of an Interactive
+ Mathematics Laboratory for Mathematics Education}},
+ booktitle = "Workshop on Deduction Systems for Mathematics Education",
+ pages = "20--25",
+ year = "2000",
+ paper = "Farm00.pdf",
+ keywords = "axiomref, printed"
+}
+
+\end{chunk}
+
+\index{Farmer, William M.}
+\index{Mohrenschildt, Martin v.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Farm00a,
+ author = "Farmer, William M. and Mohrenschildt, Martin v.",
+ title = {{Transformers for Symbolic Computation and Formal Deduction}},
+ booktitle = "CADE17",
+ pages = "36--45",
+ year = "2000",
+ abstract =
+ "A transformer is a function that maps expressions to expressions.
+ Many transformational operators — such as expression evaluators and
+ simplifiers, rewrite rules, rules of inference, and decision
+ procedures — can be represented by transformers. Computations and
+ deductions can be formed by applying sound transformers in
+ sequence. This paper introduces machinery for defining sound
+ transformers in the context of an axiomatic theory in a formal
+ logic. The paper is intended to be a first step in a development of an
+ integrated framework for symbolic computation and formal deduction.",
+ paper = "Farm00a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
+@article{Farm04,
+ author = "Farmer, William M.",
+ title = {{MKM: A New Interdisciplinary Field of Research}},
+ journal = "SIGSAM",
+ volume = "38",
+ pages = "47--52",
+ year = "2004",
+ abstract =
+ "Mathematical Knowledge Management (MKM) is a new interdisciplinary
+ field of research in the intersection of mathematics, computer
+ science, library science, and scientific publishing. Its objective is
+ to develop new and better ways of managing mathematical knowledge
+ using sophisticated software tools. Its grand challenge is to create
+ a universal digital mathematics library (UDML), accessible via the
+ WorldWide Web, that contains essentially all mathematical knowledge
+ (intended for the public). The challenges facing MKM are daunting,
+ but a UDML, even just partially constructed, would transform how
+ mathematics is learned and practiced.",
+ paper = "Farm04.pdf",
+ keywords = "printed, axiomref"
+}
+
+\end{chunk}
+
+\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
+@article{Farm07,
+ author = "Farmer, William M.",
+ title = {{Biform Theories in Chiron}},
+ journal = "LNCS",
+ volume = "4573",
+ pages = "66--79",
+ year = "2007",
+ abstract =
+ "An axiomatic theory represents mathematical knowledge declaratively
+ as a set of axioms. An algorithmic theory represents mathematical
+ knowledge procedurally as a set of algorithms. A biform theory is
+ simultaneously an axiomatic theory and an algorithmic theory. It
+ represents mathematical knowledge both declaratively and procedurally.
+ Since the algorithms of algorithmic theories manipulate the syntax of
+ expressions, biform theories — as well as algorithmic theories — are
+ difficult to formalize in a traditional logic without the means to
+ reason about syntax. Chiron is a derivative of
+ von-Neumann-Bernays-G{\"o}del (NBG) set theory that is intended to be a
+ practical, generalpurpose logic for mechanizing mathematics. It
+ includes elements of type theory, a scheme for handling undefinedness,
+ and a facility for reasoning about the syntax of expressions. It is an
+ exceptionally wellsuited logic for formalizing biform theories. This
+ paper defines the notion of a biform theory, gives an overview of
+ Chiron, and illustrates how biform theories can be formalized in Chiron.",
+ paper = "Farm07.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Farmer, William M.}
+\index{Larjani, Pouya}
+\begin{chunk}{axiom.bib}
+@misc{Farm14,
+ author = "Farmer, William M. and Larjani, Pouya",
+ title = {{Frameworks for Reasoning about Syntax that Utilize
+ Quotation and Evaluation}},
+ link = "\url{http://imps.mcmaster.ca/doc/syntax.pdf}",
+ year = "2014",
+ abstract =
+ "It is often useful, if not necessary, to reason about the syntactic
+ structure of an expression in an interpreted language (i.e., a
+ language with a semantics). This paper introduces a mathematical
+ structure called a syntax framework that is intended to be an abstract
+ model of a system for reasoning about the syntax of an interpreted
+ language. Like many concrete systems for reasoning about syntax, a
+ syntax framework contains a mapping of expressions in the
+ interpreted language to syntactic values that represent the syntactic
+ structures of the expressions; a language for reasoning about the
+ syntactic values; a mechanism called quotation to refer to the
+ syntactic value of an expression; and a mechanism called evaluation to
+ refer to the value of the expression represented by a syntactic value.
+ A syntax framework provides a basis for integrating reasoning about
+ the syntax of the expressions with reasoning about what the
+ expressions mean. The notion of a syntax framework is used to discuss
+ how quotation and evaluation can be built into a language and to
+ define what quasiquotation is. Several examples of syntax frameworks
+ are presented.",
+ paper = "Farm14.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Filliatre, JeanChristophe}
\begin{chunk}{axiom.bib}
@article{Fill03,
@@ 13003,6 +13529,31 @@ when shown in factored form.
\end{chunk}
+\index{Freeman, Tim}
+\index{Pfenning, Frank}
+\begin{chunk}{axiom.bib}
+@inproceedings{Free91,
+ author = "Freeman, Tim and Pfenning, Frank",
+ title = {{Refinement Types for ML}},
+ booktitle = "ACM SIGPLAN PLDI'91",
+ year = "1991",
+ link = "\url{http://www.cs.cmu.edu/~fp/papers/pldi91.pdf}",
+ abstract =
+ "We describe a refinement of ML's type system allowing the
+ specification of recursively defined subtypes of userdefined
+ datatypes. The resulting system of {\sl refinement types}
+ preserves desirable properties of ML such as decidability of type
+ inference, while at the same time allowing more errors to be
+ detected at compiletime. The type system combines abstract
+ interpretation with ideas from the intersection type discipline,
+ but remains closely tied to ML in that refinement types are given
+ only to programs which are already welltyped in ML.",
+ paper = "Free91.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Fox, Anthony}
\begin{chunk}{axiom.bib}
@article{Foxx03,
@@ 13038,6 +13589,39 @@ when shown in factored form.
\end{chunk}
+\index{Friedman, Daniel P.}
+\index{Wise, David S.}
+\begin{chunk}{axiom.bib}
+@techreport{Frie76,
+ author = "Friedman, Daniel P. and Wise, David S.",
+ title = {{CONS should not Evaluate its Arguments}},
+ institution = "Indiana University",
+ number = "TR44",
+ year = "1976",
+ abstract =
+ "The constructor function which allocates and fills records in
+ recursive, sideeffectfree procedural languages is redefined to be a
+ nonstrict (Vuillemin 1974) elementary operation. Instead of
+ evaluating its arguments, it builds suspensions of them which are not
+ coerced until the suspension is accessed by a strict elementary
+ function. The resulting evaluation procedures are strictly more
+ powerful than existing schemes for languages such as LISP. The main
+ results are that Landin's streams are subsumed into McCarthy's LISP
+ merely by the redefinition of elementary functions, that invocations of
+ LISP's evaluator can be minimized by redefining the elementary
+ functions without redefining the interpreter, and as a strong
+ conjecture, that redefining the elementary functions yields the least
+ fixed-point semantics for McCarthy's evaluation scheme. This new
+ insight into the role of construction functions will do much to ease
+ the interface between recursive programmers and iterative programmers,
+ as well as the interface between programmers and data structure
+ designers.",
+ paper = "Frie76.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{G} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Girard, JeanYves}
@@ 13189,6 +13773,32 @@ when shown in factored form.
\end{chunk}
+\index{Gross, Jason}
+\index{Chlipala, Adam}
+\begin{chunk}{axiom.bib}
+@misc{Gros15,
+ author = "Gross, Jason and Chlipala, Adam",
+ title = {{Parsing Parses}},
+ link = "\url{https://people.csail.mit.edu/jgross/personal-website/papers/2015-parsing-parse-trees.pdf}",
+ year = "2015",
+ abstract =
+ "We present a functional parser for arbitrary contextfree grammars,
+ together with soundness and completeness proofs, all inside Coq. By
+ exposing the parser in the right way with parametric polymorphism and
+ dependent types, we are able to use the parser to prove its own
+ soundness, and, with a little help from relational parametricity,
+ prove its own completeness, too. Of particular interest is one strange
+ instantiation of the type and value parameters: by parsing parse trees
+ instead of strings, we convince the parser to generate its own
+ completeness proof. We conclude with highlights of our experiences
+ iterating through several versions of the Coq development, and some
+ general lessons about dependently typed programming.",
+ paper = "Gros15.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Guttman, Joshua D.}
\index{Ramsdell, John D.}
\index{Wand, Mitchell}
@@ 13359,6 +13969,64 @@ when shown in factored form.
\subsection{K} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Kaes, Stefan}
+\begin{chunk}{axiom.bib}
+@article{Kaes88,
+ author = "Kaes, Stefan",
+ title = {{Parametric Overloading in Polymorphic Programming Languages}},
+ journal = "LNCS",
+ volume = "300",
+ pages = "131--144",
+ year = "1988",
+ abstract =
+ "The introduction of unrestricted overloading in languages with type
+ systems based on implicit parametric polymorphism generally destroys
+ the principal type property: namely that the type of every expression
+ can uniformly be represented by a single type expression over some set
+ of type variables. As a consequence, type inference in the presence
+ of unrestricted overloading can become a NPcomplete problem. In
+ this paper we define the concept of parametric overloading as a
+ restricted form of overloading which is easily combined with
+ parametric polymorphism. Parametric overloading preserves the
+ principal type property, thereby allowing the design of efficient type
+ inference algorithms. We present sound type deduction systems, both
+ for predefined and programmer defined overloading. Finally we state
+ that parametric overloading can be resolved either statically, at
+ compile time, or dynamically, during program execution.",
+ paper = "Kaes88.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kaes, Stefan}
+\begin{chunk}{axiom.bib}
+@article{Kaes92,
+ author = "Kaes, Stefan",
+ title = {{Type Inference in the Presence of Overloading, Subtyping and
+ Recursive Types}},
+ journal = "ACM Lisp Pointers",
+ volume = "5",
+ number = "1",
+ year = "1992",
+ pages = "193--204",
+ abstract =
+ "We present a unified approach to type inference in the presence of
+ overloading and coercions based on the concept of {\sl constrained
+ types}. We define a generic inference system, show that subtyping and
+ overloading can be treated as a special instance of this system and
+ develop a simple algorithm to compute principal types. We prove the
+ decidability of type inference for the class of {\sl decomposable
+ predicates} and develop a canonical representation for principal types
+ based on {\sl most accurate simplifications} of constraint
+ sets. Finally, we investigate the extension of our techniques to
+ {\sl recursive types}.",
+ paper = "Kaes92.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Keimel, Klaus}
\index{Plotkin, Gordon D.}
\begin{chunk}{axiom.bib}
@@ 13395,6 +14063,45 @@ when shown in factored form.
\end{chunk}
+\index{Keller, C}
+\begin{chunk}{axiom.bib}
+@phdthesis{Kell13,
+ author = "Keller, C.",
+ title = {{A Matter of Trust: Skeptical Communication Between Coq and
+ External Provers}},
+ school = "Ecole Polytechnique",
+ year = "2013",
+ link =
+"\url{https://www.lri.fr/~keller/Documentsrecherche/Publications/thesis13.pdf}",
+ abstract =
+ "This thesis studies the cooperation between the Coq proof assistant
+ and external provers through proof witnesses. We concentrate on two
+ different kinds of provers that can return certificates: first, answers
+ coming from SAT and SMT solvers can be checked in Coq to increase both
+ the confidence in these solvers and Coq 's automation; second,
+ theorems established in interactive provers based on HigherOrder
+ Logic can be exported to Coq and checked again, in order to offer the
+ possibility to produce formal developments which mix these two
+ different logical paradigms. It ended up in two software : SMTCoq, a
+ bidirectional cooperation between Coq and SAT/SMT solvers, and
+ HOLLIGHTCOQ, a tool importing HOL Light theorems into Coq.
+
+ For both tools, we took great care to define a modular and efficient
+ architecture, based on three clearly separated ingredients: an
+ embedding of the formalism of the external tool inside Coq which is
+ carefully translated into Coq terms, a certified checker to establish
+ the proofs using the certificates and an OCaml preprocessor to transform
+ proof witnesses coming from different provers into a generic
+ certificate. This division allows that a change in the format of proof
+ witnesses only affects the preprocessor, but no proved Coq code.
+ Another fundamental component for efficiency and modularity is
+ computational reflection, which exploits the computational power of
+ Coq to establish generic and small proofs based on the certificates.",
+ paper = "Kell13.pdf"
+}
+
+\end{chunk}
+
\index{Kranz, David}
\index{Kelsey, Richard}
\index{Rees, Jonathan}
@@ 13499,6 +14206,18 @@ when shown in factored form.
\end{chunk}
+\index{McCarthy, John}
+\begin{chunk}{axiom.bib}
+@incollection{Mcca63,
+ author = "McCarthy, John",
+ title = {{A Basis for a Mathematical Theory of Computation}},
+ booktitle = "Computer Programming and Formal Systems",
+ year = "1963",
+ paper = "Mcca63.pdf"
+}
+
+\end{chunk}
+
\index{McDowell, Raymond}
\index{Miller, Dale}
\begin{chunk}{axiom.bib}
@@ 13589,6 +14308,81 @@ when shown in factored form.
\end{chunk}
+\index{Michaelson, Greg}
+\begin{chunk}{axiom.bib}
+@book{Mich11,
+ author = "Michaelson, Greg",
+ title = {{Functional Programming Through Lambda Calculus}},
+ year = "2011",
+ publisher = "Dover",
+ isbn = "9780486478838"
+}
+
+\end{chunk}
+
+\index{Mili, Ali}
+\index{Aharon, Shir}
+\index{Nadkarni, Chaitanya}
+\begin{chunk}{axiom.bib}
+@article{Mili09,
+ author = "Mili, Ali and Aharon, Shir and Nadkarni, Chaitanya",
+ title = {{Mathematics for Reasoning about Loop Functions}},
+ journal = "Science of Computer Programming",
+ volume = "79",
+ year = "2009",
+ pages = "989--1020",
+ abstract =
+ "The criticality of modern software applications, the pervasiveness of
+ malicious code concerns, the emergence of thirdparty software
+ development, and the preponderance of program inspection as a quality
+ assurance method all place a great premium on the ability to analyze
+ programs and derive their function in all circumstances of use and all
+ its functional detail. For Clike programming languages, one of the
+ most challenging tasks in this endeavor is the derivation of loop
+ functions. In this paper, we outline the premises of our approach to
+ this problem, present some mathematical results, and discuss how these
+ results can be used as a basis for building an automated tool that
+ derives the function of while loops under some conditions.",
+ paper = "Mili09.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Mellies, PaulAndre}
+\index{Zeilberger, Noam}
+\begin{chunk}{axiom.bib}
+@inproceedings{Mell15,
+ author = "Mellies, PaulAndre and Zeilberger, Noam",
+ title = {{Functors are Type Refinement Systems}},
+ booktitle = "POPL'15",
+ publisher = "ACM",
+ year = "2015",
+ abstract =
+ "The standard reading of type theory through the lens of category
+ theory is based on the idea of viewing a type system as a category
+ of welltyped terms. We propose a basic revision of this reading;
+ rather than interpreting type systems as categories, we describe
+ them as {\sl functors} from a category of typing derivations to a
+ category of underlying terms. Then, turning this around, we
+ explain how in fact {\sl any} functor gives rise to a generalized
+ type system, with an abstract notion of type judgment, typing
+ derivations and typing rules. This leads to a purely categorical
+ reformulation of various natural classes of type systems as
+ natural classes of functors.
+
+ The main purpose of this paper is to describe the general
+ framework (which can also be seen as providing a categorical
+ analysis of {\sl refinement types}), and to present a few
+ applications. As a larger case study, we revisit Reynold's paper
+ on ``The Meaning of Types'' (2000), showing how the paper's main
+ results may be reconstructed along these lines.",
+ paper = "Mell15.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Morgan, Carroll}
\begin{chunk}{axiom.bib}
@book{Morg98,
@@ 13640,6 +14434,37 @@ when shown in factored form.
\end{chunk}
+\index{Mosses, Peter}
+\begin{chunk}{axiom.bib}
+@article{Moss84,
+ author = "Mosses, Peter",
+ title = {{A Basic Abstract Semantics Algebra}},
+ journal = "LNCS",
+ volume = "173",
+ year = "1984",
+ abstract =
+ "It seems that there are some pragmatic advantages in using Abstract
+ Semantic Algebras (ASAs) instead of lambda-notation in denotational
+ semantics. The values of ASAs correspond to 'actions' (or
+ 'processes'), and the operators correspond to primitive ways of
+ combining actions. There are simple ASAs for the various independent
+ 'facets' of actions: a functional ASA for dataflow, an imperative ASA
+ for assignments, a declarative ASA for bindings, etc. The aim is to
+ obtain general ASAs by systematic combination of these simple ASAs.
+
+ Here we specify a basic ASA that captures the common features of the
+ functional, imperative and declarative ASAs  and highlights their
+ differences. We discuss the correctness of ASA specifications, and
+ sketch the proof of the consistency and (limiting) completeness of the
+ functional ASA, relative to a simple model.
+
+ Some familiarity with denotational semantics and algebraic
+ specifications is assumed.",
+ paper = "Moss84.pdf"
+}
+
+\end{chunk}
+
\index{Myreen, Magnus O.}
\index{Davis, Jared}
\begin{chunk}{axiom.bib}
@@ 13955,6 +14780,34 @@ when shown in factored form.
\end{chunk}
+\index{Piroi, Florina}
+\index{Buchberger, Bruno}
+\index{Rosenkranz, Camelia}
+\begin{chunk}{axiom.bib}
+@misc{Piro08,
+ author = "Piroi, Florina and Buchberger, Bruno and Rosenkranz, Camelia",
+ title = {{Mathematical Journals as Reasoning Agents: Literature Review}},
+ year = "2008",
+ link = "\url{http://www.risc.jku.at/publications/download/risc_3442/MathAgentsforSFB2008031012h00.pdf}",
+ abstract =
+ "This report reviews the literature relevant for the research project
+ 'Math−Agents: Mathematical Journals as Reasoning Agents' proposed by
+ Bruno Buchberger as a technology transfer project based on the results
+ of the SFB Project 'Scientific Computing', in particular the project
+ SFB 1302, 'Theorema'. The project aims at computer−supporting the
+ refereeing process of mathematical journals by tools that are mainly
+ based on automated reasoning and also at building up the knowledge
+ archived in mathematical journals in such a way that they can act as
+ interactive and active reasoning agents later on. In this report,
+ we review current mathematical software systems with a focus on the
+ availability of tools that can contribute to the goals of the Math−
+ Agents project.",
+ paper = "Piro08.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Pollack, Robert}
\begin{chunk}{axiom.bib}
@phdthesis{Poll94,
@@ 14049,6 +14902,29 @@ when shown in factored form.
\end{chunk}
+\index{Reynolds, J.C.}
+\begin{chunk}{axiom.bib}
+@article{Reyn85,
+ author = "Reynolds, J.C.",
+ title = {{Three Approaches to Type Structure}},
+ journal = "LNCS",
+ volume = "185",
+ year = "1985",
+ abstract =
+ "We examine three disparate views of the type structure of
+ programming languages: Milner's type deduction system and polymorphic
+ let construct, the theory of subtypes and generic operators, and
+ the polymorphic or secondorder typed lambda calculus. These
+ approaches are illustrated with a functional language including
+ product, sum and list constructors. The syntactic behavior of types
+ is formalized with type inference rules, but their semantics is
+ treated intuitively.",
+ paper = "Reyn85.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{S} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Sabry, Amr}
@@ 14079,6 +14955,139 @@ when shown in factored form.
\end{chunk}
+\index{Sarma, Gopal}
+\index{Hay, Nick J.}
+\begin{chunk}{axiom.bib}
+@article{Sarm17,
+ author = "Sarma, Gopal and Hay, Nick J.",
+ title = {{Robust Computer Algebra, Theorem Proving, and Oracle AI}},
+ journal = "Informatica",
+ volume = "41",
+ number = "3",
+ link = "\url{https://arxiv.org/pdf/1708.02553.pdf}",
+ year = "2017",
+ abstract =
+ "In the context of superintelligent AI systems, the term 'oracle' has
+ two meanings. One refers to modular systems queried for
+ domainspecific tasks. Another usage, referring to a class of systems
+ which may be useful for addressing the value alignment and AI control
+ problems, is a superintelligent AI system that only answers questions.
+ The aim of this manuscript is to survey contemporary research problems
+ related to oracles which align with longterm research goals of AI
+ safety. We examine existing question answering systems and argue that
+ their high degree of architectural heterogeneity makes them poor
+ candidates for rigorous analysis as oracles. On the other hand, we
+ identify computer algebra systems (CASs) as being primitive examples
+ of domainspecific oracles for mathematics and argue that efforts to
+ integrate computer algebra systems with theorem provers, systems which
+ have largely been developed independent of one another, provide a
+ concrete set of problems related to the notion of provable safety that
+ has emerged in the AI safety community. We review approaches to
+ interfacing CASs with theorem provers, describe welldefined
+ architectural deficiencies that have been identified with CASs, and
+ suggest possible lines of research and practical software projects for
+ scientists interested in AI safety.",
+ paper = "Sarm17.pdf",
+ keywords = "printed, axiomref"
+}
+
+\end{chunk}
+
+\index{Scott, Dana S.}
+\index{Strachey, C.}
+\begin{chunk}{axiom.bib}
+@article{Scot71,
+ author = "Scott, Dana S. and Strachey, C.",
+ title = {{Towards a Mathematical Semantics for Computer Languages}},
+ journal = "Proc. Symp. on Computers and Automata",
+ volume = "21",
+ year = "1971",
+ abstract =
+ "Compilers for highlevel languages are generally constructed to
+ give a complete translation of the programs into machine
+ lanugage. As machines merely juggle bit patterns, the concepts of
+ the original language may be lost or at least obscured during this
+ passage. The purpose of a mathematical semantics is to give a
+ correct and meaningful correspondence between programs and
+ mathematical entities in a way that is entirely independent of an
+ implementation. This plan is illustrated in a very elementary
+ method with the usual idea of state transformations. The next
+ section shows why the mathematics of functions has to be modified
+ to accommodate recursive commands. Section 3 explains the
+ modification. Section 4 introduces the environments for handling
+ variables and identifiers and shows how the semantical equations
+ define equivalence of programs. Section 5 gives an exposition of
+ the new type of mathematical function spaces that are required for
+ the semantics of procedures when these are allowed in assignment
+ statements. The conclusion traces some of the background of the
+ project and points the way to future work.",
+ paper = "Scot71.pdf"
+}
+
+\end{chunk}
+
+\index{Shields, Mark}
+\index{Jones, Simon Peyton}
+\begin{chunk}{axiom.bib}
+@inproceedings{Shie02,
+ author = "Shields, Mark and Jones, Simon Peyton",
+ title = {{FirstClass Modules for Haskell}},
+ booktitle = "9th Int. Conf. on Foundations of ObjectOriented Languages",
+ pages = "28--40",
+ year = "2002",
+ link = "\url{http://www.microsoft.com/en-us/research/wp-content/uploads/2016/02/first_class_modules.pdf}",
+ abstract =
+ "Though Haskell's module language is quite weak, its core language
+ is highly expressive. Indeed, it is tantalisingly close to being
+ able to express much of the structure traditionally delegated to a
+ separate module language. However, the encoding are awkward, and
+ some situations can't be encoded at all.
+
+ In this paper we refine Haskell's core language to support
+ {\sl firstclass modules} with many of the features of MLstyle
+ modules. Our proposal cleanly encodes signatures, structures and
+ functors with the appropriate type abstraction and type sharing,
+ and supports recursive modules. All of these features work across
+ compilation units, and interact harmoniously with Haskell's class
+ system. Coupled with support for staged computation, we believe
+ our proposal would be an elegant approach to runtime dynamic
+ linking of structured code.
+
+ Our work builds directly upon Jones' work on parameterised
+ signatures, Odersky and Laufer's system of higherranked type
+ annotations, Russo's semantics of ML modules using ordinary
+ existential and universal quantifications, and Odersky and
+ Zenger's work on nested types. We motivate the system by examples,
+ and include a more formal presentation in the appendix.",
+ paper = "Shie02.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Sokolowski, Stefan}
+\begin{chunk}{axiom.bib}
+@article{Soko87,
+ author = "Sokolowski, Stefan",
+ title = {{Soundness of Hoare's Logic: An Automated Proof Using LCF}},
+ journal = "Trans. on Programming Languages and Systems",
+ volume = "9",
+ number = "1",
+ pages = "100--120",
+ year = "1987",
+ abstract =
+ "This paper presents a natural deduction proof of Hoare's logic
+ carried out by the Edinburgh LCF theorem prover. The emphasis is
+ on the way Hoare's theory is presented to the LCF, which looks
+ very much like an exposition of syntax and semantics to human
+ readers; and on the programmable heuristics (tactics). We also
+ discuss some problems and possible improvements to the LCF.",
+ paper = "Soko87.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Steele, Guy Lewis}
\begin{chunk}{axiom.bib}
@techreport{Stee78,
@@ 14136,6 +15145,36 @@ when shown in factored form.
\end{chunk}
+\index{Stuckey, Peter J.}
+\index{Sulzmann, Martin}
+\begin{chunk}{axiom.bib}
+@article{Stuc05,
+ author = "Stuckey, Peter J. and Sulzmann, Martin",
+ title = {{A Theory of Overloading}},
+ journal = "ACM Trans. on Programming Languages and Systems",
+ volume = "27",
+ number = "6",
+ pages = "154",
+ year = "2005",
+ abstract =
+ "We present a novel approach to allow for overloading of
+ identifiers in the spirit of type classes. Our approach relies on
+ the combination of the HM(X) type system framework with Constraint
+ Handling Rules (CHRs). CHRs are a declarative language for writing
+ incremental constraint solvers, that provide our scheme with a
+ form of programmable type language. CHRs allow us to precisely
+ describe the relationships among overloaded identifiers. Under
+ some sufficient conditions on the CHRs we achieve decidable type
+ inference and the semantic meaning of programs is unambiguous. Our
+ approach provides a common formal basis for many type class
+ extensions such as multiparameter type classes and functional
+ dependencies.",
+ paper = "Stuc05.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{T} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Talpin, JeanPierre}
@@ 14231,6 +15270,28 @@ when shown in factored form.
\index{Wadler, Philip}
\begin{chunk}{axiom.bib}
+@article{Wadl95,
+ author = "Wadler, Philip",
+ title = {{Monads for Functional Programming}},
+ journal = "LNCS",
+ volume = "925",
+ year = "1995",
+ abstract =
+ "The use of monads to structure functional programs is
+ described. Monads provide a convenient framework for simulating
+ effects found in other languages, such as global state, exception
+ handling, output, or nondeterminism. Three case studies are
+ looked at in detail: how monads ease the modification of a simple
+ evaluator; how monads act as the basis of a datatype of arrays
+ subject to inplace update; and how monads can be used to build
+ parsers.",
+ paper = "Wadl95.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Wadler, Philip}
+\begin{chunk}{axiom.bib}
@article{Wadl92,
author = "Wadler, Philip",
title = {{Comprehending Monads}},
@@ 14292,6 +15353,91 @@ when shown in factored form.
\end{chunk}
+\index{Wadler, Philip}
+\index{Findler, Robert Bruce}
+\begin{chunk}{axiom.bib}
+@inproceedings{Wadl07a,
+ author = "Wadler, Philip and Findler, Robert Bruce",
+ title = {{WellTyped Programs Can't Be Blamed}},
+ booktitle = "Workshop on Scheme and Functional Programming",
+ year = "2007",
+ pages = "15--26",
+ abstract =
+ "We show how {\sl contracts} with blame fit naturally with recent
+ work on {\sl hybrid types} and {\sl gradual types}. Unlike hybrid
+ types or gradual types, we require casts in the source code, in
+ order to indicate where type errors may occur. Two (perhaps
+ surprising) aspects of our approach are that refined types can
+ provide useful static guarantees even in the absence of a theorem
+ prover, and that type {\sl dynamic} should not be regarded as a
+ supertype of all other types. We factor the wellknown notion of
+ subtyping into new notions of positive and negative subtyping, and
+ use these to characterise where positive and negative blame may
+ arise. Our approach sharpens and clarifies some recent results in
+ the literature.",
+ paper = "Wadl07a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Wehr, Stefan}
+\begin{chunk}{axiom.bib}
+@phdthesis{Wehr05,
+ author = "Wehr, Stefan",
+ title = {{ML Modules and Haskell Type Classes: A Constructive
+ Comparison}},
+ school = "Albert Ludwigs Universitat",
+ year = "2005",
+ abstract =
+ "Researchers repeatedly observed that the module system of ML and the
+ type class mechanism of Haskell are related. So far, this relationship
+ has received little formal investigation. The work at hand fills this
+ gap: It introduces typepreserving translations from modules to type
+ classes and vice versa, which enable a thorough comparison of the two
+ concepts.
+
+ The source language of the first translation is a subset of
+ Standard ML. The target language is Haskell with common extensions
+ and one new feature, which was developed as part of this work. The
+ second translation maps a subset of Haskell 98 to ML, with
+ wellestablished extensions. I prove that the translations
+ preserve type correctness and provide implementations for both.
+
+ Building on the insights obtained from the translations, I present
+ a thorough comparison between ML modules and Haskell type
+ classes. Moreover, I evaluate to what extent the techniques used
+ in the translations can be exploited for modular programming in
+ Haskell and for programming with adhoc polymorphism in ML.",
+ paper = "Wehr05.pdf"
+}
+
+\end{chunk}
+
+\index{Wehr, Stefan}
+\index{Chakravarty, Manuel M.T.}
+\begin{chunk}{axiom.bib}
+@article{Wehr08,
+ author = "Wehr, Stefan and Chakravarty, Manuel M.T.",
+ title = {{ML Modules and Haskell Type Classes: A Constructive
+ Comparison}},
+ journal = "LNCS",
+ volume = "5356",
+ pages = "188--204",
+ year = "2008",
+ abstract =
+ "Researchers repeatedly observed that the module system of ML and the
+ type class mechanism of Haskell are related. So far, this relationship
+ has received little formal investigation. The work at hand fills this
+ gap: It introduces typepreserving translations from modules to type
+ classes and vice versa, which enable a thorough comparison of the two
+ concepts.",
+ paper = "Wehr08.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{X} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\subsection{Y} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ 14349,6 +15495,29 @@ when shown in factored form.
\end{chunk}
+\index{Zeilberger, Noam}
+\begin{chunk}{axiom.bib}
+@misc{Zeil16,
+ author = "Zeilberger, Noam",
+ title = {{Towards a Mathematical Science of Programming}},
+ year = "2016"
+}
+
+\end{chunk}
+
+\index{Zeilberger, Noam}
+\begin{chunk}{axiom.bib}
+@inproceedings{Zeil16a,
+ author = "Zeilberger, Noam",
+ title = {{Principles of Type Refinement}},
+ booktitle = "OPLSS 2016",
+ link = "\url{http://noamz.org/oplss16/refinementsnotes.pdf}",
+ year = "2016",
+ paper = "Zeil16a.pdf"
+}
+
+\end{chunk}
+
\section{Proving Axiom Correct  Coercion in CASProof Systesms} %
\index{Sozeau, Matthieu}
@@ 14955,7 +16124,7 @@ when shown in factored form.
which enables the communication between both systems illustrated by
some examples that can be solved by theorems and algorithms",
paper = "Ball95.pdf",
 keywords = "axiomref, CASProof"
+ keywords = "axiomref, CASProof, printed"
}
\end{chunk}
@@ 16366,6 +17535,56 @@ when shown in factored form.
\index{Carette, Jacques}
\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
+@article{Care08,
+ author = "Carette, Jacques and Farmer, William M.",
+ title = {{HighLevel Theories}},
+ journal = "LNCS",
+ volume = "5144",
+ pages = "232245",
+ year = "2008",
+ abstract =
+ "We introduce highlevel theories in analogy with highlevel
+ programming languages. The basic point is that even though one can
+ define many theories via simple, lowlevel axiomatizations, that is
+ neither an effective nor a comfortable way to work with such theories.
+ We present an approach which is closer to what users of mathematics
+ employ, while still being based on formal structures.",
+ paper = "Care08.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Carette, Jacques}
+\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
+@article{Care09,
+ author = "Carette, Jacques and Farmer, William M.",
+ title = {{A Review of Mathematical Knowledge Management}},
+ journal = "LNCS",
+ volume = "5625",
+ pages = "233246",
+ year = "2009",
+ abstract =
+ "Mathematical Knowledge Management (MKM), as a field, has seen
+ tremendous growth in the last few years. This period was one where
+ many research threads were started and the field was defining
+ itself. We believe that we are now in a position to use the MKM body
+ of knowledge as a means to define what MKM is, what it worries about,
+ etc. In this paper, we review the literature of MKM and gather various
+ metadata from these papers. After offering some definitions
+ surrounding MKM, we analyze the metadata we have gathered from these
+ papers, in an effort to cast more light on the field of MKM and its
+ evolution",
+ paper = "Care09.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Carette, Jacques}
+\index{Farmer, William M.}
\index{Jeremic, Filip}
\index{Maccio, Vincent}
\index{O'Connor, Russell}
@@ 16384,7 +17603,7 @@ when shown in factored form.
and reflect, as much as possible, the mathematical structure present
in the objects which populate the library.",
paper = "Care11a.pdf",
 keywords = "axiomref"
+ keywords = "axiomref, printed"
}
\end{chunk}
@@ 16416,6 +17635,42 @@ when shown in factored form.
\end{chunk}
+\index{Carette, Jacques}
+\index{Farmer, William M.}
+\index{O'Connor, Russell}
+\begin{chunk}{axiom.bib}
+@misc{Care11c,
+ author = "Carette, Jacques and Farmer, William M. and O'Connor, Russell",
+ title = {{MathScheme: Project Description}},
+ year = "2011",
+ link = "\url{http://imps.mcmaster.ca/doc/cicm2011projdesc.pdf}",
+ paper = "Care11c.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Carette, Jacques}
+\index{O'Connor, Russell}
+\begin{chunk}{axiom.bib}
+@article{Care12,
+ author = "Carette, Jacques and O'Connor, Russell",
+ title = {{Theory Presentation Combinators}},
+ journal = "LNCS",
+ volume = "7362",
+ year = "2012",
+ abstract =
+ "We motivate and give semantics to theory presentation combinators
+ as the foundational building blocks for a scalable library of
+ theories. The key observation is that the category of contexts and
+ fibered categories are the ideal theoretical tools for this
+ purpose.",
+ paper = "Care12.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Chlipala, Adam}
\begin{chunk}{axiom.bib}
@article{Chli10,
@@ 21351,7 +22606,8 @@ when shown in factored form.
programming, bounded type quantification, and abstract data types. This
paper provides an informal introduction to type classes, and defines them
formally by means of type inference rules",
 paper = "Wadl88.pdf"
+ paper = "Wadl88.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 21861,6 +23117,34 @@ in Lecture Notes in Computer Science, Springer ISBN 9783540855200
\end{chunk}
+\index{Walther, J.S.}
+\begin{chunk}{axiom.bib}
+@misc{Walt71,
+ author = "Walther, J.S.",
+ title = {{A Unified Algorithm for Elementary Functions}},
+ link = "\url{}",
+ year = "1971",
+ abstract =
+ "This paper describes a single unified algorithm for the calculation
+ of elementary functions including multiplication, division, sin,
+ cos, tan, arctan, sinh, cosh, tanh, arctanh, ln, exp and square root.
+ The basis for the algorithm is coordinate rotation in a linear,
+ circular, or hyperbolic coordinate system depending on which function
+ is to be calculated. The only operations required are shifting,
+ adding, subtracting and the recall of prestored constants. The
+ limited domain of convergence of the algorithm is calculated,
+ leading to a discussion of the modifications required to extend the
+ domain for floating point calculations.
+
+ A hardware floating point processor using the algorithm was built at
+ HewlettPackard Laboratories. The block diagram of the processor, the
+ microprogram control used for the algorithm, and measures of actual
+ performance are shown.",
+ paper = "Walt71.pdf"
+}
+
+\end{chunk}
+
\section{Advanced Documentation} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Bostock, Mike}
@@ 27763,6 +29047,30 @@ Proc ISSAC 97 pp172175 (1997)
\end{chunk}
\index{Arnon, Dennis S.}
+\index{McCallum, Scott}
+\begin{chunk}{axiom.bib}
+@article{Arno82a,
+ author = "Arnon, Dennis S. and McCallum, Scott",
+ title = {{Cylindrical Algebraic Decomposition by Quantifier Elimination}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "215222",
+ year = "1982",
+ abstract =
+ "Cylindrical algebraic decompositions were introduced as a major
+ component of a new quantifier elimination algorithm for elementary
+ algebra and geometry (G. Collins, 1973). In the present paper we turn
+ the tables and show that one can use quantifier elimination for
+ elementary algebra and geometry to obtain a new version of the
+ cylindrical algebraic decomposition algorithm. A key part of our
+ result is a theorem, of interest in its own right, that relates the
+ multiplicities of the roots of a polynomial to their continuity.",
+ paper = "Arno82a.pdf"
+}
+
+\end{chunk}
+
+\index{Arnon, Dennis S.}
\index{Collins, George E.}
\index{McCallum, Scott}
\begin{chunk}{axiom.bib}
@@ 28513,6 +29821,20 @@ Proc ISSAC 97 pp172175 (1997)
\end{chunk}
\index{Collins, George E.}
+\begin{chunk}{axiom.bib}
+@article{Coll82a,
+ author = "Collins, George E.",
+ title = {{Factorization in Cylindrical Algebraic Decomposition  Abstract}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "212214",
+ year = "1982",
+ paper = "Coll82a.pdf"
+}
+
+\end{chunk}
+
+\index{Collins, George E.}
\index{Hong, Hoon}
\begin{chunk}{axiom.bib}
@article{Coll91,
@@ 39335,7 +40657,7 @@ Grant citation GR/L48256 Nov 1, 1997Feb 28, 2001
\begin{chunk}{axiom.bib}
@article{Farm03,
author = "Farmer, William M. and von Mohrenschildt, Martin",
 title = {{An overview of a formal framework for managing mathematics}},
+ title = {{An Overview of a Formal Framework for Managing Mathematics}},
journal = "Ann. Math. Artif. Intell.",
volume = "38",
number = "13",
@@ 41608,6 +42930,35 @@ April 1976 (private copy)
\subsection{H} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Hall, Cordelia V.}
+\index{Hammond, Kevin}
+\index{Jones, Simon L. Peyton}
+\index{Wadler, Philip L.}
+\begin{chunk}{axiom.bib}
+@article{Hall96,
+ author = "Hall, Cordelia V. and Hammond, Kevin and Jones, Simon L. Peyton
+ and Wadler, Philip L.",
+ title = {{Type Classes in Haskell}},
+ journal = "Trans. on Programming Languages and Systems",
+ volume = "18",
+ number = "2",
+ pages = "109138",
+ year = "1996",
+ abstract =
+ "This article defines a set of type inference rules for resolving
+ overloading introduced by type classes, as used in the functional
+ programming language Haskell. Programs including type classes are
+ transformed into ones which may be typed by standard HindleyMilner
+ inference rules. In contrast to other work on type classes, the rules
+ presented here relate directly to Haskell programs. An innovative
+ aspect of this work is the use of secondorder lambda calculus to
+ record type information in the transformed program.",
+ paper = "Hall96.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Harrison, J.}
\index{Thery, L.}
\begin{chunk}{axiom.bib}
@@ 41822,6 +43173,21 @@ in [Wit87], pp58
\end{chunk}
\index{Hearn, Anthony C.}
+\begin{chunk}{axiom.bib}
+@article{Hear82,
+ author = "Hearn, Anthony C.",
+ title = {{REDUCE  A Case Study in Algebra System Development}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "263272",
+ year = "1982",
+ paper = "Hear82.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Hearn, Anthony C.}
\index{Eberhard, Schrufer}
\begin{chunk}{axiom.bib}
@article{Hear95,
@@ 45447,7 +46813,7 @@ ISBN 0897916999 LCCN QA76.95 I59 1995 ACM order number 505950
certain Mathematica computations, so that the rigor of the proof
assistant is not compromised.",
paper = "Lewi17.pdf",
 keywords = "axiomref,printed"
+ keywords = "axiomref, printed"
}
\end{chunk}
@@ 46849,6 +48215,22 @@ Invited Presentation in Milestones in Computer Algebra, May 2008, Tobago
\end{chunk}
+\index{Musser, David R.}
+\index{Kapur, Deepak}
+\begin{chunk}{axiom.bib}
+@article{Muss82,
+ author = "Musser, David R. and Kapur, Deepak",
+ title = {{Rewrite Rule Theory and Abstract Data Type Analysis}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "7790",
+ year = "1982",
+ paper = "Muss82.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
\subsection{N} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Naylor, William A.}
@@ 52095,7 +53477,8 @@ National Physical Laboratory. (1982)
simple registration/subscription mechanism, and a translation
mechanism which ensures the transparent and provably sound exchange of
logical services.",
 paper = "Arma00.pdf"
+ paper = "Arma00.pdf",
+ keywords = "printed"
}
\end{chunk}
diff git a/changelog b/changelog
index 4def1dd..a14fb5a 100644
 a/changelog
+++ b/changelog
@@ 1,3 +1,5 @@
+20180516 tpd src/axiomwebsite/patches.html 20180516.01.tpd.patch
+20180516 tpd books/bookvolbib add references
20180505 tpd src/axiomwebsite/patches.html 20180505.01.tpd.patch
20180505 tpd books/breakurl.sty latex package to break long URLs
20180415 tpd src/axiomwebsite/patches.html 20180415.01.tpd.patch
diff git a/patch b/patch
index 64501ea..8ffaf48 100644
 a/patch
+++ b/patch
@@ 1,4 +1,1392 @@
books/breakurl.sty latex package to break long URLs
+books/bookvolbib add references
Goal: Literate Programming
+Goal: Proving Axiom Correct
+\index{Walther, J.S.}
+\begin{chunk}{axiom.bib}
+@misc{Walt71,
+ author = "Walther, J.S.",
+ title = {{A Unified Algorithm for Elementary Functions}},
+ link = "\url{}",
+ year = "1971",
+ abstract =
+ "This paper describes a single unified algorithm for the calculation
+ of elementary functions including multiplication, division, sin,
+ cos, tan, arctan, sinh, cosh, tanh, arctanh, ln, exp and square root.
+ The basis for the algorithm is coordinate rotation in a linear,
+ circular, or hyperbolic coordinate system depending on which function
+ is to be calculated. The only operations required are shifting,
+ adding, subtracting and the recall of prestored constants. The
+ limited domain of convergence of the algorithm is calculated,
+ leading to a discussion of the modifications required to extend the
+ domain for floating point calculations.
+
+ A hardware floating point processor using the algorithm was built at
+ HewlettPackard Laboratories. The block diagram of the processor, the
+ microprogram control used for the algorithm, and measures of actual
+ performance are shown.",
+ paper = "Walt71.pdf"
+}
+
+\end{chunk}
+
+\index{Friedman, Daniel P.}
+\index{Wise, David S.}
+\begin{chunk}{axiom.bib}
+@techreport{Frie76,
+ author = "Friedman, Daniel P. and Wise, David S.",
+ title = {{CONS should not Evaluate its Arguments}},
+ institution = "Indiana University",
+ number = "TR44",
+ year = "1976",
+ abstract =
+ "The constructor function which allocates and fills records in
+ recursive, sideeffectfree procedural languages is redefined to be a
+ nonstrict (Vuillemin 1974) elementary operation. Instead of
+ evaluating its arguments, it builds suspensions of them which are not
+ coerced until the suspension is accessed by a strict elementary
+ function. The resulting evaluation procedures are strictly more
+ powerful than existing schemes for languages such as LISP. The main
+ results are that Landin's streams are subsumed into McCarthy's LISP
+ merely by the redefinition of elementary functions, that invocations of
+ LISP's evaluator can be minimized by redefining the elementary
+ functions without redefining the interpreter, and as a strong
+ conjecture, that redefining the elementary functions yields the least
+ fixedpoint semantics for McCarthy's evaluation scheme. This new
+ insight into the role of construction functions will do much to ease
+ the interface between recursive programmers and iterative programmers,
+ as well as the interface between programmers and data structure
+ designers.",
+ paper = "Frie76.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Sarma, Gopal}
+\index{Hay, Nick J.}
+\begin{chunk}{axiom.bib}
+@article{Sarm17,
+ author = "Sarma, Gopal and Hay, Nick J.",
+ title = {{Robust Computer Algebra, Theorem Proving, and Oracle AI}},
+ journal = "Informatica",
+ volume = "41",
+ number = "3",
+ link = "\url{https://arxiv.org/pdf/1708.02553.pdf}",
+ year = "2017",
+ abstract =
+ "In the context of superintelligent AI systems, the term 'oracle' has
+ two meanings. One refers to modular systems queried for
+ domainspecific tasks. Another usage, referring to a class of systems
+ which may be useful for addressing the value alignment and AI control
+ problems, is a superintelligent AI system that only answers questions.
+ The aim of this manuscript is to survey contemporary research problems
+ related to oracles which align with longterm research goals of AI
+ safety. We examine existing question answering systems and argue that
+ their high degree of architectural heterogeneity makes them poor
+ candidates for rigorous analysis as oracles. On the other hand, we
+ identify computer algebra systems (CASs) as being primitive examples
+ of domainspecific oracles for mathematics and argue that efforts to
+ integrate computer algebra systems with theorem provers, systems which
+ have largely been developed independent of one another, provide a
+ concrete set of problems related to the notion of provable safety that
+ has emerged in the AI safety community. We review approaches to
+ interfacing CASs with theorem provers, describe welldefined
+ architectural deficiencies that have been identified with CASs, and
+ suggest possible lines of research and practical software projects for
+ scientists interested in AI safety.",
+ paper = "Sarm17.pdf",
+ keywords = "printed, axiomref"
+}
+
+\end{chunk}
+
+\index{Keller, Chantal}
+\begin{chunk}{axiom.bib}
+@phdthesis{Kell13,
+ author = "Keller, Chantal",
+ title = {{A Matter of Trust: Skeptical Communication Between Coq and
+ External Provers}},
+ school = "Ecole Polytechnique",
+ year = "2013",
+ link =
+"\url{https://www.lri.fr/~keller/Documentsrecherche/Publications/thesis13.pdf}",
+ abstract =
+ "This thesis studies the cooperation between the Coq proof assistant
+ and external provers through proof witnesses. We concentrate on two
+ different kinds of provers that can return certicates: first, answers
+ coming from SAT and SMT solvers can be checked in Coq to increase both
+ the confidence in these solvers and Coq 's automation; second,
+ theorems established in interactive provers based on HigherOrder
+ Logic can be exported to Coq and checked again, in order to offer the
+ possibility to produce formal developments which mix these two
+ different logical paradigms. It ended up in two software : SMTCoq, a
+ bidirectional cooperation between Coq and SAT/SMT solvers, and
+ HOLLIGHTCOQ, a tool importing HOL Light theorems into Coq.
+
+ For both tools, we took great care to define a modular and efficient
+ architecture, based on three clearly separated ingredients: an
+ embedding of the formalism of the external tool inside Coq which is
+ carefully translated into Coq terms, a certified checker to establish
+ the proofs using the certificates and an OCaml preprocessor to transform
+ proof witnesses coming from different provers into a generic
+ certificate. This division allows that a change in the format of proof
+ witnesses only affects the preprocessor, but no proved Coq code.
+ Another fundamental component for efficiency and modularity is
+ computational reflection, which exploits the computational power of
+ Coq to establish generic and small proofs based on the certificates.",
+ paper = "Kell13.pdf"
+}
+
+\end{chunk}
+
+\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
+@article{Farm07,
+ author = "Farmer, William M.",
+ title = {{Biform Theories in Chiron}},
+ journal = "LNCS",
+ volume = "4573",
+ pages = "6679",
+ year = "2007",
+ abstract =
+ "An axiomatic theory represents mathematical knowledge declaratively
+ as a set of axioms. An algorithmic theory represents mathematical
+ knowledge procedurally as a set of algorithms. A biform theory is
+ simultaneously an axiomatic theory and an algorithmic theory. It
+ represents mathematical knowledge both declaratively and procedurally.
+ Since the algorithms of algorithmic theories manipulate the syntax of
+ expressions, biform theories — as well as algorithmic theories — are
+ difficult to formalize in a traditional logic without the means to
+ reason about syntax. Chiron is a derivative of
+ von Neumann-Bernays-Gödel (NBG) set theory that is intended to be a
+ practical, generalpurpose logic for mechanizing mathematics. It
+ includes elements of type theory, a scheme for handling undefinedness,
+ and a facility for reasoning about the syntax of expressions. It is an
+ exceptionally wellsuited logic for formalizing biform theories. This
+ paper defines the notion of a biform theory, gives an overview of
+ Chiron, and illustrates how biform theories can be formalized in Chiron.",
+ paper = "Farm07.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Carette, Jacques}
+\index{Farmer, William M.}
+\index{Sorge, Volker}
+\begin{chunk}{axiom.bib}
+@inproceedings{Care07,
+ author = "Carette, Jacques and Farmer, William M. and Sorge, Volker",
+ title = {{A Rational Reconstruction of a System for Experimental
+ Mathematics}},
+ booktitle = "14th Workshop on Automated Reasoning",
+ publisher = "unknown",
+ year = "2007",
+ abstract =
+ "In previous papers we described the implementation of a system
+ which combines mathematical object generation, transformation and
+ filtering, conjecture generation, proving and disproving for
+ mathematical discovery in nonassociative algebra. While the system
+ has generated novel, fully verified theorems, their construction
+ involved a lot of ad hoc communication between disparate systems. In
+ this paper we carefully reconstruct a specification of a subprocess
+ of the original system in a framework for trustable communication
+ between mathematics systems put forth by us. It employs the concept
+ of biform theories that enables the combined formalisation of the
+ axiomatic and algorithmic theories behind the generation
+ process. This allows us to gain a much better understanding of the
+ original system, and exposes clear generalisation opportunities.",
+ paper = "Care07.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Farm00,
+ author = "Farmer, William M.",
+ title = {{A Proposal for the Development of an Interactive
+ Mathematics Laboratory for Mathematics Education}},
+ booktitle = "Workshop on Deduction Systems for Mathematics Education",
+ pages = "2025",
+ year = "2000",
+ paper = "Farm00.pdf",
+ keywords = "axiomref, printed"
+}
+
+\end{chunk}
+
+\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
+@article{Farm04,
+ author = "Farmer, William M.",
+ title = {{MKM A New Interdisciplinary Field of Research}},
+ journal = "SIGSAM",
+ volume = "38",
+ pages = "4752",
+ year = "2004",
+ abstract =
+ "Mathematical Knowledge Management (MKM) is a new interdisciplinary
+ field of research in the intersection of mathematics, computer
+ science, library science, and scientific publishing. Its objective is
+ to develop new and better ways of managing mathematical knowledge
+ using sophisticated software tools. Its grand challenge is to create
+ a universal digital mathematics library (UDML), accessible via the
+ WorldWide Web, that contains essentially all mathematical knowledge
+ (intended for the public). The challenges facing MKM are daunting,
+ but a UDML, even just partially constructed, would transform how
+ mathematics is learned and practiced.",
+ paper = "Farm04.pdf",
+ keywords = "printed, axiomref"
+}
+
+\end{chunk}
+
+\index{Farmer, William M.}
+\index{Mohrenschildt, Martin v.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Farm00a,
+ author = "Farmer, William M. and Mohrenschildt, Martin v.",
+ title = {{Transformers for Symbolic Computation and Formal Deduction}},
+ booktitle = "CADE17",
+ pages = "3645",
+ year = "2000",
+ abstract =
+ "A transformer is a function that maps expressions to expressions.
+ Many transformational operators — such as expression evaluators and
+ simplifiers, rewrite rules, rules of inference, and decision
+ procedures — can be represented by transformers. Computations and
+ deductions can be formed by applying sound transformers in
+ sequence. This paper introduces machinery for defining sound
+ transformers in the context of an axiomatic theory in a formal
+ logic. The paper is intended to be a first step in a development of an
+ integrated framework for symbolic computation and formal deduction.",
+ paper = "Farm00a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Farmer, William M.}
+\index{Larjani, Pouya}
+\begin{chunk}{axiom.bib}
+@misc{Farm14,
+ author = "Farmer, William M. and Larjani, Pouya",
+ title = {{Frameworks for Reasoning about Syntax that Utilize
+ Quotation and Evaluation}},
+ link = "\url{http://imps.mcmaster.ca/doc/syntax.pdf}",
+ year = "2014",
+ abstract =
+ "It is often useful, if not necessary, to reason about the syntactic
+ structure of an expression in an interpreted language (i.e., a
+ language with a semantics). This paper introduces a mathematical
+ structure called a syntax framework that is intended to be an abstract
+ model of a system for reasoning about the syntax of an interpreted
+ language. Like many concrete systems for reasoning about syntax, a
+ syntax framework contains a mapping of expressions in the
+ interpreted language to syntactic values that represent the syntactic
+ structures of the expressions; a language for reasoning about the
+ syntactic values; a mechanism called quotation to refer to the
+ syntactic value of an expression; and a mechanism called evaluation to
+ refer to the value of the expression represented by a syntactic value.
+ A syntax framework provides a basis for integrating reasoning about
+ the syntax of the expressions with reasoning about what the
+ expressions mean. The notion of a syntax framework is used to discuss
+ how quotation and evaluation can be built into a language and to
+ define what quasiquotation is. Several examples of syntax frameworks
+ are presented.",
+ paper = "Farm14.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Carette, Jacques}
+\index{Farmer, William M.}
+\index{O'Connor, Russell}
+\begin{chunk}{axiom.bib}
+@misc{Care11c,
+ author = "Carette, Jacques and Farmer, William M. and O'Connor, Russell",
+ title = {{MathScheme: Project Description}},
+ year = "2011",
+ link = "\url{http://imps.mcmaster.ca/doc/cicm2011projdesc.pdf}",
+ paper = "Care11c.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Carette, Jacques}
+\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
+@article{Care08,
+ author = "Carette, Jacques and Farmer, William M.",
+ title = {{HighLevel Theories}},
+ journal = "LNCS",
+ volume = "5144",
+ pages = "232245",
+ year = "2008",
+ abstract =
+ "We introduce highlevel theories in analogy with highlevel
+ programming languages. The basic point is that even though one can
+ define many theories via simple, lowlevel axiomatizations, that is
+ neither an effective nor a comfortable way to work with such theories.
+ We present an approach which is closer to what users of mathematics
+ employ, while still being based on formal structures.",
+ paper = "Care08.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Carette, Jacques}
+\index{O'Connor, Russell}
+\begin{chunk}{axiom.bib}
+@article{Care12,
+ author = "Carette, Jacques and O'Connor, Russell",
+ title = {{Theory Presentation Combinators}},
+ journal = "LNCS",
+ volume = "7362",
+ year = "2012",
+ abstract =
+ "We motivate and give semantics to theory presentation combinators
+ as the foundational building blocks for a scalable library of
+ theories. The key observation is that the category of contexts and
+ fibered categories are the ideal theoretical tools for this
+ purpose.",
+ paper = "Care12.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Musser, David R.}
+\index{Kapur, Deepak}
+\begin{chunk}{axiom.bib}
+@article{Muss82,
+ author = "Musser, David R. and Kapur, Deepak",
+ title = {{Rewrite Rule Theory and Abstract Data Type Analysis}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "7790",
+ year = "1982",
+ paper = "Muss82.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Lazard, Daniel}
+\begin{chunk}{axiom.bib}
+@article{Laza82,
+ author = "Lazard, Daniel",
+ title = {{Commutative Algebra and Computer Algebra}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "4048",
+ year = "1982",
+ abstract =
+ "It is well known that computer algebra deals with commutative rings
+ such as the integers, the rationals, the modular integers and
+ polynomials over such rings.
+
+ On the other hand, commutative algebra is that part of mathematics
+ which studies commutative rings.
+
+ Our aim is to make this link more precise. It will appear that most of
+ the constructions which appear in classical commutative algebra can be
+ done explicitly in finite time. However, there are
+ exceptions. Moreover, most of the known algorithms are intractable :
+ The problems are generally exponential by themselves, but many
+ algorithms are overoverexponential. It needs a lot of work to find
+ better methods, and to implement them, with the final hope to open a
+ computation domain larger than this one which is possible by hand.
+
+ To illustrate the limits and the possibilities of computerizing
+ commutative algebra, we describe an algorithm which tests the
+ primality of a polynomial ideal and we give an example of a single
+ algebraic extension of fields $K\subset L$ such that there exists an
+ algorithm of factorization for the polynomials over $K$, but not for
+ the polynomials over $L$.",
+ paper = "Laza82.pdf"
+}
+
+\end{chunk}
+
+\index{Hearn, Anthony C.}
+\begin{chunk}{axiom.bib}
+@article{Hear82,
+ author = "Hearn, Anthony C.",
+ title = {{REDUCE  A Case Study in Algebra System Development}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "263272",
+ year = "1982",
+ paper = "Hear82.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Padget, J.A.}
+\begin{chunk}{axiom.bib}
+@article{Padg82,
+ author = "Padget, J.A.",
+ title = {{Escaping from Intermediate Expression Swell: A Continuing Saga}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "256262",
+ year = "1982",
+ abstract =
+ "The notion of a closed continuation is introduced, is presented,
+ coroutines using function call and return based on this concept, are
+ applications and a functional dialect of LISP shown to be merely a
+ more general form of for coroutines in algebraic simplification and
+ are suggested, by extension function. expression Potential evaluation
+ and a specific example of their use is given in a novel attack on the
+ phenomenon of intermediate expression swell in polynomial
+ multiplication.",
+ paper = "Padg82.pdf"
+}
+
+\end{chunk}
+
+\index{Norman, Arthur}
+\begin{chunk}{axiom.bib}
+@article{Norm82,
+ author = "Norman, Arthur",
+ title = {{The Development of a VectorBased Algebra System}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "237248",
+ year = "1982",
+ paper = "Norm82.pdf"
+}
+
+\end{chunk}
+
+\index{Bordoni, L.}
+\index{Colagrossi, A.}
+\index{Miola, A.}
+\begin{chunk}{axiom.bib}
+@article{Bord82,
+ author = "Bordoni, L. and Colagrossi, A. and Miola, A.",
+ title = {{Linear Algebraic Approach for Computing Polynomial Resultant}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "231236",
+ year = "1982",
+ abstract =
+ "This paper presents a linear algebraic method for computing the
+ resultant of two polynomials. This method is based on the
+ computation of a determinant of order equal to the minimum of the
+ degrees of the two given polynomials. This method turns out to be
+ preferable to other known linear algebraic methods both from a
+ computational point of view and for a total generality respect to
+ the class of the given polynomials. Some relationships of this
+ method with the polynomial pseudoremainder operation are discussed.",
+ paper = "Bord82.pdf"
+}
+
+\end{chunk}
+
+\index{Arnon, Dennis S.}
+\index{McCallum, Scott}
+\begin{chunk}{axiom.bib}
+@article{Arno82a,
+ author = "Arnon, Dennis S. and McCallum, Scott",
+ title = {{Cylindrical Algebraic Decomposition by Quantifier Elimination}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "215222",
+ year = "1982",
+ abstract =
+ "Cylindrical algebraic decompositions were introduced as a major
+ component of a new quantifier elimination algorithm for elementary
+ algebra and geometry (G. Collins, 1973). In the present paper we turn
+ the tables and show that one can use quantifier elimination for
+ elementary algebra and geometry to obtain a new version of the
+ cylindrical algebraic decomposition algorithm. A key part of our
+ result is a theorem, of interest in its own right, that relates the
+ multiplicities of the roots of a polynomial to their continuity.",
+ paper = "Arno82a.pdf"
+}
+
+\end{chunk}
+
+\index{Collins, George E.}
+\begin{chunk}{axiom.bib}
+@article{Coll82a,
+ author = "Collins, George E.",
+ title = {{Factorization in Cylindrical Algebraic Decomposition  Abstract}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "212214",
+ year = "1982",
+ paper = "Coll82a.pdf"
+}
+
+\end{chunk}
+
+\index{Lazard, Daniel}
+\begin{chunk}{axiom.bib}
+@article{Laza82a,
+ author = "Lazard, Daniel",
+ title = {{On Polynomial Factorization}},
+ journal = "Lecture Notes in Computer Science",
+ volume = "144",
+ pages = "144157",
+ year = "1982",
+ abstract =
+ "We present new algorithms for factoring univariate polynomials
+ over finite fields. They are variants of the algorithms of Camion
+ and CantorZassenhaus and differ from them essentially by
+ computing the primitive idempotents of the algebra $K[X]/f$ before
+ factoring $f$.
+
+ These algorithms are probabilistic in the following sense. The
+ time of computation depends on random choices, but the validity of
+ the result does not depend on them. So, worst case complexity,
+ being infinite, is meaningless and we compute average
+ complexity. It appears that our and CantorZassenhaus algorithms
+ have the same asymptotic complexity and they are the best
+ algorithms actually known; with elementary multiplication and GCD
+ computation, CantorZassenhaus algorithm is always better than
+ ours; with fast multiplication and GCD, it seems that ours is
+ better, but this fact is not yet proven.
+
+ Finally, for factoring polynomials over the integers, it seems
+ that the best strategy consists in choosing prime moduli as big as
+ possible in the range where the time of the multiplication does
+ not depend on the size of the factors (machine word size). An
+ accurate computation of the involved constants would be needed for
+ proving this estimation.",
+ paper = "Laza82a.pdf"
+}
+
+\end{chunk}
+
+\index{Strachey, Christopher}
+\index{Wadsworth, Christopher}
+\begin{chunk}{axiom.bib}
+@article{Stra00a,
+ author = "Strachey, Christopher and Wadsworth, Christopher",
+ title = {{Continuations: A Mathematical Semantics for Handling Full Jumps}},
+ journal = "HigherOrder and Symbolic Computation",
+ volume = "13",
+ pages = "135152",
+ year = "2000",
+ abstract =
+ "This paper describes a method of giving the mathematical
+ semantics of programming languages which include the most general
+ form of jumps.",
+ paper = "Stra00a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kaes, Stefan}
+\begin{chunk}{axiom.bib}
+@article{Kaes88,
+ author = "Kaes, Stefan",
+ title = {{Parametric Overloading in Polymorphic Programming Languages}},
+ journal = "LNCS",
+ volume = "300",
+ pages = "131144",
+ year = "1988",
+ abstract =
+ "The introduction of unrestricted overloading in languages with type
+ systems based on implicit parametric polymorphism generally destroys
+ the principal type property: namely that the type of every expression
+ can uniformly be represented by a single type expression over some set
+ of type variables. As a consequence, type inference in the presence
+ of unrestricted overloading can become a NPcomplete problem. In
+ this paper we define the concept of parametric overloading as a
+ restricted form of overloading which is easily combined with
+ parametric polymorphism. Parametric overloading preserves the
+ principal type property, thereby allowing the design of efficient type
+ inference algorithms. We present sound type deduction systems, both
+ for predefined and programmer defined overloading. Finally we state
+ that parametric overloading can be resolved either statically, at
+ compile time, or dynamically, during program execution.",
+ paper = "Kaes88.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kaes, Stefan}
+\begin{chunk}{axiom.bib}
+@article{Kaes92,
+ author = "Kaes, Stefan",
+ title = {{Type Inference in the Presence of Overloading, Subtyping and
+ Recursive Types}},
+ journal = "ACM Lisp Pointers",
+ volume = "5",
+ number = "1",
+ year = "1992",
+ pages = "193204",
+ abstract =
+ "We present a unified approach to type inference in the presence of
+ overloading and coercions based on the concept of {\sl constrained
+ types}. We define a generic inference system, show that subtyping and
+ overloading can be treated as a special instance of this system and
+ develop a simple algorithm to compute principal types. We prove the
+ decidability of type inference for the class of {\sl decomposable
+ predicates} and develop a canonical representation for principal types
+ based on {\sl most accurate simplifications} of constraint
+ sets. Finally, we investigate the extension of our techniques to
+ {\sl recursive types}.",
+ paper = "Kaes92.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Hall, Cordelia V.}
+\index{Hammond, Kevin}
+\index{Jones, Simon L. Peyton}
+\index{Wadler, Philip L.}
+\begin{chunk}{axiom.bib}
+@article{Hall96,
+ author = "Hall, Cordelia V. and Hammond, Kevin and Jones, Simon L. Peyton
+ and Wadler, Philip L.",
+ title = {{Type Classes in Haskell}},
+ journal = "Trans. on Programming Langues and Systems",
+ volume = "18",
+ number = "2",
+ pages = "109138",
+ year = "1996",
+ abstract =
+ "This article defines a set of type inference rules for resolving
+ overloading introduced by type classes, as used in the functional
+ programming language Haskell. Programs including type classes are
+ transformed into ones which may be typed by standard HindleyMilner
+ inference rules. In contrast to other work on type classes, the rules
+ presented here relate directly to Haskell programs. An innovative
+ aspect of this work is the use of secondorder lambda calculus to
+ record type information in the transformed program.",
+ paper = "Hall96.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Dreyer, Derek}
+\index{Harper, Robert}
+\index{Chakravarty, Manuel M.T.}
+\index{Keller, Gabriele}
+\begin{chunk}{axiom.bib}
+@inproceedings{Drey07,
+ author = "Dreyer, Derek and Harper, Robert and Chakravarty, Manuel M.T.
+ and Keller, Gabriele",
+ title = {{Modular Type Classes}},
+ booktitle = "Proc. POPL'07",
+ pages = "6370",
+ year = "2007",
+ abstract =
+ "ML modules and Haskell type classes have proven to be highly
+ effective tools for program structuring. Modules emphasize explicit
+ configuration of program components and the use of data abstraction.
+ Type classes emphasize implicit program construction and ad hoc
+ polymorphism. In this paper, we show how the implicitlytyped
+ style of type class programming may be supported within the framework
+ of an explicitlytyped module language by viewing type classes as a
+ particular mode of use of modules. This view offers a harmonious
+ integration of modules and type classes, where type class features,
+ such as class hierarchies and associated types, arise naturally as
+ uses of existing modulelanguage constructs, such as module
+ hierarchies and type components. In addition, programmers have
+ explicit control over which type class instances are available for
+ use by type inference in a given scope. We formalize our approach as
+ a HarperStonestyle elaboration relation, and provide a sound type
+ inference algorithm as a guide to implementation.",
+ paper = "Drey07.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Wehr, Stefan}
+\index{Chakravarty, Manuel M.T.}
+\begin{chunk}{axiom.bib}
+@article{Wehr08,
+ author = "Wehr, Stefan and Chakravarty, Manuel M.T.",
+ title = {{ML Modules and Haskell Type Classes: A Constructive
+ Comparison}},
+ journal = "LNCS",
+ volume = "5356",
+ pages = "188204",
+ year = "2008",
+ abstract =
+ "Researchers repeatedly observed that the module system of ML and the
+ type class mechanism of Haskell are related. So far, this relationship
+ has received little formal investigation. The work at hand fills this
+ gap: It introduces typepreserving translations from modules to type
+ classes and vice versa, which enable a thorough comparison of the two
+ concepts.",
+ paper = "Wehr08.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Stuckey, Peter J.}
+\index{Sulzmann, Martin}
+\begin{chunk}{axiom.bib}
+@article{Stuc05,
+ author = "Stuckey, Peter J. and Sulzmann, Martin",
+ title = {{A Theory of Overloading}},
+ journal = "ACM Trans. on Programming Languages and Systems",
+ volume = "27",
+ number = "6",
+ pages = "154",
+ year = "2005",
+ abstract =
+ "We present a novel approach to allow for overloading of
+ identifiers in the spirit of type classes. Our approach relies on
+ the combination of the HM(X) type system framework with Constraint
+ Handling Rules (CHRs). CHRs are a declarative language for writing
+ incremental constraint solvers, that provide our scheme with a
+ form of programmable type language. CHRs allow us to precisely
+ describe the relationships among overloaded identifiers. Under
+ some sufficient conditions on the CHRs we achieve decidable type
+ inference and the semantic meaning of programs is unambiguous. Our
+ approach provides a common formal basis for many type class
+ extensions such as multiparameter type classes and functional
+ dependencies.",
+ paper = "Stuc05.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Reynolds, J.C.}
+\begin{chunk}{axiom.bib}
+@article{Reyn85,
+ author = "Reynolds, J.C.",
+ title = {{Three Approaches to Type Structure}},
+ journal = "LNCS",
+ volume = "185",
+ year = "1985",
+ abstract =
+ "We examine three disparate views of the type structure of
+ programming languages: Milner's type deduction system and polymorphic
+ let construct, the theory of subtypes and generic operators, and
+ the polymorphic or secondorder typed lambda calculus. These
+ approaches are illustrated with a functional language including
+ product, sum and list constructors. The syntactic behavior of types
+ is formalized with type inference rules, but their semantics is
+ treated intuitively.",
+ paper = "Reyn85.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Balsters, Herman}
+\index{Fokkinga, Maarten M.}
+\begin{chunk}{axiom.bib}
+@article{Bals91,
+ author = "Balsters, Herman and Fokkinga, Maarten M.",
+ title = {{Subtyping can have a simple semantics}},
+ journal = "Theoretical Computer Science",
+ volume = "87",
+ pages = "8196",
+ year = "1991",
+ abstract =
+ "Consider a first order typed language, with semantics
+ $\llbracket~\rrbracket$ for expressions and types. Adding
+ subtyping means that a partial order $\le$ on types is defined
+ and that the typing rules are extended to the effect that
+ expression $e$ has type $\tau$ whenever $e$ has type $\sigma$ and
+ $\sigma \le \tau$. We show how to adapt the semantics
+ $\llbracket~\rrbracket$ in a {\sl simple set theoretic way},
+ obtaining a semantics $\llbracket~\rrbracket$ that satisfies, in
+ addition to some obvious requirements, also the property
+ $\llbracket\sigma\rrbracket \subseteq \llbracket\tau\rrbracket$,
+ whenever $\sigma \le \tau$.",
+ paper = "Bals91.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Cardelli, Luca}
+\begin{chunk}{axiom.bib}
+@article{Card84,
+ author = "Cardelli, Luca",
+ title = {{A Semantics of Multiple Inheritance}},
+ journal = "LNCS",
+ volume = "173",
+ year = "1984",
+ paper = "Card84.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Mosses, Peter}
+\begin{chunk}{axiom.bib}
+@article{Moss84,
+ author = "Mosses, Peter",
+ title = {{A Basic Abstract Semantics Algebra}},
+ journal = "LNCS",
+ volume = "173",
+ year = "1984",
+ abstract =
+ "It seems that there are some pragmatic advantages in using Abstract
+ Semantic Algebras (ASAs) instead of $\lambda$-notation in denotational
+ semantics. The values of ASAs correspond to 'actions' (or
+ 'processes'), and the operators correspond to primitive ways of
+ combining actions. There are simple ASAs for the various independent
+ 'facets' of actions: a functional ASA for dataflow, an imperative ASA
+ for assignments, a declarative ASA for bindings, etc. The aim is to
+ obtain general ASAs by systematic combination of these simple ASAs.
+
+ Here we specify a basic ASA that captures the common features of the
+ functional, imperative and declarative ASAs  and highlights their
+ differences. We discuss the correctness of ASA specifications, and
+ sketch the proof of the consistency and (limiting) completeness of the
+ functional ASA, relative to a simple model.
+
+ Some familiarity with denotational semantics and algebraic
+ specifications is assumed.",
+ paper = "Moss84.pdf"
+}
+
+
+\end{chunk}
+
+\index{Gross, Jason}
+\index{Chlipala, Adam}
+\begin{chunk}{axiom.bib}
+@misc{Gros15,
+ author = "Gross, Jason and Chlipala, Adam",
+ title = {{Parsing Parses}},
+ link = "\url{https://people.csail.mit.edu/jgross/personalwebsite/papers/2015parsingparsetrees.pdf}",
+ year = "2015",
+ abstract =
+ "We present a functional parser for arbitrary contextfree grammars,
+ together with soundness and completeness proofs, all inside Coq. By
+ exposing the parser in the right way with parametric polymorphism and
+ dependent types, we are able to use the parser to prove its own
+ soundness, and, with a little help from relational parametricity,
+ prove its own completeness, too. Of particular interest is one strange
+ instantiation of the type and value parameters: by parsing parse trees
+ instead of strings, we convince the parser to generate its own
+ completeness proof. We conclude with highlights of our experiences
+ iterating through several versions of the Coq development, and some
+ general lessons about dependently typed programming.",
+ paper = "Gros15.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Wehr, Stefan}
+\begin{chunk}{axiom.bib}
+@phdthesis{Wehr05,
+ author = "Wehr, Stefan",
+ title = {{ML Modules and Haskell Type Classes: A Constructive
+ Comparison}},
+ school = "Albert Ludwigs Universitat",
+ year = "2005",
+ abstract =
+ "Researchers repeatedly observed that the module system of ML and the
+ type class mechanism of Haskell are related. So far, this relationship
+ has received little formal investigation. The work at hand fills this
+ gap: It introduces typepreserving translations from modules to type
+ classes and vice versa, which enable a thorough comparison of the two
+ concepts.
+
+ The source language of the first translation is a subset of
+ Standard ML. The target language is Haskell with common extensions
+ and one new feature, which was developed as part of this work. The
+ second translation maps a subset of Haskell 98 to ML, with
+ wellestablished extensions. I prove that the translations
+ preserve type correctness and provide implementations for both.
+
+ Building on the insights obtained from the translations, I present
+ a thorough comparison between ML modules and Haskell type
+ classes. Moreover, I evaluate to what extent the techniques used
+ in the translations can be exploited for modular programming in
+ Haskell and for programming with adhoc polymorphism in ML.",
+ paper = "Wehr05.pdf"
+}
+
+\end{chunk}
+
+\index{Dreyer, Derek}
+\index{Crary, Karl}
+\index{Harper, Robert}
+\begin{chunk}{axiom.bib}
+@article{Drey03,
+ author = "Dreyer, Derek and Crary, Karl and Harper, Robert",
+ title = {{A Type System for HigherOrder Modules}},
+ journal = "ACM SIGPLAN Notices",
+ volume = "38",
+ number = "1",
+ year = "2003",
+ link = "\url{https://www.cs.cmu.edu/~crary/papers/2003/thoms/thoms.pdf}",
+ abstract =
+ "We present a type theory for higherorder modules that accounts for
+ many central issues in module system design, including translucency,
+ applicativity , generativity , and modules as firstclass values.
+ Our type system harmonizes design elements from previous work,
+ resulting in a simple, economical account of modular programming. The
+ main unifying principle is the treatment of abstraction mechanisms
+ as computational effects. Our language is the first to provide a
+ complete and practical formalization of all of these critical issues
+ in module system design.",
+ paper = "Drey03.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Crary, Karl}
+\index{Harper, Robert}
+\index{Puri, Sidd}
+\begin{chunk}{axiom.bib}
+@inproceedings{Crar99,
+ author = "Crary, Karl and Harper, Robert and Puri, Sidd",
+ title = {{What is a Recursive Module}},
+ booktitle = "Conf. on Programming Language Design and Implementation",
+ year = "1999",
+ link = "\url{https://www.cs.cmu.edu/~crary/papers/1999/recmod/recmod.dvi}",
+ abstract =
+ "A hierarchical module system is an effective tool for structuring
+ large programs. Strictly hierarchical module systems impose an
+ acyclic ordering on import dependencies among program units. This
+ can impede modular programming by forcing mutuallydependent
+ components to be consolidated into a single module. Recently there
+ have been several proposals for module systems that admit cyclic
+ dependencies, but it is not clear how these proposals relate to
+ one another, nor how one might integrate them into an expressive
+ module system such as that of ML.
+
+ To address this question we provide a typetheoretic analysis of
+ the notion of a recursive module in the context of a
+ ``phasedistinction'' formalism for higherorder module
+ systems. We extend this calculus with a recursive module mechanism
+ and a new form of signature, called a {\sl recursively dependent
+ signature}, to support the definition of recursive modules. These
+ extensions are justified by an interpretation in terms of more
+ primitive language constructs. This interpretation may also serve
+ as a guide for implementation.",
+ paper = "Crar99.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Dreyer, Derek}
+\index{Crary, Karl}
+\index{Harper, Robert}
+\begin{chunk}{axiom.bib}
+@techreport{Drey02,
+ author = "Dreyer, Derek and Crary, Karl and Harper, Robert",
+ title = {{A Type System for HigherOrder Modules (Expanded Version)}},
+ type = "technical report",
+ institution = "Carnegie Mellon University",
+ number = "CMUCS02122R",
+ year = "2002",
+ link = "\url{https://www.cs.cmu.edu/~crary/papers/2003/thoms/thomstr.pdf}",
+ abstract =
+ "We present a type theory for higherorder modules that accounts for
+ many central issues in module system design, including translucency,
+ applicativity , generativity , and modules as firstclass values.
+ Our type system harmonizes design elements from previous work,
+ resulting in a simple, economical account of modular programming. The
+ main unifying principle is the treatment of abstraction mechanisms
+ as computational effects. Our language is the first to provide a
+ complete and practical formalization of all of these critical issues
+ in module system design.",
+ paper = "Drey02.pdf"
+}
+
+\end{chunk}
+
+\index{Crary, Karl}
+\begin{chunk}{axiom.bib}
+@techreport{Crar02,
+ author = "Crary, Karl",
+ title = {{Toward a Foundational Typed Assembly Language}},
+ institution = "Carnegie Mellon University",
+ number = "CMUCS02196",
+ year = "2002",
+ link = "\url{https://www.cs.cmu.edu/~crary/papers/2003/talt/talttr.pdf}",
+ abstract =
+ "We present the design of a typed assembly language called TALT that
+ supports heterogeneous tuples, disjoint sums, arrays, and a general
+ account of addressing modes. TALT also implements the von Neumann
+ model in which programs are stored in memory, and supports relative
+ addressing. Type safety for execution and for garbage collection are
+ shown by machinecheckable proofs. TALT is the first formalized typed
+ assembly language to provide any of these features.",
+ paper = "Crar02.pdf"
+}
+
+\end{chunk}
+
+\index{Mili, Ali}
+\index{Aharon, Shir}
+\index{Nadkarni, Chaitanya}
+\begin{chunk}{axiom.bib}
+@article{Mili09,
+ author = "Mili, Ali and Aharon, Shir and Nadkarni, Chaitanya",
+ title = {{Mathematics for Reasoning about Loop Functions}},
+ journal = "Science of Computer Programming",
+ volume = "79",
+ year = "2009",
+ pages = "9891020",
+ abstract =
+ "The criticality of modern software applications, the pervasiveness of
+ malicious code concerns, the emergence of thirdparty software
+ development, and the preponderance of program inspection as a quality
+ assurance method all place a great premium on the ability to analyze
+ programs and derive their function in all circumstances of use and all
+ its functional detail. For Clike programming languages, one of the
+ most challenging tasks in this endeavor is the derivation of loop
+ functions. In this paper, we outline the premises of our approach to
+ this problem, present some mathematical results, and discuss how these
+ results can be used as a basis for building an automated tool that
+ derives the function of while loops under some conditions.",
+ paper = "Mili09.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Piroi, Florina}
+\index{Buchberger, Bruno}
+\index{Rosenkranz, Camelia}
+\begin{chunk}{axiom.bib}
+@misc{Piro08,
+ author = "Piroi, Florina and Buchberger, Bruno and Rosenkranz, Camelia",
+ title = {{Mathematical Journals as Reasoning Agents: Literature Review}},
+ year = "2008",
+ link = "\url{http://www.risc.jku.at/publications/download/risc_3442/MathAgentsforSFB2008031012h00.pdf}",
+ abstract =
+ "This report reviews the literature relevant for the research project
+ 'Math−Agents: Mathematical Journals as Reasoning Agents' proposed by
+ Bruno Buchberger as a technology transfer project based on the results
+ of the SFB Project 'Scientific Computing', in particular the project
+ SFB 1302, 'Theorema'. The project aims at computer−supporting the
+ refereeing process of mathematical journals by tools that are mainly
+ based on automated reasoning and also at building up the knowledge
+ archived in mathematical journals in such a way that they can act as
+ interactive and active reasoning agents later on. In this report,
+ we review current mathematical software systems with a focus on the
+ availability of tools that can contribute to the goals of the Math−
+ Agents project.",
+ paper = "Piro08.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Sokolowski, Stefan}
+\begin{chunk}{axiom.bib}
+@article{Soko87,
+ author = "Sokolowski, Stefan",
+ title = {{Soundness of Hoare's Logic: An Automated Proof Using LCF}},
+ journal = "Trans. on Programming Languages and Systems",
+ volume = "9",
+ number = "1",
+ pages = "100120",
+ year = "1987",
+ abstract =
+ "This paper presents a natural deduction proof of Hoare's logic
+ carried out by the Edinburgh LCF theorem prover. The emphasis is
+ on the way Hoare's theory is presented to the LCF, which looks
+ very much like an exposition of syntax and semantics to human
+ readers; and on the programmable heuristics (tactics). We also
+ discuss some problems and possible improvements to the LCF.",
+ paper = "Soko87.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Wadler, Philip}
+\begin{chunk}{axiom.bib}
+@article{Wadl95,
+ author = "Wadler, Philip",
+ title = {{Monads for Functional Programming}},
+ journal = "LNCS",
+ volume = "925",
+ year = "1995",
+ abstract =
+ "The use of monads to structure functional programs is
+ described. Monads provide a convenient framework for simulating
+ effects found in other languages, such as global state, exception
+ handling, output, or nondeterminism. Three case studies are
+ looked at in detail: how monads ease the modification of a simple
+ evaluator; how monads act as the basis of a datatype of arrays
+ subject to inplace update; and how monads can be used to build
+ parsers.",
+ paper = "Wadl95.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Freeman, Tim}
+\index{Pfenning, Frank}
+\begin{chunk}{axiom.bib}
+@inproceedings{Free91,
+ author = "Freeman, Tim and Pfenning, Frank",
+ title = {{Refinement Types for ML}},
+ booktitle = "ACM SIGPLAN PLDI'91",
+ year = "1991",
+ link = "\url{http://www.cs.cmu.edu/~fp/papers/pldi91.pdf}",
+ abstract =
+ "We describe a refinement of ML's type system allowing the
+ specification of recursively defined subtypes of userdefined
+ datatypes. The resulting system of {\sl refinement types}
+ preserves desirable properties of ML such as decidability of type
+ inference, while at the same time allowing more errors to be
+ detected at compiletime. The type system combines abstract
+ interpretation with ideas from the intersection type discipline,
+ but remains closely tied to ML in that refinement types are given
+ only to programs which are already welltyped in ML.",
+ paper = "Free91.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Zeilberger, Noam}
+\begin{chunk}{axiom.bib}
+@misc{Zeil16,
+ author = "Zeilberger, Noam",
+ title = {{Towards a Mathematical Science of Programming}},
+ year = "2016"
+}
+
+\end{chunk}
+
+\index{Zeilberger, Noam}
+\begin{chunk}{axiom.bib}
+@inproceedings{Zeil16a,
+ author = "Zeilberger, Noam",
+ title = {{Principles of Type Refinement}},
+ booktitle = "OPLSS 2016",
+ link = "\url{http://noamz.org/oplss16/refinementsnotes.pdf}",
+ year = "2016",
+ paper = "Zeil16a.pdf"
+}
+
+\end{chunk}
+
+\index{McCarthy, John}
+\begin{chunk}{axiom.bib}
+@incollection{Mcca63,
+ author = "McCarthy, John",
+ title = {{A Basis for a Mathematical Theory of Computation}},
+ booktitle = "Computer Programming and Formal Systems",
+ year = "1963",
+ paper = "Mcca63.pdf"
+}
+
+\end{chunk}
+
+\index{Scott, Dana S.}
+\index{Strachey, Christopher}
+\begin{chunk}{axiom.bib}
+@article{Scot71,
+ author = "Scott, Dana S. and Strachey, Christopher",
+ title = {{Towards a Mathematical Semantics for Computer Languages}},
+ journal = "Proc. Symp. on Computers and Automata",
+ volume = "21",
+ year = "1971",
+ abstract =
+ "Compilers for highlevel languages are generally constructed to
+ give a complete translation of the programs into machine
+ language. As machines merely juggle bit patterns, the concepts of
+ the original language may be lost or at least obscured during this
+ passage. The purpose of a mathematical semantics is to give a
+ correct and meaningful correspondence between programs and
+ mathematical entities in a way that is entirely independent of an
+ implementation. This plan is illustrated in a very elementary
+ method with the usual idea of state transformations. The next
+ section shows why the mathematics of functions has to be modified
+ to accommodate recursive commands. Section 3 explains the
+ modification. Section 4 introduces the environments for handling
+ variables and identifiers and shows how the semantical equations
+ define equivalence of programs. Section 5 gives an exposition of
+ the new type of mathematical function spaces that are required for
+ the semantics of procedures when these are allowed in assignment
+ statements. The conclusion traces some of the background of the
+ project and points the way to future work.",
+ paper = "Scot71.pdf"
+}
+
+\end{chunk}
+
+\index{Mellies, PaulAndre}
+\index{Zeilberger, Noam}
+\begin{chunk}{axiom.bib}
+@inproceedings{Mell15,
+ author = "Mellies, PaulAndre and Zeilberger, Noam",
+ title = {{Functors are Type Refinement Systems}},
+ booktitle = "POPL'15",
+ publisher = "ACM",
+ year = "2015",
+ abstract =
+ "The standard reading of type theory through the lens of category
+ theory is based on the idea of viewing a type system as a category
+ of welltyped terms. We propose a basic revision of this reading;
+ rather than interpreting type systems as categories, we describe
+ them as {\sl functors} from a category of typing derivations to a
+ category of underlying terms. Then, turning this around, we
+ explain how in fact {\sl any} functor gives rise to a generalized
+ type system, with an abstract notion of type judgment, typing
+ derivations and typing rules. This leads to a purely categorical
+ reformulation of various natural classes of type systems as
+ natural classes of functors.
+
+ The main purpose of this paper is to describe the general
+ framework (which can also be seen as providing a categorical
+ analysis of {\sl refinement types}, and to present a few
+ applications. As a larger case study, we revisit Reynold's paper
+ on ``The Meaning of Types'' (2000), showing how the paper's main
+ results may be reconstructed along these lines.",
+ paper = "Mell15.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Shields, Mark}
+\index{Jones, Simon Peyton}
+\begin{chunk}{axiom.bib}
+@inproceedings{Shie02,
+ author = "Shields, Mark and Jones, Simon Peyton",
+ title = {{FirstClass Modules for Haskell}},
+ booktitle = "9th Int. Conf. on Foundations of ObjectOriented Languages",
+ pages = "2840",
+ year = "2002",
+ link = "\url{http://www.microsoft.com/enus/research/wpcontent/uploads/2016/02/first_class_modules.pdf}",
+ abstract =
+ "Though Haskell's module language is quite weak, its core language
+ is highly expressive. Indeed, it is tantalisingly close to being
+ able to express much of the structure traditionally delegated to a
+ separate module language. However, the encoding are awkward, and
+ some situations can't be encoded at all.
+
+ In this paper we refine Haskell's core language to support
+ {\sl firstclass modules} with many of the features of MLstyle
+ modules. Our proposal cleanly encodes signatures, structures and
+ functors with the appropriate type abstraction and type sharing,
+ and supports recursive modules. All of these features work across
+ compilation units, and interact harmoniously with Haskell's class
+ system. Coupled with support for staged computation, we believe
+ our proposal would be an elegant approach to runtime dynamic
+ linking of structured code.
+
+ Our work builds directly upon Jones' work on parameterised
+ signatures, Odersky and L\"aufer's system of higherranked type
+ annotations, Russo's semantics of ML modules using ordinary
+ existential and universal quantifications, and Odersky and
+ Zenger's work on nested types. We motivate the system by examples,
+ and include a more formal presentation in the appendix.",
+ paper = "Shie02.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Dijkstra, E.W.}
+\begin{chunk}{axiom.bib}
+@misc{Dijk71,
+ author = "Dijkstra, E.W.",
+ title = {{A Short Introduction to the Art of Programming}},
+ comment = "EWD316",
+ year = "1971",
+ paper = "Dijk71.pdf"
+}
+
+\end{chunk}
+
+\index{Steele, Guy Lewis}
+\index{Sussman, Gerald Jay}
+\begin{chunk}{axiom.bib}
+@techreport{Stee78a,
+ author = "Steele, Guy Lewis and Sussman, Gerald Jay",
+ title = {{The Art of the Interpreter or, The Modularity Complex (Parts
+ Zero, One, and Two}},
+ type = "technical report",
+ institution = "MIT AI Lab",
+ number = "AIM453",
+ year = "1978",
+ abstract =
+ "We examine the effects of various language design decisions on
+ the programming styles available to a user of the language, with
+ particular emphasis on the ability to incrementally construct
+ modular systems. At each step we exhibit an interactive
+ metacircular interpreter for the language under consideration.
+
+ We explore the consequences of various variable binding
+ disciplines and the introduction of side effects. We find that
+ dynamic scoping is unsuitable for constructing procedural
+ abstractions, but has another role as an agent of modularity,
+ being a structured form of side effect. More general side effects
+ are also found to be necessary to promote modular style. We find
+ that the notion of side effect and the notion of equality (object
+ identity) are mutually constraining: to define one is to define
+ the other.
+
+ The interpreters we exhibit are all written in a simple dialect of
+ LISP, and all implement LISPlike languages. A subset of these
+ interpreters constitute a partial historical reconstruction of the
+ actual evolution of LISP.",
+ paper = "Stee78a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Wadler, Philip}
+\index{Findler, Robert Bruce}
+\begin{chunk}{axiom.bib}
+@inproceedings{Wadl07a,
+ author = "Wadler, Philip and Findler, Robert Bruce",
+ title = {{WellTyped Programs Can't Be Blamed}},
+ booktitle = "Workshop on Scheme and Functional Programming",
+ year = "2007",
+ pages = "1526",
+ abstract =
+ "We show how {\sl contracts} with blame fit naturally with recent
+ work on {\sl hybrid types} and {\sl gradual types}. Unlike hybrid
+ types or gradual types, we require casts in the source code, in
+ order to indicate where type errors may occur. Two (perhaps
+ surprising) aspects of our approach are that refined types can
+ provide useful static guarantees even in the absence of a theorem
+ prover, and that type {\sl dynamic} should not be regarded as a
+ supertype of all other types. We factor the wellknown notion of
+ subtyping into new notions of positive and negative subtyping, and
+ use these to characterise where positive and negative blame may
+ arise. Our approach sharpens and clarifies some recent results in
+ the literature.",
+ paper = "Wadl07a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Michaelson, Greg}
+\begin{chunk}{axiom.bib}
+@book{Mich11,
+ author = "Michaelson, Greg",
+ title = {{Functional Programming Through Lambda Calculus}},
+ year = "2011",
+ publisher = "Dover",
+ isbn = "9780486478838"
+}
+
+\end{chunk}
diff git a/src/axiomwebsite/patches.html b/src/axiomwebsite/patches.html
index c0699f3..bea9cc7 100644
 a/src/axiomwebsite/patches.html
+++ b/src/axiomwebsite/patches.html
@@ 5928,6 +5928,8 @@ books/bookvolbib add references
books/bookvolbib add references
20180505.01.tpd.patch
books/breakurl.sty latex package to break long URLs
+20180516.01.tpd.patch
+books/bookvolbib add references

1.9.1