From df059b589c977f67faf685844153b15c5e7253e8 Mon Sep 17 00:00:00 2001
From: Tim Daly
Date: Fri, 21 Jul 2017 21:05:48 -0400
Subject: [PATCH] bookvol5 system:chdir to system::chdir
Goal: Axiom Maintenance
Fix call to nonexported GCL package symbol

books/bookvol5.pamphlet  8 +
changelog  4 +
patch  3011 +
src/axiomwebsite/patches.html  2 +
src/interp/patches.lisp.pamphlet  4 +
src/interp/util.lisp.pamphlet  2 +
6 files changed, 16 insertions(+), 3015 deletions(-)
diff --git a/books/bookvol5.pamphlet b/books/bookvol5.pamphlet
index a366407..52403e1 100644
--- a/books/bookvol5.pamphlet
+++ b/books/bookvol5.pamphlet
@@ -47591,9 +47591,9 @@ nrlib/index.kaf
(processDir (dirarg thisdir)
(let (allfiles)
(declare (special vmlisp::*indexfilename*))
- (system:chdir (string dirarg))
+ (system::chdir (string dirarg))
(setq allfiles (directory "*"))
- (system:chdir thisdir)
+ (system::chdir thisdir)
(mapcan #'(lambda (f)
(when (string-equal (pathname-type f) "nrlib")
(list (concatenate 'string (namestring f) "/"
@@ -47608,8 +47608,8 @@ nrlib/index.kaf
(if makedatabase? (setq noexpose t))
(when dir (setq nrlibs (processDir dir thisdir)))
(dolist (file filelist)
- (let ((filename (pathname-name file))
- (namedir (directory-namestring file)))
+ (let ((filename (pathname-name (string file)))
+ (namedir (directory-namestring (string file))))
(unless namedir (setq thisdir (concatenate 'string thisdir "/")))
(cond
((setq file (probe-file
diff --git a/changelog b/changelog
index 3afa6a1..25d5c73 100644
--- a/changelog
+++ b/changelog
@@ -1,3 +1,7 @@
+20170721 tpd src/axiomwebsite/patches.html 20170721.01.tpd.patch
+20170721 tpd books/bookvol5 system:chdir to system::chdir
+20170721 tpd src/interp/util.lisp system:chdir to system::chdir
+20170721 tpd src/interp/patches.lisp system:chdir to system::chdir
20170702 tpd src/axiomwebsite/patches.html 20170702.01.tpd.patch
20170702 tpd books/bookheader.tex add Weber thesis
20170702 tpd books/bookvol10.1 add Weber thesis
diff --git a/patch b/patch
index b68b389..b0ae29e 100644
--- a/patch
+++ b/patch
@@ -1,3010 +1,5 @@
\index{Wolfram, Stephen}
\begin{chunk}{axiom.bib}
@book{Wolf91,
 author = "Wolfram, Stephen",
 title = "Mathematica: A System for Doing Mathematics by Computer",
 publisher = "Addison-Wesley",
 isbn = "9780201515022",
 year = "1991"
}
+bookvol5 system:chdir to system::chdir
\end{chunk}
+Goal: Axiom Maintenance
\index{Char, Bruce}
\index{Geddes, Keith O.}
\index{Gonnet, Gaston H.}
\index{Leong, Benton}
\index{Monagan, Michael B.}
\index{Watt, Stephen M.}
\begin{chunk}{axiom.bib}
@book{Char91,
 author = "Char, Bruce and Geddes, Keith O. and Gonnet, Gaston H. and
 Leong, Benton and Monagan, Michael B. and Watt, Stephen M.",
 title = "Maple V Language Reference Manual",
 publisher = "Springer",
 year = "1991",
 isbn = "9780387941240"
}

\end{chunk}

\index{Jenks, Richard D.}
\begin{chunk}{axiom.bib}
@techreport{Jenk70,
 author = "Jenks, Richard D.",
 title = "META/LISP: An interactive translator writing system",
 type = "Research Report",
 number = "RC 2968",
 institution = "IBM Research",
 year = "1970",
 keywords = "axiomref"
}

\end{chunk}

\index{Smolka, Gert}
\begin{chunk}{axiom.bib}
@article{Smol88,
 author = "Smolka, Gert",
 title = "Logic Programming with Polymorphically Order-Sorted Types",
 journal = "Lecture Notes in Computer Science",
 volume = "343",
 pages = "5370",
 year = "1988",
 abstract =
 "This paper presents the foundations for relational logic programming
 with polymorphically ordersorted data types. This type discipline
 combines the notion of parametric polymorphism [Milner 78], which has
 been developed for higher-order functional programming [Harper et al. 86],
 with the notion of order-sorted typing [Goguen 78, Smolka et al. 87],
 which has been developed for equational first-order specification
 and programming [Futatsugi et al. 85]. Both notions are important
 for practical reasons. With parametric polymorphism one avoids the
 need for redefining lists and other parametric data types for every
 type they are used with. Subsorts not only provide for more natural
 type specifications, but also yield more computational power:
 variables can be constrained to sorts rather than to single values and
 typed unification computes directly with sort constraints, thus
 reducing the need for expensive backtracking.",
 paper = "Smol88.pdf"
}

\end{chunk}

\index{Fruehwirth, Thom}
\index{Shapiro, Ehud}
\index{Vardi, Moshe Y.}
\index{Yardeni, Eyal}
\begin{chunk}{axiom.bib}
@inproceedings{Frue91,
 author = "Fruehwirth, Thom and Shapiro, Ehud and Vardi, Moshe Y. and
 Yardeni, Eyal",
 title = "Logic programs as types for logic programs",
 booktitle = "Proc. Sixth Annual IEEE Symp. on Logic in Comp. Sci.",
 publisher = "IEEE",
 pages = "300309",
 year = "1991",
 abstract =
 "Type checking can be extremely useful to the program development process.
 Of particular interest are descriptive type systems, which let the
 programmer write programs without having to define or mention types.
 We consider here optimistic type systems for logic programs. In such
 systems types are conservative approximations to the success set of the
 program predicates. We propose the use of logic programs to describe
 types. We argue that this approach unifies the denotational and
 operational approaches to descriptive type systems and is simpler
 and more natural than previous approaches. We focus on the use of
 unarypredicate programs to describe types. We identify a proper class
 of unarypredicate programs and show that it is expressive enough to
 express several notions of types. We use an analogy with 2way automata
 and a correspondence with alternating algorithms to obtain a complexity
 characterization of type inference and type checking. This
 characterization was facilitated by the use of logic programs to
 represent types.",
 paper = "Frue91.pdf"
}

\end{chunk}

\index{Kifer, Michael}
\index{Wu, James}
\begin{chunk}{axiom.bib}
@inproceedings{Kife91,
 author = "Kifer, Michael and Wu, James",
 title = "A Firstorder Theory of Types and Polymorphism in Logic
 Programming",
 booktitle = "Proc Sixth Annual IEEE Symp. on Logic in Comp. Sci.",
 year = "1991",
 pages = "310321",
 abstract =
 "A logic called typed predicate calculus (TPC) that gives declarative
 meaning to logic programs with type declarations and type inference is
 introduced. The proper interaction between parametric and inclusion
 varieties of polymorphism is achieved through a construct called type
 dependency, which is analogous to implication types but yields more
 natural and succinct specifications. Unlike other proposals where
 typing has extralogical status, in TPC the notion of typecorrectness
 has precise modeltheoretic meaning that is independent of any
 specific typechecking or typeinference procedure. Moreover, many
 different approaches to typing that were proposed in the past can be
 studied and compared within the framework of TPC. Another novel
 feature of TPC is its reflexivity with respect to type declarations;
 in TPC, these declarations can be queried the same way as any other
 data. Type reflexivity is useful for browsing knowledge bases and,
 potentially, for debugging logic programs.",
 paper = "Kife91.pdf"
}

\end{chunk}

\index{Butler, Greg}
\index{Cannon, John}
\begin{chunk}{axiom.bib}
@inproceedings{Butl90,
 author = "Butler, Greg and Cannon, John",
 title = "The Design of Cayley  A Language for Modern Algebra",
 booktitle = "DISCO 1990",
 year = "1990",
 pages = "1019",
 abstract =
 "Established practice in the domain of modern algebra has shaped the
 design of Cayley. The design has also been responsive to the needs of
 its users. The requirements of the users include consistency with
 common mathematical notation; appropriate data types such as sets,
 sequences, mappings, algebraic structures and elements; efficiency;
 extensibility; power of inbuilt functions and procedures for known
 algorithms; and access to common examples of algebraic structures. We
 discuss these influences on the design of Cayley's user language.",
 paper = "Butl90.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Kaliszyk, Cezary}
\index{Wiedijk, Freek}
\begin{chunk}{axiom.bib}
@article{Kali07,
 author = "Kaliszyk, Cezary and Wiedijk, Freek",
 title = "Certified Computer Algebra on Top of an Interactive Theorem
 Prover",
 journal = "LNAI",
 volume = "4573",
 pages = "94105",
 year = "2007",
 abstract =
 "We present a prototype of a computer algebra system that is built on
 top of a proof assistant, HOL Light. This architecture guar antees
 that one can be certain that the system will make no mistakes. All
 expressions in the system will have precise semantics, and the proof
 assistant will check the correctness of all simplifications according
 to this semantics. The system actually {\sl proves} each simplification
 performed by the computer algebra system.

 Although our system is built on top of a proof assistant, we designed
 the user interface to be very close in spirit to the interface of
 systems like Maple and Mathematica. The system, therefore, allows the
 user to easily probe the underlying automation of the proof assistant
 for strengths and weaknesses with respect to the automation of
 mainstream computer algebra systems. The system that we present is a
 prototype, but can be straightforwardly scaled up to a practical
 computer algebra system",
 paper = "Kali07.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Chapman, Peter}
\index{McKinna, James}
\index{Urban, Christian}
\begin{chunk}{axiom.bib}
@article{Chap08,
 author = "Chapman, Peter and McKinna, James and Urban, Christian",
 title = "Mechanising a Proof of Craig's Interpolation Theorem for
 Intuitionistic Logic in Nominal Isabelle",
 journal = "LNAI",
 volume = "5144",
 year = "2008",
 pages = "3852",
 abstract =
 "Craig's Interpolation Theorem is an important metatheoretical result
 for several logics. Here we describe a formalisation of the result for
 first-order intuitionistic logic without function symbols or equality,
 with the intention of giving insight into how other such results in
 proof theory might be mechanically verified, notable cutadmissibility.
 We use the package {\sl Nominal Isabelle}, which easily deals with the
 binding issues in the quantifier cases of the proof",
}

\end{chunk}

\index{Lobachev, Oleg}
\index{Loogen, Rita}
\begin{chunk}{axiom.bib}
@article{Loba08,
 author = "Lobachev, Oleg and Loogen, Rita",
 title = "Towards an Implementation of a Computer Algebra System in a
 Functional Language",
 journal = "LNAI",
 year = "2008",
 pages = "141154",
 abstract =
 "This paper discusses the pros and cons of using a functional language
 for implementing a computer algebra system. The contributions of the
 paper are twofold. Firstly, we discuss some languagecentered design
 aspects of a computer algebra system  the ``language unity''
 concept. Secondly, we provide an implementation of a fast polynomial
 multiplication algorithm, which is one of the core elements of a
 computer algebra system. The goal of the paper is to test the
 feasibility of an implementation of (some elements of) a computer
 algebra system in a modern functional language.",
 paper = "Loba08.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Butler, Greg}
\begin{chunk}{axiom.bib}
@article{Butl96,
 author = "Butler, Greg",
 title = "Software Architectures for Computer Algebra: A Case Study",
 journal = "DISCO 96",
 year = "1996",
 pages = "277285",
 abstract =
 "The architectures of the existing computer algebra systems have not
 been discussed sufficiently in the literature. Instead, the focus
 has been on the design of the related programming language, or the
 design of a few key data structures.

 We address this deficiency with a case study of the architecture of
 CAYLEY. Our aim is twofold: to capture this knowledge before the total
 passing of a system now made obsolete by MAGMA; and to encourage others
 to describe the architecture of the computer algebra systems with which
 they are familiar.

 The longterm goal is a better understanding of how to construct
 computer algebra systems in the future.",
 paper = "Butl96.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Mosses, Peter D.}
\begin{chunk}{axiom.bib}
@article{Moss93,
 author = "Mosses, Peter D.",
 title = "The Use of Sorts in Algebraic Specifications",
 journal = "Recent Trends in Data Type Specification",
 year = "1993",
 pages = "6691",
 abstract =
 "Algebraic specification frameworks exploit a variety of sort
 disciplines. The treatment of sorts has a considerable influence on
 the ease with which such features as partiality and polymorphism can be
 specified. This survey gives an accessible overview of various
 frameworks, focusing on their sort disciplines and assessing their
 strengths and weaknesses for practical applications. Familiarity with
 the basic notions of algebraic specification is assumed.",
 paper = "Moss93.pdf"
}

\end{chunk}

\index{Kredel, Heinz}
\begin{chunk}{axiom.bib}
@article{Kred08,
 author = "Kredel, Heinz",
 title = "Unique Factorization Domains in the Java Computer Algebra System",
 journal = "Automated Deduction in Geometry",
 year = "2008",
 pages = "86115",
 abstract =
 "This paper describes the implementation of recursive algorithms in
 unique factorization domains, namely multivariate polynomial greatest
 common divisors (gcd) and factorization into irreducible parts in the
 Java computer algebra library (JAS). The implementation of gcds,
 resultants and factorization is part of the essential building blocks
 for any computation in algebraic geometry, in particular in automated
 deduction in geometry. There are various implementations of these
 algorithms in procedural programming languages. Our aim is an
 implementation in a modern object oriented programming language with
 generic data types, as it is provided by Java programming language. We
 exemplify that the type design and implementation of JAS is suitable
 for the implementation of several greatest common divisor algorithms
 and factorization of multivariate polynomials. Due to the design we
 can employ this package in very general settings not commonly seen in
 other computer algebra systems. As for example, in the coefficient
 arithmetic for advanced Gröbner basis computations like in polynomial
 rings over rational function fields or (finite, commutative) regular
 rings. The new package provides factory methods for the selection of
 one of the several implementations for non experts. Further we
 introduce a parallel proxy for gcd implementations which runs
 different implementations concurrently.",
 paper = "Kred08.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Harrison, John}
\index{Thery, Laurent}
\begin{chunk}{axiom.bib}
@article{Harr94,
 author = "Harrison, John and Thery, Laurent",
 title = "Extending the HOL theorem prover with a computer algebra
 system to reason about the reals",
 journal = "Lecture Notes in Computer Science",
 volume = "780",
 pages = "174184",
 year = "2005",
 abstract =
 "In this paper we describe an environment for reasoning about the reals
 which combines the rigour of a theorem prover with the power of a
 computer algebra system.",
 paper = "Harr94.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{MedinaBulo, I.}
\index{PalomoLozano, F.}
\index{AlonsoJimenez, J.A.}
\index{RuizReina, J.L.}
\begin{chunk}{axiom.bib}
@inproceedings{Medi04,
 author = "MedinaBulo, I. and PalomoLozano, F. and AlonsoJimenez, J.A.
 and RuizReina, J.L.",
 title = "Verified Computer Algebra in ACL2",
 booktitle = "ISSAC 04, LNCS Volume 3249",
 year = "2004",
 pages = "171184",
 abstract =
 "In this paper, we present the formal verification of a COMMON LISP
 implementation of Buchberger's algorithm for computing Groebner bases
 of polynomial ideals. This work is carried out in the ACL2 system and
 shows how verified Computer Algebra can be achieved in an executable
 logic.",
 paper = "Medi04.pdf"
}

\end{chunk}

\index{Davenport, James H.}
\begin{chunk}{axiom.bib}
@inproceedings{Dave08,
 author = "Davenport, James H.",
 title = "Effective Set Membership in Computer Algebra and Beyond",
 booktitle = "Int. Conf. on Intelligent Computer Mathematics",
 pages = "266269",
 year = "2008",
 journal = "LNCS",
 volume = "5144",
 abstract =
 "In previous work we showed the importance of distinguishing ``I know
 $X\ne Y$'' from ``I don't know that $X=Y$''. In this paper we look at
 effective set membership, starting with Groebner bases, where the
 issues are wellexpressed in algebra systems, and going on to integration
 and other questions of 'computer calculus'.

 In particular, we claim that a better recognition of the role of set
 membership would clarify some features of computer algebra systems,
 such as 'what does an integral mean as output'.",
 paper = "Dave08.pdf"
}

\end{chunk}

\index{Ueberberg, Johannes}
\begin{chunk}{axiom.bib}
@inproceedings{Uebe94,
 author = "Ueberberg, Johannes",
 title = "Interactive theorem proving and computer algebra",
 booktitle = "AISMC 94",
 year = "1994",
 pages = "19",
 paper = "Uebe94.pdf"
}

\end{chunk}

\index{Jolly, Raphael}
\begin{chunk}{axiom.bib}
@inproceedings{Joll13,
 author = "Jolly, Raphael",
 title = "Categories as Type Classes in the Scala Algebra System",
 booktitle = "CASC 2013",
 year = "2013",
 pages = "209218",
 journal = "LNCS",
 volume = "8136",
 abstract =
 "A characterization of the categorical view of computer algebra is
 proposed. Some requirements on the ability for abstraction that
 programming languages must have in order to allow a categorical
 approach is given. Objectoriented inheritance is presented as a
 suitable abstraction scheme and exemplified by the Java Algebra
 System. Type classes are then introduced as an alternate abstraction
 scheme and shown to be eventually better suited for modeling
 categories. Pro and cons of the two approaches are discussed and a
 hybrid solution is exhibited.",
 paper = "Joll13.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Aladjev, Victor}
\begin{chunk}{axiom.bib}
@inproceedings{Alad03,
 author = "Aladjev, Victor",
 title = "Computer Algebra System Maple: A New Software Library",
 booktitle = "ICCS 2003",
 year = "2003",
 pages = "711717",
 journal = "LNCS",
 volume = "2657",
 abstract =
 "The paper represents Maple library containing more than 400
 procedures expanding possibilities of the Maple package of releases
 6,7 and 8. The library is structurally organized similarly to the main
 Maple library. The process of the library installing is simple enough
 as a result of which the above library will be logically linked with
 the main Maple library, supporting access to software located in it
 equally with standard Maple software. The demo library is delivered
 free of charge at request to addresses mentioned above.",
 paper = "Alad03.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Jackson, Paul}
\begin{chunk}{axiom.bib}
@inproceedings{Jack94,
 author = "Jackson, Paul",
 title = "Exploring abstract algebra in constructive type theory",
 booktitle = "CADE 1994",
 year = "1994",
 pages = "590604",
 journal = "LNCS",
 volume = "814",
 abstract =
 "I describe my implementation of computational abstract algebra in the
 Nuprl system. I focus on my development of multivariate polynomials. I
 show how I use Nuprl's expressive type theory to define classes of
 free abelian monoids and free monoid algebras. These classes are
 combined to create a class of all implementations of polynomials. I
 discuss the issues of subtyping and computational content that came up
 in designing the class definitions. I give examples of relevant theory
 developments, tactics and proofs. I consider how Nuprl could act as an
 algebraic ‘oracle’ for a computer algebra system and the relevance of
 this work for abstract functional programming.",
 paper = "Jack94.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{van Hulzen, J. A.}
\begin{chunk}{axiom.bib}
@inproceedings{Hulz82,
 author = "van Hulzen, J. A.",
 title = "Computer algebra systems viewed by a notorious user",
 booktitle = "EUROCAM 1982",
 pages = "166-180",
 year = "1982",
 journal = "LNCS",
 volume = "144",
 abstract =
 "Are design and use of computer algebra systems disjoint or
 complementary activities? Raising and answering this question are
 equally controversial, since a clear distinction between languages
 features and library facilities is hard to make. Instead of even
 attempting to answer this rather academic question it is argued why it
 is reasonable to raise related questions: Is SMP a paradox? Is it
 realistic to neglect inaccurate input data? Is a very high level
 programming language instrumental for equal opportunity employment in
 scientific research?",
 paper = "Hulz82.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Caviness, B.F.}
\begin{chunk}{axiom.bib}
@inproceedings{Cavi85,
 author = "Caviness, B.F.",
 title = "Computer Algebra: Past and Future",
 booktitle = "EUROCAL 1985",
 year = "1985",
 pages = "118",
 journal = "LNCS",
 volume = "203",
 abstract =
 "The preceding just touches on some of the highlights of the
 accomplishments and unsolved problems in computer algebra. A really
 comprehensive survey would be much too long for the space available
 here. I close with the following quote, which has been attributed to
 Albert Einstein and helps, perhaps, to keep a proper prospective on
 our work: 'The symbolic representation of abstract entities is doomed
 to its rightful place of relative insignificance in a world in which
 flowers and beautiful women abound.'",
 paper = "Cavi85.pdf"
}

\end{chunk}

\index{Schreiner, Wolfgang}
\index{DanielczykLanderl, Werner}
\index{Marin, Mircea}
\index{Stocher, Wolfgang}
\begin{chunk}{axiom.bib}
@inproceedings{Schr00,
 author = "Schreiner, Wolfgang and DanielczykLanderl, Werner and
 Marin, Mircea and Stocher, Wolfgang",
 title = "A Generic Programming Environment for HighPerformance
 Mathematical Libraries",
 booktitle = "Generic Programming",
 year = "2000",
 pages = "256267",
 journal = "LNCS",
 volume = "1766",
 abstract =
 "We report on a programming environment for the development of generic
 mathematical libraries based on functors (parameterized modules) that
 have rigorously specified but very abstract interfaces. We focus on
 the combination of the functorbased programming style with software
 engineering principles in large development projects. The generated
 target code is highly efficient and can be easily embedded into
 foreign application environments.",
 paper = "Schr00.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Magaud, Nicolas}
\index{Narboux, Julien}
\index{Schreck, Pascal}
\begin{chunk}{axiom.bib}
@inproceedings{Maga08,
 author = "Magaud, Nicolas and Narboux, Julien and Schreck, Pascal",
 title = "Formalizing Projective Plane Geometry in Coq",
 booktitle = "Automated Deduction in Geometry",
 year = "2008",
 pages = "141162",
 journal = "LNCS",
 volume = "6301",
 abstract =
 "We investigate how projective plane geometry can be formalized in a
 proof assistant such as Coq. Such a formalization increases the
 reliability of textbook proofs whose details and particular cases are
 often overlooked and left to the reader as exercises. Projective plane
 geometry is described through two different axiom systems which are
 formally proved equivalent. Usual properties such as decidability of
 equality of points (and lines) are then proved in a constructive
 way. The duality principle as well as formal models of projective
 plane geometry are then studied and implemented in Coq. Finally, we
 formally prove in Coq that Desargues’ property is independent of the
 axioms of projective plane geometry.",
 paper = "Maga08.pdf"
}

\end{chunk}

\index{Fevre, Stephane}
\index{Wang, Dongming}
\begin{chunk}{axiom.bib}
@inproceedings{Fevr98,
 author = "Fevre, Stephane and Wang, Dongming",
 title = "Proving Geometric Theorems using Clifford Algebra and
 Rewrite Rules",
 booktitle = "Conf. on Automated Deduction",
 year = "1998",
 pages = "1732",
 journal = "LNCS",
 volume = "1421",
 abstract =
 "We consider geometric theorems that can be stated constructively by
 introducing points, while each newly introduced point may be
 represented in terms of the previously constructed points using
 Clifford algebraic operators. To prove a concrete theorem, one first
 substitutes the expressions of the dependent points into the
 conclusion Clifford polynomial to obtain an expression that involves
 only the free points and parameters. A termrewriting system is
 developed that can simplify such an expression to 0, and thus prove
 the theorem. A large class of theorems can be proved effectively in
 this coordinatefree manner. This paper describes the method in detail
 and reports on our preliminary experiments.",
 paper = "Fevr98.pdf"
}

\end{chunk}

\index{Kerber, Manfred}
\index{Kohlhase, Michael}
\index{Sorge, Volker}
\begin{chunk}{axiom.bib}
@article{Kerb98,
 author = "Kerber, Manfred and Kohlhase, Michael and Sorge, Volker",
 title = "Integrating Computer Algebra with Proof Planning",
 journal = "Journal of Automated Reasoning",
 volume = "21",
 number = "3",
 pages = "327355",
 year = "1998",
 abstract =
 "Mechanized reasoning systems and computer algebra systems have
 different objectives. Their integration is highly desirable, since
 formal proofs often involve both of the two different tasks proving
 and calculating. Even more important, proof and computation are often
 interwoven and not easily separable.

 In this article we advocate an integration of computer algebra into
 mechanized reasoning systems at the proof plan level. This approach
 allows us to view the computer algebra algorithms as methods, that is,
 declarative representations of the problemsolving knowledge specific
 to a certain mathematical domain. Automation can be achieved in many
 cases by searching for a hierarchic proof plan at the method level by
 using suitable domainspecific control knowledge about the
 mathematical algorithms. In other words, the uniform framework of
 proof planning allows us to solve a large class of problems that are
 not automatically solvable by separate systems.

 Our approach also gives an answer to the correctness problems inherent
 in such an integration. We advocate an approach where the computer
 algebra system produces highlevel protocol information that can be
 processed by an interface to derive proof plans. Such a proof plan in
 turn can be expanded to proofs at different levels of abstraction, so
 the approach is well suited for producing a highlevel verbalized
 explication as well as for a lowlevel, machinecheckable,
 calculuslevel proof.

 We present an implementation of our ideas and exemplify them using an
 automatically solved example.",
 paper = "Kerb98.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Loos, Ruediger G. K.}
\begin{chunk}{axiom.bib}
@article{Loos74,
 author = "Loos, Ruediger G. K.",
 title = "Toward a Formal Implementation of Computer Algebra",
 journal = "SIGSAM",
 volume = "8",
 number = "3",
 pages = "916",
 year = "1974",
 abstract =
 "We consider in this paper the task of synthesizing an algebraic
 system. Today the task is significantly simpler than in the pioneer
 days of symbol manipulation, mainly because of the work done by the
 pioneers in our area, but also because of the progress in other areas
 of Computer Science. There is now a considerable collection of
 algebraic algorithms at hand and a much better understanding of data
 structures and programming constructs than only a few years ago.",
 paper = "Loos74.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Gries, David}
\begin{chunk}{axiom.bib}
@book{Grie78,
 author = "Gries, David",
 title = "Programming Methodology",
 publisher = "SpringerVerlag",
 year = "1978"
}

\end{chunk}

\index{Wirsing, Martin}
\begin{chunk}{axiom.bib}
@InCollection{Wirs91,
 author = "Wirsing, Martin",
 title = "Algebraic Specification",
 booktitle = "Handbook of Theoretical Computer Science (Vol B)",
 publisher = "MIT Press",
 year = "1991",
 pages = "675788",
 chapter = "13",
 isbn = "0444880747"
}

\end{chunk}

\index{Jones, Simon Peyton}
\begin{chunk}{axiom.bib}
@techreport{Jone98,
 author = "Jones, Simon Peyton",
 title = "The Haskell 98 Language and Libraries. The Revised Report",
 institution = "Cambridge University Press",
 year = "1998",
 type = "technical report",
 link = "\url{https://www.haskell.org/definition/haskell98-report.pdf}"
}

\end{chunk}

\index{Aho, Alfred V.}
\index{Sethi, Ravi}
\index{Ullman, Jeffrey D.}
\begin{chunk}{axiom.bib}
@book{Ahox86,
 author = "Aho, Alfred V. and Sethi, Ravi and Ullman, Jeffrey D.",
 title = "Compilers: Principles, Techniques, and Tools",
 year = "1986",
 publisher = "Addison-Wesley",
 isbn = "9780201100884"
}

\end{chunk}

\index{Mitchell, John C.}
\begin{chunk}{axiom.bib}
@InCollection{Mitc91,
 author = "Mitchell, John C.",
 title = "Type Systems for Programming Languages",
 booktitle = "Handbook of Theoretical Computer Science (Vol B.)",
 pages = "365458",
 year = "1991",
 publisher = "MIT Press",
 isbn = "0444880747"
}

\end{chunk}

\index{Strachey, Christopher}
\begin{chunk}{axiom.bib}
@article{Stra00,
 author = "Strachey, Christopher",
 title = "Fundamental Concepts in Programming Languages",
 journal = "HigherOrder and Symbolic Computation",
 volume = "13",
 number = "12",
 pages = "1149",
 year = "2000",
 abstract =
 "This paper forms the substance of a course of lectures given at the
 International Summer School in Computer Programming at Copenhagen in
 August, 1967. The lectures were originally given from notes and the
 paper was written after the course was finished. In spite of this, and
 only partly because of the shortage of time, the paper still retains
 many of the shortcomings of a lecture course. The chief of these are
 an uncertainty of aim—it is never quite clear what sort of audience
 there will be for such lectures—and an associated switching from
 formal to informal modes of presentation which may well be less
 acceptable in print than it is natural in the lecture room. For these
 (and other) faults, I apologise to the reader.

 There are numerous references throughout the course to CPL [1–3]. This
 is a programming language which has been under development since 1962
 at Cambridge and London and Oxford. It has served as a vehicle for
 research into both programming languages and the design of
 compilers. Partial implementations exist at Cambridge and London. The
 language is still evolving so that there is no definitive manual
 available yet. We hope to reach another resting point in its evolution
 quite soon and to produce a compiler and reference manuals for this
 version. The compiler will probably be written in such a way that it
 is relatively easyto transfer it to another machine, and in the first
 instance we hope to establish it on three or four machines more or
 less at the same time.

 The lack of a precise formulation for CPL should not cause much
 difficulty in this course, as we are primarily concerned with the
 ideas and concepts involved rather than with their precise
 representation in a programming language.",
 paper = "Stra00.pdf"
}

\end{chunk}

\index{Goguen, Joseph}
\index{Meseguer, Jose}
\begin{chunk}{axiom.bib}
@techreport{Gogu89,
 author = "Goguen, Joseph and Meseguer, Jose",
 title = "Order-Sorted Algebra I : Equational Deduction for Multiple
 Inheritance, Overloading, Exceptions, and Partial Operations",
 type = "technical report",
 institution = "SRI International",
 year = "1989",
 number = "SRIR 8910"
}

\end{chunk}

\index{Hindley, R.}
\begin{chunk}{axiom.bib}
@article{Hind69,
 author = "Hindley, R.",
 title = "The Principal TypeScheme of an Object in Combinatory Logic",
 journal = "Trans. AMS",
 volume = "146",
 year = "1969",
 pages = "2960",
 paper = "Hind69.pdf"
}

\end{chunk}

\index{Milner, Robin}
\begin{chunk}{axiom.bib}
@article{Miln78,
 author = "Milner, Robin",
 title = "A Theory of Type Polymorphism in Programming",
 journal = "J. Computer and System Sciences",
 volume = "17",
 number = "3",
 year = "1978",
 pages = "348375",
 abstract =
 "The aim of this work is largely a practical one. A widely employed
 style of programming, particularly in structureprocessing languages
 which impose no discipline of types, entails defining procedures which
 work well on objects of a wide variety. We present a formal type
 discipline for such polymorphic procedures in the context of a simple
 programming language, and a compile time typechecking algorithm which
 enforces the discipline. A Semantic Soundness Theorem (based on a
 formal semantics for the language) states that well-typed programs
 cannot “go wrong” and a Syntactic Soundness Theorem states that if
 the algorithm W accepts a program then it is well typed. We also
 discuss extending these results to richer languages; a type-checking
 algorithm based on W is in fact already implemented and working, for
 the metalanguage ML in the Edinburgh LCF system.",
 paper = "Miln78.pdf"
}

\end{chunk}

\index{Damas, Luis}
\index{Milner, Robin}
\begin{chunk}{axiom.bib}
@inproceedings{Dama82,
 author = "Damas, Luis and Milner, Robin",
 title = "Principal Typeschemes for Functional Programs",
 booktitle = "POPL 82",
 pages = "207212",
 year = "1982",
 isbn = "0897980656",
 paper = "Dama82.pdf"
}

\end{chunk}

\index{Milner, R.}
\index{Tofte, M.}
\index{Harper, R.}
\begin{chunk}{axiom.bib}
@book{Miln90,
 author = "Milner, Robin and Tofte, Mads and Harper, Robert",
 title = "The Definition of Standard ML",
 publisher = "Lab for Foundations of Computer Science, Univ. Edinburgh",
 link = "\url{http://smlfamily.org/sml90defn.pdf}",
 year = "1990",
 paper = "Miln90.pdf"
}

\end{chunk}

\index{Milner, R.}
\index{Tofte, M.}
\begin{chunk}{axiom.bib}
@book{Miln91,
 author = "Milner, Robin and Tofte, Mads",
 title = "Commentary on Standard ML",
 publisher = "Lab for Foundations of Computer Science, Univ. Edinburgh",
 link = "\url{https://pdfs.semanticscholar.org/d199/16cbbda01c06b6eafa0756416e8b6f15ff44.pdf}",
 year = "1991",
 paper = "Miln91.pdf"
}

\end{chunk}

\index{Turner, D.A.}
\begin{chunk}{axiom.bib}
@article{Turn85,
 author = "Turner, D. A.",
 title = "Miranda: A nonstrict functional language with polymorphic types",
 journal = "Lecture Notes in Computer Science",
 volume = "201",
 pages = "116",
 year = "1985",
 link = "\url{http://miranda.org.uk/nancy.html}",
 paper = "Turn85.pdf"
}

\end{chunk}

\index{Turner, D.A.}
\begin{chunk}{axiom.bib}
@article{Turn86,
 author = "Turner, D. A.",
 title = "An Overview of Miranda",
 journal = "SIGPLAN Notices",
 volume = "21",
 number = "12",
 pages = "158166",
 year = "1986",
 link = "\url{http://miranda.org.uk/}",
 paper = "Turn86.pdf"
}

\end{chunk}

\index{Foderaro, John K.}
\begin{chunk}{axiom.bib}
@phdthesis{Fode83,
 author = "Foderaro, John K.",
 title = "The Design of a Language for Algebraic Computation Systems",
 school = "U.C. Berkeley, EECS Dept.",
 year = "1983",
 link = "\url{http://digitalassets.lib.berkeley.edu/techreports/ucb/text/CSD83160.pdf}",
 abstract =
 "This thesis describes the design of a language to support a
 mathematicsoriented symbolic algebra system. The language, which we
 have named NEWSPEAK, permits the complex interrelations of
 mathematical types, such as rings, fields and polynomials to be
 described. Functions can be written over the most general type that
 has the required operations and properties and then inherited by
 subtypes. All function calls are generic, with most function
 resolution done at compile time. Newspeak is typesafe, yet permits
 runtime creation of types.",
 paper = "Fode83.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Fuh, YouChin}
\index{Mishra, Prateek}
\begin{chunk}{axiom.bib}
@article{Fuhx90,
 author = "Fuh, YouChin",
 title = "Type Inference with Subtypes",
 journal = "Theoretical Computer Science",
 volume = "73",
 number = "2",
 year = "1990",
 pages = "155175",
 abstract =
 "We extend polymorphic type inference with a very general notion of
 subtype based on the concept of type transformation. This paper
 describes the following results. We prove the existence of (i)
 principal type property and (ii) syntactic completeness of the
 typechecker, for type inference with subtypes. This result is
 developed with only minimal assumptions on the underlying theory of
 subtypes. As a consequence, it can be used as the basis for type
 inference with a broad class of subtype theories. For a particular
 “structural” theory of subtypes, those engendered by inclusions
 between type constants only, we show that principal types are
 compactly expressible. This suggests that type inference for the
 structured theory of subtypes is feasible. We describe algorithms
 necessary for such a system. The main algorithm we develop is called
 MATCH, an extension to the classical unification algorithm. A proof of
 correctness for MATCH is given.",
 paper = "Fuhx90.pdf"
}

\end{chunk}

\index{Nipkow, Tobias}
\index{Snelting, Gregor}
\begin{chunk}{axiom.bib}
@inproceedings{Nipk91,
 author = "Nipkow, Tobias and Snelting, Gregor",
 title = "Type Classes and Overloading Resolution via OrderSorted
 Unification",
 booktitle = "Proc 5th ACM Conf. Functional Prog. Lang. and Comp. Arch.",
 year = "1991",
 publisher = "Springer",
 journal = "LNCS",
 volume = "523",
 pages = "114",
 abstract =
 "We present a type inference algorithm for a Haskelllike language
 based on ordersorted unification. The language features polymorphism,
 overloading, type classes and multiple inheritance. Class and instance
 declarations give rise to an ordersorted algebra of types. Type
 inference essentially reduces to the Hindley/Milner algorithm where
 unification takes place in this ordersorted algebra of types. The
 theory of ordersorted unification provides simple sufficient
 conditions which ensure the existence of principal types. The
 semantics of the language is given by a translation into ordinary
 lambdacalculus. We prove the correctness of our type inference
 algorithm with respect to this semantics."
}

\end{chunk}

\index{SchmidtSchauss, M.}
\begin{chunk}{axiom.bib}
@book{Schm89,
 author = "SchmidtSchauss, M.",
 title = "Computational Aspects of an OrderSorted Logic with Term
 Declarations",
 publisher = "Springer",
 isbn = "9783540517054",
 year = "1989"
}

\end{chunk}

\index{Smolka, G.}
\index{Nutt, W.}
\index{Goguen, J.}
\index{Meseguer, J.}
\begin{chunk}{axiom.bib}
@InCollection{Smol89,
 author = "Smolka, G. and Nutt, W. and Goguen, J. and Meseguer, J.",
 title = "Ordersorted Equational Computation",
 booktitle = "Resolution of Equations in Algebraic Structures (Vol 2)",
 pages = "297367",
 year = "1989"
}

\end{chunk}

\index{Waldmann, Uwe}
\begin{chunk}{axiom.bib}
@article{Wald92,
 author = "Waldmann, Uwe",
 title = "Semantics of Ordersorted Specifications",
 journal = "Theoretical Computer Science",
 volume = "94",
 number = "12",
 year = "1992",
 pages = "135",
 abstract =
 "Ordersorted specifications (i.e. manysorted specifications with
 subsort relations) have been proved to be a useful tool for the
 description of partially defined functions and error handling in
 abstract data types.

 Several definitions for ordersorted algebras have been proposed. In
 some papers an operator symbol, which may be multiply declared, is
 interpreted by a family of functions (“overloaded” algebras). In other
 papers it is always interpreted by a single function (“nonoverloaded”
 algebras). On the one hand, we try to demonstrate the differences
 between these two approaches with respect to equality, rewriting and
 completion; on the other hand, we prove that in fact both theories can
 be studied in parallel provided that certain notions are suitably
 defined.

 The overloaded approach differs from the manysorted and the
 nonoverloaded one in that the overloaded term algebra is not
 necessarily initial. We give a decidable sufficient criterion for the
 initiality of the term algebra, which is less restrictive than
 GJMregularity as proposed by Goguen, Jouannaud and Meseguer.

 Sortdecreasingness is an important property of rewrite systems since
 it ensures that confluence and ChurchRosser property are equivalent,
 that the overloaded and nonoverloaded rewrite relations agree, and
 that variable overlaps do not yield critical pairs. We prove that it
 is decidable whether or not a rewrite rule is sortdecreasing, even if
 the signature is not regular.

 Finally, we demonstrate that every overloaded completion procedure may
 also be used in the nonoverloaded world, but not conversely, and that
 specifications exist that can only be completed using the
 nonoverloaded semantics.",
 paper = "Wald92.pdf"
}

\end{chunk}

\index{Comon, Hubert}
\begin{chunk}{axiom.bib}
@inproceedings{Como90,
 author = "Comon, Hubert",
 title = "Equational Formulas in Ordersorted Algebras",
 booktitle = "ICALP 90. Automata, Languages and Programming",
 year = "1990",
 pages = "674688",
 abstract =
 "We propose a set of transformation rules for first order formulas
 whose atoms are either equations between terms or “sort constraints” t
 ε s where s is a regular tree language (or a sort in the algebraic
 specification community). This set of rules is proved to be correct,
 terminating and complete. This shows in particular that the first
 order theory of any rational tree language is decidable, extending the
 results of [Mal71,CL89,Mah88]. We also show how to apply our results
 to automatic inductive proofs in equational theories."
}

\end{chunk}

\index{MacLane, Saunders}
\begin{chunk}{axiom.bib}
@book{Macl91,
 author = "MacLane, Saunders",
 title = "Categories for the Working Mathematician",
 publisher = "Springer",
 year = "1991",
 isbn = "0387984038",
 link = "\url{http://www.maths.ed.ac.uk/~aar/papers/maclanecat.pdf}",
 paper = "Macl91.pdf"
}

\end{chunk}

\index{Schubert, Horst}
\begin{chunk}{axiom.bib}
@book{Schu72,
 author = "Schubert, Horst",
 title = "Categories",
 publisher = "SpringerVerlag",
 year = "1972"
}

\end{chunk}

\index{Freyd, Peter J.}
\index{Scedrov, Andre}
\begin{chunk}{axiom.bib}
@book{Frey90,
 author = "Freyd, Peter J. and Scedrov, Andre",
 title = "Categories, Allegories",
 publisher = "Elsevier Science",
 year = "1990",
 isbn = "0444703683"
}

\end{chunk}

\index{Rydeheard, D. E.}
\index{Burstall, R. M.}
\begin{chunk}{axiom.bib}
@book{Ryde88,
 author = "Rydeheard, D. E. and Burstall, R. M.",
 title = "Computational Category Theory",
 publisher = "Prentice Hall",
 year = "1988",
 isbn = "9780131627369"
}

\end{chunk}

\index{Ehrig, Hartmut}
\index{Mahr, Bernd}
\begin{chunk}{axiom.bib}
@book{Ehri85,
 author = "Ehrig, Hartmut and Mahr, Bernd",
 title = "Fundamentals of Algebraic Specification 1: Equations and
 Initial Semantics",
 publisher = "Springer Verlag",
 year = "1985",
 isbn = "9780387137186"
}

\end{chunk}

\index{Loos, Rudiger}
\begin{chunk}{axiom.bib}
@article{Loos72,
 author = "Loos, Rudiger",
 title = "Algebraic Algorithm Descriptions as Programs",
 journal = "ACM SIGSAM Bulletin",
 volume = "23",
 year = "1972",
 pages = "1624",
 abstract =
 "We propose methods for writing algebraic programs in an algebraic
 notation. We discuss the advantages of this approach and a specific
 example",
 paper = "Loos72.pdf"
}

\end{chunk}

\index{Loos, Rudiger}
\begin{chunk}{axiom.bib}
@article{Loos76,
 author = "Loos, Rudiger",
 title = "The Algorithm Description Language (ALDES) (report)",
 journal = "ACM SIGSAM Bulletin",
 volume = "10",
 number = "1",
 year = "1976",
 pages = "1438",
 abstract =
 "ALDES is a formalization of the method to describe algorithms used in
 Knuth's books. The largest documentation of algebraic algorithms,
 Collins' SAC system for Computer Algebra, is written in this
 language. In contrast to PASCAL it provides automatic storage
 deallocation. Compared to LISP equal emphasis was placed on efficiency
 of arithmetic, list processing, and array handling. To allow the
 programmer full control of efficiency all mechanisms of the system are
 accessible to him. Currently ALDES is available as a preprocessor to
 ANSI Fortran, using no additional primitives.",
 paper = "Loos76.pdf"
}

\end{chunk}

\index{Loos, Rudiger}
\index{Collins, George E.}
\begin{chunk}{axiom.bib}
@book{Loos92,
 author = "Loos, Rudiger and Collins, George E.",
 title = "Revised Report on the ALgorithm Language ALDES",
 publisher = "Institut fur Informatik",
 year = "1992"
}

\end{chunk}

\index{Collins, George E.}
\index{Loos, Rudiger}
\begin{chunk}{axiom.bib}
@techreport{Coll90,
 author = "Collins, George E. and Loos, Rudiger",
 title = "Specification and Index of SAC2 Algorithms",
 institution = "Univ. of Tubingen",
 type = "technical report",
 year = "1990",
 number = "WSI904"
}

\end{chunk}

\index{Buendgen, R.}
\index{Hagel, G.}
\index{Loos, R.}
\index{Seitz, S.}
\index{Simon, G.}
\index{Stuebner, R.}
\index{Weber, A.}
\begin{chunk}{axiom.bib}
@article{Buen91,
 author = "Buendgen, R. and Hagel, G. and Loos, R. and Seitz, S. and
 Simon, G. and Stuebner, R. and Weber, A.",
 title = "SAC2 in ALDES  Ein Werkzeug fur die Algorithmenforschung",
 journal = "MathPAD 1",
 volume = "3",
 year = "1991",
 pages = "3337"
}

\end{chunk}

\index{Weber, Andreas}
\begin{chunk}{axiom.bib}
@techreport{Webe92,
 author = "Weber, Andreas",
 title = "Structuring the Type System of a Computer Algebra System",
 link = "\url{http://cg.cs.unibonn.de/personalpages/weber/publications/pdf/WeberA/Weber92a.pdf}",
 institution = "WilhelmSchickardInstitut fur Informatik",
 year = "1992",
 abstract = "
 Most existing computer algebra systems are pure symbol manipulating
 systems without language support for the occurring types. This is
 mainly due to the fact that the occurring types are much more
 complicated than in traditional programming languages. In the last
 decade the study of type systems has become an active area of
 research. We will give a proposal for a type system showing that
 several problems for a type system of a symbolic computation system
 can be solved by using results of this research. We will also provide
 a variety of examples which will show some of the problems that remain
 and that will require further research.",
 paper = "Webe92b.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Bronstein, Manuel}
\begin{chunk}{axiom.bib}
@misc{Bron90,
 author = "Bronstein, Manuel",
 title =
 "$\sum^{IT}$  A stronglytyped embeddable computer algebra library",
 link = "\url{http://wwwsop.inria.fr/cafe/Manuel.Bronstein/publications/mb_papers.html}",
 abstract = "
 We describe the new computer algebra library $\sum^{IT}$ and its
 underlying design. The development of $\sum^{IT}$ is motivated by the
 need to provide highly efficient implementations of key algorithms for
 linear ordinary differential and ($q$)difference equations to
 scientific programmers and to computer algebra users, regardless of
 the programming language or interactive system they use. As such,
 $\sum^{IT}$ is not a computer algebra system per se, but a library (or
 substrate) which is designed to be ``plugged'' with minimal efforts
 into different types of client applications.",
 paper = "Bron96.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Reynolds, John C.}
\begin{chunk}{axiom.bib}
@inproceedings{Reyo74,
 author = "Reynolds, John C.",
 title = "Towards a Theory of Type Structure",
 booktitle = "Colloquim on Programming",
 year = "1974",
 pages = "911",
 paper = "Reyo74.pdf"
}

\end{chunk}

\index{Chen, Kung}
\index{Hudak, Paul}
\index{Odersky, Martin}
\begin{chunk}{axiom.bib}
@inproceedings{Chen92,
 author = "Chen, Kung and Hudak, Paul and Odersky, Martin",
 title = "Parametric Type Classes",
 booktitle = "Proc. ACM Conf. on LISP and Functional Programming",
 year = "1992",
 pages = "170-181",
 abstract =
 "We propose a generalization to Haskell's type classes where a class
 can have type parameters besides the placeholder variable. We show
 that this generalization is essential to represent container classes
 with overloaded data constructor and selector operations. We also show
 that the resulting type system has principal types and present
 unification and type reconstruction algorithms.",
 paper = "Chen92.pdf"
}

\end{chunk}

\index{Schoenfinkel, M.}
\begin{chunk}{axiom.bib}
@misc{Scho24,
 author = "Schoenfinkel, M.",
 title = "Uber die Bausteine der mathematischen Logik",
 year = "1924",
 pages = "305316"
}

\end{chunk}

\index{Jones, Simon Peyton}
\begin{chunk}{axiom.bib}
@book{Jone87,
 author = "Jones, Simon Peyton",
 title = "The Implementation of Functional Programming Languages",
 publisher = "Simon and Schuster",
 year = "1987",
 isbn = "013453333X",
 paper = "Jone87.pdf"
}

\end{chunk}

\index{Leiss, Hans}
\begin{chunk}{axiom.bib}
@inproceedings{Leis87,
 author = "Leiss, Hans",
 title = "On Type Inference for ObjectOriented Programming Languages",
 booktitle = "Int. Workshop on Computer Science Logic",
 year = "1987",
 pages = "151172",
 abstract =
 "We present a type inference calculus for objectoriented programming
 languages. Explicit polymorphic types, subtypes and multiple
 inheritance are allowed. Class types are obtained by selection from
 record types, but not considered subtypes of record types. The subtype
 relation for class types reflects the (mathematically clean)
 properties of subclass relations in objectoriented programming to a
 better extend than previous systems did.

 Based on Mitchells models for type inference, a semantics for types is
 given where types are sets of values in a model of typefree lambda
 calculus. For the sublanguage without type quantifiers and subtype
 relation, automatic type inference is possible by extending Milners
 algorithm W to deal with a polymorphic fixedpoint rule.",
}

\end{chunk}

\index{Kfoury, A.J.}
\index{Tiuryn, J.}
\index{Utzyczyn, P.}
\begin{chunk}{axiom.bib}
@inproceedings{Kfou88,
 author = "Kfoury, A.J. and Tiuryn, J. and Utzyczyn, P.",
 title = "A Proper Extension of ML with an Effective TypeAssignment",
 booktitle = "POPL 88",
 year = "1988",
 pages = "5869",
 abstract =
 "We extend the functional language ML by allowing the recursive calls
 to a function F on the righthand side of its definition to be at
 different types, all generic instances of the (derived) type of F on
 the lefthand side of its definition. The original definition of ML
 does not allow this feature. This extension does not produce new types
 beyond the usual universal polymorphic types of ML and satisfies the
 properties already enjoyed by ML: the principaltype property and the
 effective typeassignment property.",
 paper = "Kfou88.pdf"
}

\end{chunk}

\index{Tiuryn, J.}
\begin{chunk}{axiom.bib}
@article{Tiur90,
 author = "Tiuryn, J.",
 title = "Type Inference Problems  A Survey",
 journal = "LNCS",
 volume = "452",
 pages = "105120",
 year = "1990",
 paper = "Tiur90.pdf"
}

\end{chunk}

\index{Kfoury, A. J.}
\index{Tiuryn, J.}
\index{Urzyczyn, P.}
\begin{chunk}{axiom.bib}
@article{Kfou93,
 author = "Kfoury, A. J. and Tiuryn, J. and Urzyczyn, P.",
 title = "The Undecidability of the Semiunification Problem",
 journal = "Information and Computation",
 volume = "102",
 number = "1",
 year = "1993",
 pages = "83-101",
 abstract =
 "The SemiUnification Problem (SUP) is a natural generalization of
 both firstorder unification and matching. The problem arises in
 various branches of computer science and logic. Although several
 special cases of SUP are known to be decidable, the problem in general
 has been open for several years. We show that SUP in general is
 undecidable, by reducing what we call the ``boundedness problem'' of
 Turing machines to SUP. The undecidability of this boundedness problem
 is established by a technique developed in the mid1960s to prove
 related results about Turing machines.",
 paper = "Kfou93.pdf"
}

\end{chunk}

\begin{chunk}{axiom.bib}
@misc{Comp17,
 author = "CompCert",
 title = "The CompCert Formally Certified C Compiler",
 year = "2017",
 link = "\url{http://compcert.inria.fr}"
}

\end{chunk}

\index{Hudak, Paul}
\index{Peterson, John}
\index{Fasel, Joseph H.}
\begin{chunk}{axiom.bib}
@misc{Huda99,
 author = "Hudak, Paul and Peterson, John and Fasel, Joseph H.",
 title = "A Gentle Introduction to Haskell 98",
 year = "1999",
 link = "\url{https://www.haskell.org/tutorial/haskell98tutorial.pdf}",
 paper = "Huda99.pdf"
}

\end{chunk}

\index{Faxen, KarlFilip}
\begin{chunk}{axiom.bib}
@article{Faxe02,
 author = "Faxen, KarlFilip",
 title = "A Static Semantics for Haskell",
 year = "2002",
 journal = "J. Functional Programming",
 volume = "12",
 number = "45",
 pages = "295357",
 abstract =
 "This paper gives a static semantics for Haskell 98, a nonstrict
 purely functional programming language. The semantics formally
 specifies nearly all the details of the Haskell 98 type system, including the
 resolution of overloading, kind inference (including defaulting) and
 polymorphic recursion, the only major omission being a proper
 treatment of ambiguous overloading and its resolution. Overloading is
 translated into explicit dictionary passing, as in all current
 implementations of Haskell. The target language of this translation is
 a variant of the Girard-Reynolds polymorphic lambda calculus featuring
 higher order polymorphism and explicit type abstraction and
 application in the term language. Translated programs can thus still
 be type checked, although the implicit version of this system is
 impredicative. A surprising result of this formalization effort is that
 the monomorphism restriction, when rendered in a system of inference
 rules, compromises the principal type property.",
 paper = "Faxe02.pdf"
}

\end{chunk}

\index{MacLane, Saunders}
\begin{chunk}{axiom.bib}
@book{Macl92,
 author = "MacLane, Saunders",
 title = "Sheaves in Geometry and Logic: A First Introduction to Topos
 Theory",
 year = "1992",
 isbn = "9780387977102",
 publisher = "Springer"
}

\end{chunk}

\index{Manes, Ernest G.}
\begin{chunk}{axiom.bib}
@book{Mane76,
 author = "Manes, Ernest G.",
 title = "Algebraic Theories",
 publisher = "Springer",
 year = "1976",
 series = "Graduate Texts in Mathematics",
 isbn = "978198601"
}

\end{chunk}

\index{Paterson, M. S.}
\begin{chunk}{axiom.bib}
@article{Pate78,
 author = "Paterson, M. S.",
 title = "Linear Unification",
 journal = "J. Computer and System Sciences",
 volume = "16",
 number = "2",
 year = "1978",
 pages = "158167",
 abstract =
 "A unification algorithm is described which tests a set of expressions
 for unifiability and which requires time and space which are only linear
 in the size of the input",
 paper = "Pate78.pdf"
}

\end{chunk}

\index{Kanellakis, Paris C.}
\index{Mairson, Harry G.}
\index{Mitchell, John C.}
\begin{chunk}{axiom.bib}
@techreport{Kane90,
 author = "Kanellakis, Paris C. and Mairson, Harry G. and Mitchell, John C.",
 title = "Unification and ML Type Reconstruction",
 link = "\url{ftp://ftp.cs.brown.edu/pub/techreports/90/cs9026.pdf}",
 institution = "Brown University",
 year = "1990",
 number = "CS9026",
 abstract =
 "We study the complexity of type reconstruction for a core fragment of
 ML with lambda abstraction, function application, and the polymorphic
 {\bf let} declaration. We derive exponential upper and lower bounds on
 recognizing the typable core ML expressions. Our primary technical
 tool is unification of succinctly represented type expressions. After
 observing that core ML expressions, of size $n$, can be typed in
 DTIME($s^n$), we exhibit two different families of programs whose
 principal types grow exponentially. We show how to exploit the
 expressiveness of the {\bf let}polymorphism in these constructions to
 derive lower bounds on deciding typability: one leads naturally to
 NPhardness and the other to DTIME($2^{n^k}$)hardness for each integer
 $k\ge 1$. Our generic simulation of any exponential time Turing
 Machine by ML type reconstruction may be viewed as a nonstandard way
 of computing with types. Our worsecase lower bounds stand in contrast
 to practical experience, which suggests that commonly used algorithms
 for type reconstruction do not slow compilation substantially.",
 paper = "Kane90.pdf"
}

\end{chunk}

\index{Volpano, Dennis M.}
\index{Smith, Geoffrey S.}
\begin{chunk}{axiom.bib}
@techreport{Volp91,
 author = "Volpano, Dennis M. and Smith, Geoffrey S.",
 title = "On the Complexity of ML Typability with Overloading",
 institution = "Cornell University",
 year = "1991",
 number = "TR911210",
 abstract =
 "We examine the complexity of type checking in an MLstyle type system
 that permits functions to be overloaded with different types. In
 particular, we consider the extension of the ML Type system proposed
 by Wadler and Blott in the appendix of [WB89], with global overloading
 only, that is, where the only overloading is that which exists in an
 initial type assumption set; no local overloading via over and inst
 expressions is allowed. It is shown that under a correct notion of
 welltyped terms, the problem of determining whether a term is well
 typed with respect to an assumption set in this system is
 undecidable. We then investigate limiting recursion in assumption
 sets, the source of the undecidability. Barring mutual recursion is
 considered, but this proves too weak, for the problem remains
 undecidable. Then we consider a limited form of recursion called
 parametric recursion. We show that although the problem becomes
 decidable under parametric recursion, it appears harder than
 conventional ML typability, which is complete for DEXPTIME [Mai90].",
 paper = "Volp91.pdf"
}

\end{chunk}

\index{Hodges, Wilfrid}
\begin{chunk}{axiom.bib}
@article{Hodg95,
 author = "Hodges, Wilfrid",
 title = "The Meaning of Specifications I: Domains and Initial Models",
 journal = "Theoretical Computer Science",
 volume = "192",
 issue = "1",
 year = "1995",
 pages = "6789",
 abstract =
 "This is the first of a short series of papers intended to provide one
 common semantics for several different types of specification
 language, in order to allow comparison and translations. The
 underlying idea is that a specification describes the behaviour of a
 system, depending on parameters. We can represent this behaviour as a
 functor which acts on structures representing the parameters, and
 which yields a structure representing the behaviour. We characterise
 in domaintheoretic terms the class of functors which could in
 principle be specified and implemented; briefly, they are the functors
 which preserve directed colimits and whose restriction to finitely
 presented structures is recursively enumerable. We also characterise
 those functors which allow specification by initial semantics in
 universal Horn classes with finite vocabulary; these functors consist
 of a free functor (i.e. left adjoint of a forgetful functor) followed
 by a forgetful functor. The main result is that these two classes of
 functor are the same up to natural isomorphism.",
 paper = "Hodg95.pdf"
}

\end{chunk}

\index{Graetzer, George}
\begin{chunk}{axiom.bib}
@book{Grae79,
 author = "Graetzer, George",
 title = "Universal Algebra",
 publisher = "Springer",
 isbn = "9780387774862",
 year = "1979",
 paper = "Grae79.pdf"
}

\end{chunk}

\index{Dershowitz, Nachum}
\index{Jouannaud, JeanPierre}
\begin{chunk}{axiom.bib}
@techreport{Ders89,
 author = "Dershowitz, Nachum and Jouannaud, JeanPierre",
 title = "Rewrite Systems",
 year = "1989",
 number = "478",
 institution = "Laboratoire de Recherche en Informatique",
 paper = "Ders89.pdf"
}

\end{chunk}

\index{Chang, C.C.}
\index{Keisler, H. Jerome}
\begin{chunk}{axiom.bib}
@book{Chan90,
 author = "Chang, C.C. and Keisler, H. Jerome",
 title = "Model Theory",
 publisher = "North Holland",
 year = "1990",
 comment = "Studies in Logic and the Foundations of Mathematics",
 volume = "73",
 abstract =
 "Since the second edition of this book (1977), Model Theory has
 changed radically, and is now concerned with fields such as
 classification (or stability) theory, nonstandard analysis,
 modeltheoretic algebra, recursive model theory, abstract model
 theory, and model theories for a host of nonfirst order logics. Model
 theoretic methods have also had a major impact on set theory,
 recursion theory, and proof theory.

 This new edition has been updated to take account of these changes,
 while preserving its usefulness as a first textbook in model
 theory. Whole new sections have been added, as well as new exercises
 and references. A number of updates, improvements and corrections have
 been made to the main text",
}

\end{chunk}

\index{Goguen, Joseph A.}
\index{Winkler, Timothy}
\index{Meseguer, Jose}
\index{Futatsugi, Kokichi}
\index{Jouannaud, JeanPierre}
\begin{chunk}{axiom.bib}
@techreport{Gogu92,
 author = "Goguen, Joseph A. and Winkler, Timothy and Meseguer, Jose and
 Futatsugi, Kokichi and Jouannaud, JeanPierre",
 title = "Introducing OBJ",
 institution = "SRI International",
 number = "SIRCSL9203",
 year = "1992",
 abstract =
 "This is an introduction to OBJ, describing its philosophy, its
 syntax, and aspects of its semantics, both logical and operational,
 with many examples, based on Release 2.0 of OBJ3. OBJ is a wide
 spectrum firstorder functional language that is rigorously based upon
 equational logic. This semantic basis supports a declarative,
 specificational style, facilitates program verification, and allows
 OBJ to be used as a theorem prover. OBJ3 is based upon ordersorted
 equational logic, which provides a notion of subsort that rigorously
 supports multiple inheritance, exception handling and
 overloading. OBJ3 also provides parameterized programming, a technique
 which provides powerful support for design, verification, reuse, and
 maintenance.

 This facility is based on using two kinds of module: objects to
 encapsulate executable code, and in particular to define abstract data
 types by initial algebra semantics; and theories to specify both
 syntactic structure and semantic properties for modules and module
 interfaces. Each kind of module can be parameterized, where actual
 parameters are modules. For parameter instantiation, a view binds the
 formal entities in an interface theory to actual entities in a module,
 and also asserts that the target module satisfies the semantic
 requirements of the interface theory. Module expressions allow complex
 combinations of already defined modules, including sums,
 instantiations, and transformations; moreover, evaluating a module
 expression actually constructs the described software (sub)system from
 the given components.

 Default views can greatly reduce the effort of instantiating
 modules. We argue that firstorder parameterized programming includes
 much ofthe power of higherorder programming. Although OBJ executable
 code normally consists of equations that are interpreted as rewrite
 rules, OBJ objects can also encapsulate Lisp code, e.g., to provide
 efficient builtin data types, or to augment the system with new
 capabilities; we describe the syntax of the facility, and provide some
 examples. In addition, OBJ provides rewriting modulo associative,
 commutative and/or identity equations, as well as userdefinable
 evaluation strategies that allow lazy, eager, and mixed evaluation
 strategies on an operationbyoperation basis; memoization [sic] is
 also available on an operationbyoperation basis. Finally, OBJ
 provides userdefinable mixfix syntax, which supports using the
 notational conventions of particular application domains.",
 paper = "Gogu92.pdf"
}

\end{chunk}

\index{Limongelli, C.}
\index{Temperini, M.}
\begin{chunk}{axiom.bib}
@article{Limo92,
 author = "Limongelli, C. and Temperini, M.",
 title = "Abstract Specification of Structures and Methods in Symbolic
 Mathematical Computation",
 journal = "Theoretical Computer Science",
 volume = "104",
 year = "1992",
 pages = "89107",
 abstract =
 "This paper describes a methodology based on the objectoriented
 programming paradigm, to support the design and implementation of a
 symbolic computation system. The requirements of the system are
 related to the specification and treatment of mathematical
 structures. This treatment is considered from both the numerical and
 the symbolic points of view. The resulting programming system should
 be able to support the formal definition of mathematical data
 structures and methods at their highest level of abstraction, to
 perform computations on instances created from such definitions, and
 to handle abstract data structures through the manipulation of their
 logical properties. Particular consideration is given to the
 correctness aspects. Some examples of convenient application of the
 proposed design methodology are presented.",
 paper = "Limo92.pdf"
}

\end{chunk}

\index{BreazuTannen, V.}
\index{Coquand, T.}
\index{Gunter, C.A.}
\index{Scedrov, A.}
\begin{chunk}{axiom.bib}
@inproceedings{Brea89,
 author = "BreazuTannen, V. and Coquand, T. and Gunter, C.A. and
 Scedrov, A.",
 title = "Inheritance and Explicit Coercion",
 booktitle = "Logic in Computer Science",
 year = "1989",
 isbn = "0818619546",
 abstract =
 "A method is presented for providing semantic interpretations for
 languages which feature inheritance in the framework of statically
 checked, rich type disciplines. The approach is illustrated by an
 extension of the language Fun of L. Cardelli and P. Wegner (1985),
 which is interpreted via a translation into an extended polymorphic
 lambda calculus. The approach interprets inheritances in Fun as
 coercion functions already definable in the target of the
 translation. Existing techniques in the theory of semantic domains can
 then be used to interpret the extended polymorphic lambda calculus,
 thus providing many models for the original language. The method
 allows the simultaneous modeling of parametric polymorphism, recursive
 types, and inheritance, which has been regarded as problematic because
 of the seemingly contradictory characteristics of inheritance and type
 recursion on higher types. The main difficulty in providing
 interpretations for explicit type disciplines featuring inheritance is
 identified. Since interpretations follow the typechecking
 derivations, coherence theorems are required, and the authors prove
 them for their semantic method.",
 paper = "Brea89.pdf"
}

\end{chunk}

\index{BreazuTannen, Val}
\index{Coquand, Thierry}
\index{Gunter, Carl A.}
\index{Scedrov, Andre}
\begin{chunk}{axiom.bib}
@article{Brea91,
 author = "BreazuTannen, Val and Coquand, Thierry and Gunter, Carl A. and
 Scedrov, Andre",
 title = "Inheritance as Implicit Coercion",
 journal = "Information and Computation",
 volume = "93",
 number = "1",
 year = "1991",
 pages = "172221",
 abstract =
 "We present a method for providing semantic interpretations for
 languages with a type system featuring inheritance polymorphism. Our
 approach is illustrated on an extension of the language Fun of
 Cardelli and Wegner, which we interpret via a translation into an
 extended polymorphic lambda calculus. Our goal is to interpret
 inheritances in Fun via coercion functions which are definable in the
 target of the translation. Existing techniques in the theory of
 semantic domains can be then used to interpret the extended
 polymorphic lambda calculus, thus providing many models for the
 original language. This technique makes it possible to model a rich
 type discipline which includes parametric polymorphism and recursive
 types as well as inheritance. A central difficulty in providing
 interpretations for explicit type disciplines featuring inheritance in
 the sense discussed in this paper arises from the fact that programs
 can typecheck in more than one way. Since interpretations follow the
 typechecking derivations, coherence theorems are required: that is,
 one must prove that the meaning of a program does not depend on the
 way it was typechecked. Proofs of such theorems for our proposed
 interpretation are the basic technical results of this
 paper. Interestingly, proving coherence in the presence of recursive
 types, variants, and abstract types forced us to reexamine fundamental
 equational properties that arise in proof theory (in the form of
 commutative reductions) and domain theory (in the form of strict
 vs. nonstrict functions).",
 paper = "Brea91.pdf"
}

\end{chunk}

\index{Smolka, G.}
\begin{chunk}{axiom.bib}
@phdthesis{Smol89a,
 author = "Smolka, G.",
 title = "Logic Programming over Polymorphically OrderSorted Types",
 school = "Fachbereich Informatik, Universitat Kaiserslautern",
 year = "1989"
}

\end{chunk}

\index{Wirsing, Martin}
\index{Broy, Manfred}
\begin{chunk}{axiom.bib}
@inproceedings{Wirs82,
 author = "Wirsing, Martin and Broy, Manfred",
 title = "An Analysis of Semantic Models for Algebraic Specifications",
 booktitle = "Theoretical Foundations of Programming Methodology",
 year = "1982",
 publisher = "Springer",
 pages = "351413",
 isbn = "9789400978935",
 abstract =
 "Data structures, algorithms and programming languages can be
 described in a uniform implementationindependent way by axiomatic
 abstract data types i.e. by algebraic specifications defining
 abstractly the properties of objects and functions. Different semantic
 models such as initial and terminal algebras have been proposed in
 order to specify the meaning of such specifications often involving a
 considerable amount of category theory. A more concrete semantics
 encompassing these different approaches is presented:

 Abstract data types are specified in hierarchies, employing
 ``primitive'' types on which other types are based. The semantics is
 defined to be the class of all partial heterogeneous algebras
 satisfying the axioms and respecting the hierarchy. The interpretation
 of a specification as its initial or terminal algebra is just a
 constraint on the underlying data. These constraints can be modified
 according to the specification goals. E.g. the data can be specified
 using total functions; for algorithms partial functions with
 syntactically checkable domains seem appropriate whereas for
 programming languages the general notion of partiality is needed,
 Modeltheoretic and deductionoriented conditions are developed which
 ensure properties leading to criteria for the soundness and complexity
 of specifications. These conditions are generalized to parameterized
 types, i.e. type procedures mapping types into types. Syntax and
 different semantics of parameter are defined and discussed. Criteria
 for proper parameterized specifications are developed. It is shown
 that the properties of proper specifications viz. of snowballing and
 impeccable types are preserved under application of parameterized
 types — finally guaranteeing that the composition of proper small
 specifications always leads to a proper large specification."
}

\end{chunk}

\begin{chunk}{axiom.bib}
@misc{GAPx17,
 author = "The GAP Group",
 title = "GAP  Reference Manual",
 year = "2017",
 link = "\url{https://www.gapsystem.org/Manuals/doc/ref/manual.pdf}"
}

\end{chunk}

\index{Char, Bruce}
\index{Geddes, Keith O.}
\index{Gonnet, Gaston H.}
\index{Leong, Benton}
\index{Monagan, Michael B.}
\index{Watt, Stephen M.}
\begin{chunk}{axiom.bib}
@book{Char91a,
 author = "Char, Bruce and Geddes, Keith O. and Gonnet, Gaston H. and
 Leong, Benton and Monagan, Michael B. and Watt, Stephen M.",
 title = "Maple V Library Reference Manual",
 publisher = "Springer",
 year = "1991",
 isbn = "9781475721331"
 abstract =
 "The design and implementation of the Maple system is an ongoing
 project of the Symbolic Com putation Group at the University of
 Waterloo in Ontario, Canada. This manual corresponds with version V
 (roman numeral five) of the Maple system. The online help subsystem
 can be invoked from within a Maple session to view documentation on
 specific topics. In particular, the command ?updates points the user
 to documentation updates for each new version of Maple. The Maple
 project was first conceived in the autumn of 1980, growing out of
 discussions on the state of symbolic computation at the University of
 Waterloo. The authors wish to acknowledge many fruitful discussions
 with colleagues at the University of Waterloo, particularly Morven
 Gentleman, Michael Malcolm, and Frank Tompa. It was recognized in
 these discussions that none of the locally available systems for
 symbolic computation provided the facilities that should be expected
 for symbolic computation in modern computing environments. We
 concluded that since the basic design decisions for the thencurrent
 symbolic systems such as ALTRAN, CAMAL, REDUCE, and MACSYMA were based
 on 1960's computing technology, it would be wise to design a new
 system ``from scratch''. Thus we could take advantage of the software
 engineering technology which had become available in recent years, as
 well as drawing from the lessons of experience. Maple's basic features
 (elementary data structures, Input/output, arithmetic with numbers,
 and elementary simplification) are coded in a systems programming
 language for efficiency."
}

\end{chunk}

\index{Monk, J. Donald}
\begin{chunk}{axiom.bib}
@book{Monk76,
 author = "Monk, J. Donald",
 title = "Mathematical Logic",
 publisher = "Springer",
 year = "1976",
 isbn = "9781468494525"
}

\end{chunk}

\index{Meyer, Albert R.}
\index{Reinhold, Mark B.}
\begin{chunk}{axiom.bib}
@inproceedings{Meye86,
 author = "Meyer, Albert R. and Reinhold, Mark B.",
 title = "Type is not a type",
 booktitle = "POPL 86",
 pages = "287295",
 abstract =
 "A function has a dependent type when the type of its result
 depends upon the value of its argument. Dependent types originated in
 the type theory of intuitionistic mathematics and have reappeared
 independently in programming languages such as CLU, Pebble, and
 Russell. Some of these languages make the assumption that there exists
 a typeofalltypes which is its own type as well as the type
 of all other types. Girard proved that this approach is inconsistent
 from the perspective of intuitionistic logic. We apply Girard's
 techniques to establish that the typeofalltypes assumption creates
 serious pathologies from a programming perspective: a system using
 this assumption is inherently not normalizing, term equality is
 undecidable, and the resulting theory fails to be a conservative
 extension of the theory of the underlying base types. The failure of
 conservative extension means that classical reasoning about programs
 in such a system is not sound.",
}

\end{chunk}

\index{Howe, Douglas J.}
\begin{chunk}{axiom.bib}
@techreport{Howe87,
 author = "Howe, Douglas J.",
 title = "The Computational Behaviour of Girard's Paradox",
 institution = "Cornell University",
 year = "1987",
 link = "\url{https://ecommons.cornell.edu/handle/1813/6660}",
 number = "TR 87820",
 abstract =
 "In their paper ``Type'' Is Not a Type, Meyer and Reinhold argued that
 serious pathologies can result when a type of all types is added to a
 programing language with dependent types. Central to their argument is
 the claim that by following the proof of Girard's paradox it is
 possible to construct in their calculus $\lambda^{\tau \tau}$ a term
 having a fixedpoint property. Because of the tremendous amount of
 formal detail involved, they were unable to establish this claim. We
 have made use of the Nuprl proof development system in constructing a
 formal proof of Girard's paradox and analysing the resulting term. We
 can show that the term does not have the desired fixedpoint property,
 but does have a weaker form of it that is sufficient to establish some
 of the results of Meyer and Reinhold. We believe that the method used
 here is in itself of some interest, representing a new kind of
 application of a computer to a problem in symbolic logic.",
}

\end{chunk}

\index{Coquand, T.}
\begin{chunk}{axiom.bib}
@techreport{Coqu86,
 author = "Coquand, Thierry",
 title = "An Analysis of Girard's Paradox",
 institution = "Institut National de Recherche en Informatique et en
 Automatique",
 year = "1986",
 abstract =
 "We study the consistency of a few formal systems, specially some
 extensions of Church's calculus and the construction system. We show
 that Church's calculus is not compatible with the notion of
 secondorder type. We apply this result for showing that the calculus
 of construction with four levels is inconsistent. We suggest finally
 some consistent extensions of these two calculi.",
 paper = "Coqu86.pdf"
}

\end{chunk}

\index{Cardelli, Luca}
\begin{chunk}{axiom.bib}
@article{Card88,
 author = "Cardelli, Luca",
 title = "A Semantics of Multiple Inheritance",
 journal = "Information and Computation",
 volume = "76",
 number = "23",
 year = "1988",
 pages = "138164",
 paper = "Card88.pdf"
}

\end{chunk}

\index{Godel, Kurt}
\begin{chunk}{axiom.bib}
@article{Gode58,
 author = "Godel, Kurt",
 title = {\"Uber eine bisher noch nicht benutzte Erweiterung des Finiten
 Standpunktes},
 journal = "Dialectica",
 volume = "12",
 year = "1958",
 pages = "280287"
}

\end{chunk}

\index{Girard, JeanYves}
\index{Taylor, Paul}
\index{Lafont, Yves}
\begin{chunk}{axiom.bib}
@book{Gira89,
 author = "Girard, JeanYves and Taylor, Paul and Lafont, Yves",
 title = "Proofs and Types",
 publisher = "Cambridge University Press",
 year = "1989"
}

\end{chunk}

\index{Pierce, Benjamin C.}
\begin{chunk}{axiom.bib}
@phdthesis{Pier91,
 author = "Pierce, Benjamin C.",
 title = "Programming with Intersection Types and Bounded Polymorphism",
 school = "Carnegie Mellon University",
 year = "1991",
 comment = "CMUCS91205",
 abstract =
 "Intersection types and bounded quantification are complementary
 mechanisms for extending the expressive power of statically typed
 programming languages. They begin with a common framework: a simple,
 typed language with higherorder functions and a notion of subtyping.
 Intersection types extend this framework by giving every pair of types
 $\sigma$ and $\tau$ a greatest lower bound, $\sigma \land \tau$,
 corresponding intuitively to the intersection of the sets of values
 described by $\sigma$ and $\tau$. Bounded quantification extends the
 basic framework along a different axis by adding polymorphic functions
 that operate uniformly on all the subtypes of a given type. This thesis
 unifies and extends prior work on intersection types and bounded
 quantification, previously studied only in isolation, by investigating
 theoretical and practical aspects of a typed $\lambda$calculus
 incorporating both.

 The practical utility of this calculus, called $F_\land$, is
 established by examples showing, for instance, that it allows a rich
 form of ``coherent overloading'' and supports an analog of abstract
 interpretation during typechecking; for example, the addition function
 is given a type showing that it maps pairs of positive inputs to a
 positive result, pairs of zero inputs to a zero result, etc. More
 familiar programming examples are presented in terms of an extension
 of Forsythe (an Algollike language with intersection types),
 demonstrating how parametric polymorphism can be used to simplify and
 generalize Forsythe's design. We discuss the novel programming and
 debugging styles that arise in $F_\land$.

 We prove the correctness of a simple semidecision procedure for the
 subtype relation and the partial correctness of an algorithm for
 synthesizing minimal types of $F_\land$ terms. Our main tool in this
 analysis is a notion of ``canonical types,'' which allows proofs to be
 factored so that intersections are handled separately from the other
 type constructors.

 A pair of negative results illustrates some subtle complexities of
 $F_\land$. First, the subtype relation of $F_\land$ is shown to be
 undecidable; in fact, even the subtype relation of pure secondorder
 bounded quantification is undecidable, a surprising result in its own
 right. Second, the failure of an important technical property of the
 subtype relation  the existence of least upper bounds  indicates
 that typed semantic models of $F_\land$ will be more difficult to
 construct and analyze than the known typed models of intersection
 types. We propose, for future study, some simpler fragments of
 $F_\land$ that share most of its essential features, while recovering
 decidability and least upper bounds.

 We study the semantics of $F_\land$ from several points of view. An
 untyped model based on partial equivalence relations demonstrates the
 consistency of the typing rules and provides a simple interpolation
 for programs, where ``$\sigma$ is a subtype of $\tau$'' is read as
 ``$\sigma$ is a subset of $\tau$.'' More refined models can be
 obtained using a translation from $F_\land$ into the pure polymorphic
 $\lambda$calculus; in these models, ``$\sigma$ is a subtype of
 $\tau$'' is interpreted by an explicit coercion function from $\sigma$
 to $\tau$. The nonexistence of least upper bounds shows up here in
 the failure of known techniques for proving the coherence of the
 translation semantics. Finally, an equational theory of equivalences
 between $F_\land$ terms is presented and its soundness for both styles
 of model is verified.",
 paper = "Pier91.pdf"
}

\end{chunk}

\index{Pierce, Benjamin C.}
\begin{chunk}{axiom.bib}
@techreport{Pier91a,
 author = "Pierce, Benjamin C.",
 title = "Bounded Quantification is Undecidable",
 institution = "Carnegie Mellon University",
 year = "1991",
 number = "CMUCS91161",
 link = "\url{http://repository.cmu.edu/cgi/viewcontent.cgi?article=3059}",
 abstract =
 "$F_\le$ is a typed $\lambda$calculus with subtyping and bounded
 secondorder polymorphism. First introduced by Cardelli and Wegner, it
 has been widely studied as a core calculus for type systems with
 subtyping.

 Curien and Ghelli proved the partial correctness of a recursive
 procedure for computing minimal types of $F_\le$ terms and showed
 that the termination of this procedure is equivalent to the
 termination of its major component, a procedure for checking the
 subtype relation between $F_\le$ types. Ghelli later claimed that
 this procedure is also guaranteed to terminate, but the discovery of a
 subtle bug in his proof led him recently to observe that, in fact,
 there are inputs on which the subtyping procedure diverges. This
 reopens the question of the decidability of subtyping and hence of
 typechecking.

 This question is settled here in the negative, using a reduction from
 the halting problem for twocounter Turing machines to show that the
 subtype relation of $F_\le$ is undecidable.",
 paper = "Pier91a.pdf"
}

\end{chunk}

\index{Meyer, Bertrand}
\begin{chunk}{axiom.bib}
@book{Meye97,
 author = "Meyer, Bertrand",
 title = "ObjectOriented Software Construction",
 year = "1997",
 publisher = "Prentice Hall"
}

\end{chunk}

\index{Goldberg, Adele}
\index{Robson, David}
\begin{chunk}{axiom.bib}
@book{Gold83,
 author = "Goldberg, Adele and Robson, David",
 title = "Smalltalk80: The Language and Its Implementation",
 publisher = "AddisonWesley",
 year = "1983"
}

\end{chunk}

\index{Kirkerud, Bjorn}
\begin{chunk}{axiom.bib}
@book{Kirk89,
 author = "Kirkerud, Bjorn",
 title = "ObjectOriented Programming With Simula",
 year = "1989",
 series = "International Computer Science Series",
 publisher = "AddisonWesley"
}

\end{chunk}

\index{Birtwistle, Graham M.}
\begin{chunk}{axiom.bib}
@book{Birt80,
 author = "Birtwistle, Graham M.",
 title = "Simula Begin",
 year = "1980",
 publisher = "ChartwellBratt",
 isbn = "9780862380090"
}

\end{chunk}

\index{Stroustrup, Bjarne}
\begin{chunk}{axiom.bib}
@book{Stro95,
 author = "Stroustrup, Bjarne",
 title = "The C++ Programming Language (2nd Edition)",
 publisher = "AddisonWesley",
 year = "1995",
 isbn = "0201539926"
}

\end{chunk}

\index{Bruce, Kim B.}
\begin{chunk}{axiom.bib}
@inproceedings{Bruc93,
 author = "Bruce, Kim B.",
 title = "Safe type checking in a staticallytyped objectoriented
 programming language",
 booktitle = "POPL 93",
 year = "1993",
 isbn = "0897915607",
 pages = "285298",
 abstract =
 " In this paper we introduce a staticallytyped, functional,
 objectoriented programming language, TOOPL, which supports classes,
 objects, methods, instance variable, subtypes, and inheritance. It has
 proved to be surprisingly difficult to design staticallytyped
 objectoriented languages which are nearly as expressive as Smalltalk
 and yet have no holes in their typing systems. A particular problem
 with statically type checking objectoriented languages is determining
 whether a method provided in a superclass will continue to type check
 when inherited in a subclass. This program is solved in our language
 by providing type checking rules which guarantee that a method which
 type checks as part of a class will type check correctly in all legal
 subclasses in which it is inherited. This feature enables library
 providers to provide only the interfaces of classes with executables
 and still allow users to safely create subclasses. The design of TOOPL
 has been guided by an analysis of the semantics of the language, which
 is given in terms of a sufficiently rich model of the Fbounded
 secondorder lambda calculus. This semantics supported the language
 design by providing a means of proving that the typechecking rules
 for the language are sound, ensuring that welltyped terms produce
 objects of the appropriate type. In particular, in a welltyped
 program it is impossible to send a message to an object which lacks a
 corresponding method.",
 paper = "Bruc93.pdf"
}

\end{chunk}

\index{Abdali, S. Kamal}
\index{Cherry, Guy W.}
\index{Soiffer, Neil}
\begin{chunk}{axiom.bib}
@inproceedings{Abda86,
 author = "Abdali, S. Kamal and Cherry, Guy W. and Soiffer, Neil",
 title = "A Smalltalk System for Algebraic Manipulation",
 booktitle = "OOPSLA 86",
 pages = "277293",
 year = "1986",
 abstract =
 "This paper describes the design of an algebra system Views
 implemented in Smalltalk. Views contains facilities for dynamic
 creation and manipulation of computational domains, for viewing these
 domains as various categories such as groups, rings, or fields, and
 for expressing algorithms generically at the level of categories. The
 design of Views has resulted in the addition of some new abstractions
 to Smalltalk that are quite useful in their own right. Parameterized
 classes provide a means for runtime creation of new classes that
 exhibit generally very similar behavior, differing only in minor ways
 that can be described by different instantiations of certain
 parameters. Categories allow the abstraction of the common behavior of
 classes that derives from the class objects and operations satisfying
 certain laws independently of the implementation of those objects and
 operations. Views allow the runtime association of classes with
 categories (and of categories with other categories), facilitating the
 use of code written for categories with quite different
 interpretations of operations. Together, categories and views provide
 an additional mechanism for code sharing that is richer than both
 single and multiple inheritance. The paper gives algebraic as well as
 nonalgebraic examples of the abovementioned features.",
 paper = "Abda86.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Berger, Emery}
\begin{chunk}{axiom.bib}
@techreport{Berg92,
 author = "Berger, Emery",
 title = "FP + OOP = Haskell",
 institution = "University of Texas",
 number = "TR9230",
 year = "1992",
 abstract =
 "The programming language Haskell adds objectoriented functionality
 (using a concept known as type classes) to a pure functional
 programming framework. This paper describes these extensions and
 analyzes its accomplishments as well as some problems."
}

\end{chunk}

\index{Barendregt, H. P.}
\begin{chunk}{axiom.bib}
@book{Bare84,
 author = "Barendregt, H. P.",
 title = "The Lambda Calculus: Its Syntax and Semantics",
 publisher = "Elsevier Science",
 year = "1984"
}

\end{chunk}

\index{Goguen, Joseph}
\index{Meseguer, Jose}
\begin{chunk}{axiom.bib}
@article{Gogu92,
 author = "Goguen, Joseph and Meseguer, Jose",
 title = "Ordersorted Algebra I : Equational Deduction for Multiple
 Inheritance, Overloading, Exceptions, and Partial Operations",
 journal = "Theoretical Computer Science",
 volume = "105",
 number = "2",
 year = "1992",
 pages = "217273",
 abstract =
 "This paper generalizes manysorted algebra (MSA) to ordersorted
 algebra (OSA) by allowing a partial ordering relation on the set of
 sorts. This supports abstract data types with multiple inheritance (in
 roughly the sense of objectoriented programming), several forms of
 polymorphism and overloading, partial operations (as total on
 equationally defined subsorts), exception handling, and an operational
 semantics based on term rewriting. We give the basic algebraic
 constructions for OSA, including quotient, image, product and term
 algebra, and we prove their basic properties, including quotient,
 homomorphism, and initiality theorems. The paper's major mathematical
 results include a notion of OSA deduction, a completeness theorem for
 it, and an OSA Birkhoff variety theorem. We also develop conditional
 OSA, including initiality, completeness, and McKinseyMalcev
 quasivariety theorems, and we reduce OSA to (conditional) MSA, which
 allows lifting many known MSA results to OSA. Retracts, which
 intuitively are left inverses to subsort inclusions, provide
 relatively inexpensive runtime error handling. We show that it is
 safe to add retracts to any OSA signature, in the sense that it gives
 rise to a conservative extension. A final section compares and
 contrasts many different approaches to OSA. This paper also includes
 several examples demonstrating the flexibility and applicability of
 OSA, including some standard benchmarks like stack and list, as well
 as a much more substantial example, the number hierarchy from the
 naturals up to the quaternions.",
 paper = "Gogu92.pdf"
}

\end{chunk}

\index{Cardelli, Luca}
\begin{chunk}{axiom.bib}
@inproceedings{Card86,
 author = "Cardelli, Luca",
 title = "Typechecking Dependent Types and Subtypes",
 link =
 "\url{http://lucacardelli.name/Papers/Dependent%20Typechecking.US.pdf}",
 year = "1996",
 journal = "LNCS",
 volume = "523",
 pages = "4557",
 paper = "Card86.pdf"
}

\end{chunk}

\index{Zariski, Oscar}
\index{Samuel, Pierre}
\begin{chunk}{axiom.bib}
@book{Zari75,
 author = "Zariski, Oscar and Samuel, Pierre",
 title = "Commutative Algebra",
 Series = "Graduate Texts in Mathematics",
 year = "1975",
 publisher = "SpringerVerlag",
 isbn = "9780387900896"
}

\end{chunk}

\index{Marcus, Daniel A.}
\begin{chunk}{axiom.bib}
@book{Marc77,
 author = "Marcus, Daniel A.",
 title = "Number Fields",
 publisher = "Springer",
 year = "1977",
 isbn = "9780387902791"
}

\end{chunk}

\index{Lang, Serge}
\begin{chunk}{axiom.bib}
@book{Lang05,
 author = "Lang, Serge",
 title = "Algebra",
 publisher = "Springer",
 year = "2005",
 series = "Graduate Texts in Mathematics",
 isbn = "9780387953854"
}

\end{chunk}

\index{Fuh, YouChin}
\index{Mishra, Prateek}
\begin{chunk}{axiom.bib}
@article{Fuhx89,
 author = "Fuh, YouChin and Mishra, Prateek",
 title = "Polymorphic Subtype Inference  Closing the TheoryPractice Gap",
 journal = "Lecture Notes in Computer Science",
 volume = "352",
 year = "1989",
 pages = "167183",
 paper = "Fuhx89.pdf"
}

\end{chunk}

\index{Kaes, Stefan}
\begin{chunk}{axiom.bib}
@article{Kaes92,
 author = "Kaes, Stefan",
 title = "Type Inference in the Presence of Overloading, Subtyping, and
 Recursive Types",
 journal = "LISP Pointers",
 volume = "V",
 number = "1",
 pages = "193204",
 year = "1992",
 paper = "Kaes92.pdf"
}

\end{chunk}

\index{Robinson, J. S. Derek}
\begin{chunk}{axiom.bib}
@book{Robi96,
 author = "Robinson, J. S. Derek",
 title = "A Course in the Theory of Groups",
 year = "1996",
 series = "Graduate Texts in Mathematics",
 isbn = "9781461264439",
 publisher = "Springer"
}

\end{chunk}

\index{Thatte, Satish R.}
\begin{chunk}{axiom.bib}
@article{That91,
 author = "Thatte, Satish R.",
 title = "Coercive Type Isomorphism",
 journal = "LNCS",
 volume = "523",
 year = "1991",
 pages = "2949",
 abstract =
 "There is a variety of situations in programming in which it is useful
 to think of two distinct types as representations of the same abstract
 structure. However, language features which allow such relations to
 be effectively expressed at an abstract level are lacking. We propose
 a generalization of MLstyle type inference to deal effectively with
 this problem. Under the generalization, the (normally free) algebra
 of type expressions is subjected to an equational theory generated by
 a finite set of userspecified equations that express
 interconvertibility relations between objects of ``equivalent'' types.
 Each type equation is accompanied by a pair of conversion functions
 that are (at least partial) inverses. We show that so long as the
 equational theory satisfies a reasonably permissive syntactic
 constraint, the resulting type system admits a complete type infer
 ence algorithm that produces unique principal types. The main
 innovation required in type inference is the replacement of ordinary
 free unification by unification in the userspecified equational
 theory. The syntactic constraint ensures that the latter is unitary,
 i.e., yields unique most general unifiers. The proposed constraint is
 of independent interest as the first known syntactic
 characterization for a class of unitary theories. Some of the
 applications of the system are similar to those of Wadler's views
 [Wad87]. However, our system is considerably more general, and more
 orthogonal to the underlying language.",
 paper = "That91.pdf"
}

\end{chunk}

\index{Bundgen, Reinhard}
\begin{chunk}{axiom.bib}
@book{Bund93,
 author = "Bundgen, Reinhard",
 title = "The ReDuX System Documentation",
 year = "1993",
 publisher = "WSI"
}

\end{chunk}

\index{Bundgen, Reinhard}
\begin{chunk}{axiom.bib}
@inproceedings{Bund93a,
 author = "Bundgen, Reinhard",
 title = {Reduce the Redex $>$ ReDuX},
 booktitle = "Proc. Rewriting Techniques and Applications 93",
 year = "1993",
 pages = "446450",
 publisher = "SpringerVerlag",
 isbn = "3540568689"
}

\end{chunk}

\index{Cohn, P. M.}
\begin{chunk}{axiom.bib}
@book{Cohn91,
 author = "Cohn, P. M.",
 title = "Algebra",
 publisher = "John Wiley and Sons",
 year = "1991",
 isbn = "0471101680",
 paper = "Cohn91.pdf"
}

\end{chunk}

\index{Jouannaud, JeanPierre}
\index{Kirchner, Claude}
\begin{chunk}{axiom.bib}
@book{Joua90,
 author = "Jouannaud, JeanPierre and Kirchner, Claude",
 title = "Solving Equations in Abstract Algebras: A Rulebased Survey of
 Unification",
 year = "1990",
 publisher = "Universite de ParisSud"
}

\end{chunk}

\index{Kowalsky, Hans Joachim}
\begin{chunk}{axiom.bib}
@book{Kowa63,
 author = "Kowalsky, Hans Joachim",
 title = "Linear Algebra",
 year = "1963",
 publisher = "Walter de Gruyter",
 comment = "(German)"
}

\end{chunk}

\index{Reynolds, John C.}
\begin{chunk}{axiom.bib}
@inproceedings{Reyn80,
 author = "Reynolds, John C.",
 title = "Using Category Theory to Design Implicit Conversions and
 Generic Operators",
 booktitle = "Lecture Notes in Computer Science",
 year = "1980",
 abstract =
 "A generalization of manysorted algebras, called categorysorted
 algebras, is defined and applied to the languagedesign problem of
 avoiding anomalies in the interaction of implicit conversions and
 generic operators. The definition of a simple imperative language
 (without any binding mechanisms) is used as an example.",
 paper = "Reyn80.pdf"
}

\end{chunk}


\index{Stansifer, R.}
\begin{chunk}{axiom.bib}
@inproceedings{Stan88,
 author = "Stansifer, R.",
 title = "Type Inference with Subtypes",
 booktitle = "POPL 88",
 pages = "8897",
 year = "1988",
 abstract =
 "We give an algorithm for type inference in a language with functions,
 records, and variant records. A similar language was studied by
 Cardelli who gave a type checking algorithm. This language is
 interesting because it captures aspects of objectoriented programming
 using subtype polymorphism. We give a type system for deriving types
 of expressions in the language and prove the type inference algorithm
 is sound, i.e., it returns a type derivable from the proof system. We
 also prove that the type the algorithm finds is a ``principal'' type,
 i.e., one which characterizes all others. The approach taken here is
 due to Milner for universal polymorphism. The result is a synthesis of
 subtype polymorphism and universal polymorphism.",
 paper = "Stan88.pdf"
}

\end{chunk}

\index{Huet, Gerard}
\index{Oppen, Derek C.}
\begin{chunk}{axiom.bib}
@techreport{Huet80,
 author = "Huet, Gerard and Oppen, Derek C.",
 title = "Equations and Rewrite Rules: A Survey",
 institution = "Stanford Verification Group",
 number = "STANCS80785",
 year = "1980",
 abstract =
 "Equations occur frequently in mathematics, logic and computer
 science. in this paper, we survey the main results concerning
 equations, and the methods available for reasoning about them and
 computing with them. The survey is selfcontained and unified, using
 traditional abstract algebra.

 Reasoning about equations may involve deciding if an equation follows
 from a given set of equations (axioms), or if an equation is true in a
 given theory. When used in this manner, equations state properties
 that hold between objects. Equations may also be used as definitions;
 this use is well known in computer science: programs written in
 applicative languages, abstract interpreter definitions, and algebraic
 data type definitions are clearly of this nature. When these
 equations are regarded as oriented ``rewrite rules'', we may actually
 use them to compute.

 In addition to covering these topics, we discuss the problem of
 ``solving'' equations (the ``unification'' problem), the problem of
 proving termination of sets of rewrite rules, and the decidability and
 complexity of word problems and of combinations of equational
 theories. We restrict ourselves to firstorder equations, and do not
 treat equations which define nonterminating computations or recent
 work on rewrite rules applied to equational congruence classes.",
 paper = "Huet80.pdf"
}

\end{chunk}

\index{Remy, Didier}
\begin{chunk}{axiom.bib}
@inproceedings{Remy89,
 author = "Remy, Didier",
 title = "Typechecking Records and Variants in a Natural Extension of ML",
 booktitle = "POPL 89",
 isbn = "9780897912945",
 publisher = "ACM",
 link = "\url{https://www.cs.cmu.edu/~aldrich/courses/819/row.pdf}",
 abstract =
 "We describe an extension of ML with records where inheritance is
 given by ML generic polymorphism. All common operations on records but
 concatenation are supported, in particular, the free extension of
 records. Other operations such as renaming of fields are added. The
 solution relies on an extension of ML, where the language of types is
 sorted and considered modulo equations, and on a record extension of
 types. The solution is simple and modular and the type inference
 algorithm is efficient in practice.",
 paper = "Remy89.pdf"
}

\end{chunk}

\index{Wand, Mitchell}
\index{O'Keefe, Patrick}
\begin{chunk}{axiom.bib}
@inproceedings{Wand89,
 author = "Wand, Mitchell and O'Keefe, Patrick",
 title = "On the Complexity of Type Inference with Coercion",
 booktitle = "FPCA 89",
 pages = "293298",
 isbn = "0897913280",
 abstract =
 "We consider the following problem: Given a partial order $(C,\le)$ of
 base types and coercions between them, a set of constants with types
 generated from $C$, and a term $M$ in the lambda calculus with these
 constants, does $M$ have a typing with this set of types? This
 problem abstracts the problem of typability over a fixed set of base
 types and coercions (e.g. int $\le$ real, or a fixed set of coercions
 between opaque data types). We show that in general, the problem of
 typability of lambdaterms over a given partiallyordered set of base
 types is NPcomplete. However, if the partial order is known to be a
 tree, then the satisfiability problem is solvable in (loworder)
 polynomial time. The latter result is of practical importance, as
 trees correspond to the coercion structure of singleinheritance
 object systems.",
 paper = "Wand89.pdf"
}

\end{chunk}

\index{Lincoln, Patrick}
\index{Mitchell, John C.}
\begin{chunk}{axiom.bib}
@inproceedings{Linc92,
 author = "Lincoln, Patrick and Mitchell, John C.",
 title = "Algorithmic Aspects of Type Inference with Subtypes",
 booktitle = "POPL 92",
 pages = "293304",
 year = "1992",
 abstract =
 "We study the complexity of type inference for programming languages
 with subtypes. There are three language variations that effect the
 problem: (i) basic functions may have polymorphic or more limited
 types, (ii) the subtype hierarchy may be fixed or vary as a result of
 subtype declarations within a program, and (iii) the subtype hierarchy
 may be an arbitrary partial order or may have a more restricted form,
 such as a tree or lattice. The naive algorithm for inferring a most
 general polymorphic type, under variable subtype hypotheses, requires
 deterministic exponential time. If we fix the subtype ordering, this
 upper bound grows to nondeterministic exponential time. We show that
 it is NPhard to decide whether a lambda term has a type with respect
 to a fixed subtype hierarchy (involving only atomic type names). This
 lower bound applies to monomorphic or polymorphic languages. We give
 PSPACE upper bounds for deciding polymorphic typability if the subtype
 hierarchy has a lattice structure or the subtype hierarchy varies
 arbitrarily. We also give a polynomial time algorithm for the limited
 case where there are no function constants and the type hierarchy
 is either variable or any fixed lattice.",
 paper = "Linc92.pdf"
}

\end{chunk}

\index{Davis, Martin D.}
\index{Sigal, Ron}
\index{Weyuker, Elaine J.}
\begin{chunk}{axiom.bib}
@book{Davi94,
 author = "Davis, Martin D. and Sigal, Ron and Weyuker, Elaine J.",
 title = "Computability, Complexity, and Languages: Fundamentals of
 Theoretical Computer Science",
 publisher = "Academic Press",
 year = "1994",
 isbn = "9780122063824"
}

\end{chunk}

\index{Farmer, William M.}
\begin{chunk}{axiom.bib}
@article{Farm90,
 author = "Farmer, William M.",
 title = "A Partial Functions Version of Church's Simple Theory of Types",
 journal = "The Journal of Symbolic Logic",
 volume = "55",
 number = "3",
 year = "1990",
 pages = "12691291",
 abstract =
 "Church's simple theory of types is a system of higherorder logic in
 which functions are assumed to be total. We present in this paper a
 version of Church's system called PF in which functions may be
 partial. The semantics of PF, which is based on Henkin's
 generalmodels semantics, allows terms to be nondenoting but requires
 formulas to always denote a standard truth value. We prove that PF is
 complete with respect to its semantics. The reasoning mechanism in PF
 for partial functions corresponds closely to mathematical practice,
 and the formulation of PF adheres tightly to the framework of
 Church's system.",
 paper = "Farm90.pdf"
}

\end{chunk}

\index{Odifreddi, Piergiorgio}
\begin{chunk}{axiom.bib}
@book{Odif92,
 author = "Odifreddi, Piergiorgio",
 title = "Classical Recursion Theory: The Theory of Functions and Sets of
 Natural Numbers",
 publisher = "Elsevier",
 year = "1992"
}

\end{chunk}

\index{Buchberger, Bruno}
\index{Collins, George Edwin}
\index{Loos, Rudiger}
\begin{chunk}{axiom.bib}
@book{Buch82,
 author = "Buchberger, Bruno and Collins, George Edwin and Loos, Rudiger",
 title = "Computer Algebra: Symbolic and Algebraic Computation",
 publisher = "Springer",
 isbn = "9783211816844",
 paper = "Buch82.pdf"
}

\end{chunk}

\index{Lauer, M.}
\begin{chunk}{axiom.bib}
@InCollection{Laue82,
 author = "Lauer, M.",
 title = "Computing by Homomorphic Images",
 booktitle = "Computer Algebra: Symbolic and Algebraic Computation",
 pages = "139168",
 year = "1982",
 publisher = "Springer",
 isbn = "9783211816844",
 abstract =
 "After explaining the general technique of Computing by homomorphic
 images, the Chinese remainder algorithm and the Hensel lifting
 construction are treated extensively. Chinese remaindering is first
 presented in an abstract setting. Then the specialization to Euclidean
 domains, in particular $\mathbb{Z}$, $\mathbb{K}[y]$, and
 $\mathbb{Z}[y_1,\ldots,y_n]$ is considered. For both techniques,
 Chinese remaindering as well as the lifting algorithms, a complete
 computational example is presented and the most frequent application
 is discussed."
}

\end{chunk}

\index{Huet, Gerard}
\index{Plotkin, G.}
\begin{chunk}{axiom.bib}
@book{Huet91,
 author = "Huet, Gerard and Plotkin, G.",
 title = "Logical Frameworks",
 publisher = "Cambridge University",
 year = "1991"
}

\end{chunk}

\index{Harper, Robert}
\index{Honsell, Furio}
\index{Plotkin, Gordon}
\begin{chunk}{axiom.bib}
@article{Harp93,
 author = "Harper, Robert and Honsell, Furio and Plotkin, Gordon",
 title = "A Framework for Defining Logics",
 journal = "J. ACM",
 volume = "40",
 number = "1",
 year = "1993",
 pages = "143184",
 abstract =
 "The Edinburgh Logical Framework (LF) provides a means to define (or
 present) logics. It is based on a general treatment of syntax, rules,
 and proofs by means of a typed $\lambda$calculus with dependent
 types. Syntax is treated in a style similar to, but more general than,
 MartinL{\"o}f's system of arities. The treatment of rules and proofs
 focuses on his notion of a judgment. Logics are represented in LF via
 a new principle, the judgments as types principle, whereby each
 judgment is identified with the type of its proofs. This allows for a
 smooth treatment of discharge and variable occurrence conditions and
 leads to a uniform treatment of rules and proofs whereby rules are
 viewed as proofs of higherorder judgments and proof checking is
 reduced to type checking. The practical benefit of our treatment of
 formal systems is that logicindependent tools, such as proof editors
 and proof checkers, can be constructed.",
 paper = "Harp93.pdf"
}

\end{chunk}

\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@inproceedings{Pfen89,
 author = "Pfenning, Frank",
 title = "Elf: A Language for Logic Definition and Verified Metaprogramming",
 booktitle = "Logic in Computer Science 89",
 year = "1989",
 pages = "313322",
 abstract =
 "We describe Elf, a metalanguage for proof manipulation environments
 that are independent of any particular logical system. Elf is
 intended for metaprograms such as theorem provers, proof
 transformers, or type inference programs for programming languages
 with complex type systems. Elf unifies logic definition (in the style
 of LF, the Edinburgh Logical Framework) with logic programming (in the
 style of $\lambda$Prolog). It achieves this unification by giving
 types an operational interpretation, much the same way that Prolog gives
 certain formulas (Hornclauses) an operational interpretation.
 Novel features of Elf include: (1) the Elf search process
 automatically constructs terms that can represent objectlogic proofs,
 and thus a program need not construct them explicitly, (2) the partial
 correctness of metaprograms with respect to a given logic can be
 expressed and proved in Elf itself, and (3) Elf exploits Elliott’s
 unification algorithm for a $\lambda$calculus with dependent types.",
 paper = "Pfen89.pdf"
}

\end{chunk}

\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@inproceedings{Pfen91,
 author = "Pfenning, Frank",
 title = "Logic Programming in the LF Logical Framework",
 booktitle = "Proc. First Workshop on Logical Frameworks",
 year = "1991",
 paper = "Pfen91.pdf"
}

\end{chunk}

\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@inproceedings{Pfen91a,
 author = "Pfenning, Frank",
 title = "Unification and AntiUnification in the Calculus of Constructions",
 booktitle = "Logic in Computer Science 91",
 year = "1991",
 pages = "7485",
 abstract =
 "We present algorithms for unification and anti unification in the
 Calculus of Constructions, where occurrences of free variables (the
 variables subject to instantiation) are restricted to higherorder
 patterns, a notion investigated for the simplytyped $\lambda$calculus
 by Miller. Most general unifiers and least common antiinstances are
 shown to exist and are unique up to a simple equivalence. The
 unification algorithm is used for logic program execution and type and
 term reconstruction in the current implementation of Elf and has
 shown itself to be practical. The main application of the
 antiunification algorithm we have in mind is that of proof
 generalization.",
 paper = "Pfen91a.pdf"
}

\end{chunk}
+Fix call to nonexported GCL package symbol
diff git a/src/axiomwebsite/patches.html b/src/axiomwebsite/patches.html
index ec72a72..7265b8b 100644
 a/src/axiomwebsite/patches.html
+++ b/src/axiomwebsite/patches.html
@@ 5762,6 +5762,8 @@ books/bookvol10.1 add Tutorial on Quantifier Elimination by Hong
readme add June Huh quote
20170702.01.tpd.patch
books/bookvol10.1 add Weber thesis
+20170721.01.tpd.patch
+bookvol5 system:chdir to system::chdir
diff git a/src/interp/patches.lisp.pamphlet b/src/interp/patches.lisp.pamphlet
index bc2bfda..0b98323 100644
 a/src/interp/patches.lisp.pamphlet
+++ b/src/interp/patches.lisp.pamphlet
@@ 123,7 +123,7 @@ It used to read:
((eql (directoryp (interpmakedirectory (car args))) 1)
(setq $currentdirectory
(namestring (truename (interpmakedirectory (car args)))))))
#+(or :kcl :ibcl :CCL) (system:CHDIR $currentdirectory)
+#+(or :kcl :ibcl :CCL) (system::chdir $currentdirectory)
#+(and :lucid :ibm/370)
(setq *defaultpathnamedefaults* "")
#(and :lucid :ibm/370)
@@ 143,7 +143,7 @@ It used to read:
((eql (directoryp (interpmakedirectory (car args))) 1)
(setq $currentdirectory
(namestring (truename (interpmakedirectory (car args)))))))
#+(or :kcl :ibcl :CCL) (system:CHDIR $currentdirectory)
+#+(or :kcl :ibcl :CCL) (system::chdir $currentdirectory)
#+(and :lucid :ibm/370)
(setq *defaultpathnamedefaults* "")
#(and :lucid :ibm/370)
diff git a/src/interp/util.lisp.pamphlet b/src/interp/util.lisp.pamphlet
index 75071e1..08b5e86 100644
 a/src/interp/util.lisp.pamphlet
+++ b/src/interp/util.lisp.pamphlet
@@ 1145,7 +1145,7 @@ Run the etags command on all of the lisp code. Then run the
final TAGS file is constructed in the {\bf tmp} directory.
\begin{chunk}{maketagsfile}
(defun maketagsfile ()
#+:gcl (system:chdir "/tmp")
+#+:gcl (system::chdir "/tmp")
#:gcl (vmlisp::obey (concatenate 'string "cd " "/tmp"))
(obey (concat "etags " (makeabsolutefilename "../../src/interp/*.lisp")))
(spadtagsfromdirectory "../../src/interp" "boot")

1.7.5.4