From 8aee0ff8e7a2e5d27c5b48b54e27979a5d1651b0 Mon Sep 17 00:00:00 2001 From: Francesco Mazzoli Date: Wed, 16 Jan 2013 17:26:42 +0000 Subject: [PATCH] renaming, more stuff --- docs/{background.agda => InterimReport.agda} | 10 +- docs/{background.bib => InterimReport.bib} | 292 +++++++------- docs/{background.tex => InterimReport.tex} | 382 ++++++++++++++----- docs/Makefile | 4 +- docs/background-notes.org | 71 ---- 5 files changed, 451 insertions(+), 308 deletions(-) rename docs/{background.agda => InterimReport.agda} (92%) rename docs/{background.bib => InterimReport.bib} (95%) rename docs/{background.tex => InterimReport.tex} (77%) delete mode 100644 docs/background-notes.org diff --git a/docs/background.agda b/docs/InterimReport.agda similarity index 92% rename from docs/background.agda rename to docs/InterimReport.agda index 15e04d7..c433d59 100644 --- a/docs/background.agda +++ b/docs/InterimReport.agda @@ -1,4 +1,4 @@ -module background where +module InterimReport where import Level @@ -29,7 +29,7 @@ module Core where if true / _ then x else _ = x if false / _ then _ else x = x - if_then_else_ : forall {a} {P : Bool -> Set a} -> + if_then_else_ : forall {a} {P : Bool -> Set a} (x : Bool) -> P true -> P false -> P x if true then x else _ = x if false then _ else y = y @@ -123,14 +123,14 @@ module Ext where map _ nil = nil map f (x :: xs) = f x :: map f xs - cong : forall {A B} {x y : A} -> (f : A -> B) -> x == y -> f x == f y + cong : forall {A B} {x y : A} (f : A -> B) -> x == y -> f x == f y cong f refl = refl map-id==id : {A : Set} (xs : List A) -> map id xs == id xs map-id==id nil = refl map-id==id (x :: xs) = cong (_::_ x) (map-id==id xs) - _#_ : forall {A B C : Set} -> (B -> C) -> (A -> B) -> (A -> C) + _#_ : {A B C : Set} -> (B -> C) -> (A -> B) -> (A -> C) f # g = \ x -> f (g x) map-#==#-map : forall {A B C} (f : B -> C) (g : A -> B) (xs : List A) -> @@ -142,7 +142,7 @@ module Ext where postulate ext : forall {A B} {f g : A -> B} -> ((x : A) -> f x == g x) -> f == g - map-id==id' : {A : Set} -> map {A} id == id + map-id==id' : forall {A} -> map {A} id == id map-id==id' = ext map-id==id map-#==#-map' : forall {A B C} (f : B -> C) (g : A -> B) -> diff --git a/docs/background.bib b/docs/InterimReport.bib similarity index 95% rename from docs/background.bib rename to docs/InterimReport.bib index c1f8a16..eab760f 100644 --- a/docs/background.bib +++ b/docs/InterimReport.bib @@ -60,25 +60,14 @@ publisher = {Cambridge University Press} } - -@inbook{Constable86, -author = {Constable, Robert L. and Stuart F. Allen and H. M. Bromley and W. R. Cleaveland and J. F. Cremer and R. W. Harper and Douglas J. Howe and T. B. Knoblock and N. P. Mendler and P. Panangaden and James T. Sasaki and Scott F. Smith}, -title = {Implementing Mathematics with the NuPRL Proof Development System}, -year = {1986}, -publisher = {Prentice-Hall}, -address = {NJ} +@inbook{NuPRL, + author = {Robert L. 
Constable and the PRL Group}, + title = {Implementing Mathematics with The NuPRL Proof Development System}, + year = 1986, + publisher = Prentice-Hall } -@article{Altenkirch2010, -author = {Altenkirch, Thorsten and Danielsson, N and L\"{o}h, A and Oury, Nicolas}, -file = {:home/bitonic/docs/papers/PiSigma.pdf:pdf}, -journal = {Functional and Logic \ldots}, -number = {Sheard 2005}, -title = {{$\Pi$$\Sigma$: dependent types without the sugar}}, -url = {http://www.springerlink.com/index/91W712G2806R575H.pdf}, -year = {2010} -} @article{Altenkirch2007, address = {New York, New York, USA}, author = {Altenkirch, Thorsten and McBride, Conor and Swierstra, Wouter}, @@ -93,14 +82,6 @@ title = {{Observational equality, now!}}, url = {http://portal.acm.org/citation.cfm?doid=1292597.1292608}, year = {2007} } -@article{Barendregt1991, -author = {Barendregt, Henk}, -file = {:home/bitonic/docs/papers/lambda-cube.pdf:pdf}, -journal = {Journal of functional programming}, -title = {{Introduction to generalized type systems}}, -url = {http://www.diku.dk/hjemmesider/ansatte/henglein/papers/barendregt1991.pdf}, -year = {1991} -} @article{Bove2009, author = {Bove, Ana and Dybjer, Peter and Norell, Ulf}, file = {:home/bitonic/docs/papers/agda-overview.pdf:pdf}, @@ -109,74 +90,39 @@ title = {{A brief overview of Agda - a functional language with dependent types} url = {http://www.springerlink.com/index/h12lq70470983732.pdf}, year = {2009} } -@article{Brady2012, -author = {Brady, Edwin}, -file = {:home/bitonic/docs/papers/idris-implementation.pdf:pdf}, -journal = {Unpublished draft}, -number = {November}, -title = {{Implementing General Purpose Dependently Typed Programming Languages}}, -url = {http://www.cs.st-andrews.ac.uk/~eb/drafts/impldtp.pdf}, -year = {2012} +@article{Pierce2000, +author = {Pierce, Benjamin C. and Turner, David N.}, +doi = {10.1145/345099.345100}, +file = {:home/bitonic/docs/papers/local-type-inference.pdf:pdf}, +issn = {01640925}, +journal = {ACM Transactions on Programming Languages and Systems}, +month = jan, +number = {1}, +pages = {1--44}, +title = {{Local type inference}}, +url = {http://portal.acm.org/citation.cfm?doid=345099.345100}, +volume = {22}, +year = {2000} } -@article{Chapman2010, +@article{Yorgey2012, address = {New York, New York, USA}, -author = {Chapman, James and Dagand, Pierre-\'{E}variste and McBride, Conor and Morris, Peter}, -doi = {10.1145/1863543.1863547}, -file = {:home/bitonic/docs/papers/conor-levitation.pdf:pdf}, -isbn = {9781605587943}, -journal = {Proceedings of the 15th ACM SIGPLAN international conference on Functional programming - ICFP '10}, -pages = {3}, +author = {Yorgey, Brent a. 
and Weirich, Stephanie and Cretin, Julien and {Peyton Jones}, Simon and Vytiniotis, Dimitrios and Magalh\~{a}es, Jos\'{e} Pedro}, +doi = {10.1145/2103786.2103795}, +file = {:home/bitonic/docs/papers/haskell-promotion.pdf:pdf}, +isbn = {9781450311205}, +journal = {Proceedings of the 8th ACM SIGPLAN workshop on Types in language design and implementation - TLDI '12}, +pages = {53}, publisher = {ACM Press}, -title = {{The gentle art of levitation}}, -url = {http://portal.acm.org/citation.cfm?doid=1863543.1863547}, -year = {2010} -} -@article{Church1936, -author = {Church, Alonzo}, -file = {:home/bitonic/docs/papers/church-lc.pdf:pdf}, -journal = {American journal of mathematics}, -number = {2}, -pages = {345--363}, -title = {{An unsolvable problem of elementary number theory}}, -url = {http://www.ams.org/leavingmsn?url=http://dx.doi.org/10.2307/2371045}, -volume = {58}, -year = {1936} -} -@article{Church1940, -author = {Church, Alonzo}, -file = {:home/bitonic/docs/papers/church-stlc.pdf:pdf}, -journal = {J. Symb. Log.}, -number = {2}, -pages = {56--68}, -title = {{A formulation of the simple theory of types}}, -url = {http://www.ams.org/leavingmsn?url=http://dx.doi.org/10.2307/2266170}, -volume = {5}, -year = {1940} -} -@article{Coquand1986, -author = {Coquand, Thierry and Huet, Gerard}, -file = {:home/bitonic/docs/papers/coc.pdf:pdf}, -title = {{The calculus of constructions}}, -url = {http://hal.inria.fr/docs/00/07/60/24/PDF/RR-0530.pdf}, -year = {1986} -} -@article{Curry1934, -author = {Curry, Haskell B.}, -file = {:home/bitonic/docs/papers/curry-stlc.pdf:pdf}, -journal = {Proceedings of the National Academy of Sciences of the United States of America}, -number = {1930}, -pages = {584--590}, -title = {{Functionality in combinatory logic}}, -url = {http://www.ncbi.nlm.nih.gov/pmc/articles/pmc1076489/}, -volume = {511}, -year = {1934} +title = {{Giving Haskell a promotion}}, +url = {http://dl.acm.org/citation.cfm?doid=2103786.2103795}, +year = {2012} } -@article{Dybjer1991, -author = {Dybjer, Peter}, -file = {:home/bitonic/docs/papers/dybjer-inductive.ps:ps}, -journal = {Logical Frameworks}, -title = {{Inductive sets and families in Martin-L\"{o}f's type theory and their set-theoretic semantics}}, -url = {http://books.google.com/books?hl=en\&lr=\&id=X9wfWwslFQIC\&oi=fnd\&pg=PA280\&dq=Inductive+Sets+and+Families+in+Martin-L\%C3\%B6f\%27s+Type+Theory+and+Their+Set-Theoretic+Semantics\&ots=LewzM17GcW\&sig=vF4GgtlEBSf1uwRV1o\_unDtLats}, +@article{Barendregt1991, +author = {Barendregt, Henk}, +file = {:home/bitonic/docs/papers/lambda-cube.pdf:pdf}, +journal = {Journal of functional programming}, +title = {{Introduction to generalized type systems}}, +url = {http://www.diku.dk/hjemmesider/ansatte/henglein/papers/barendregt1991.pdf}, year = {1991} } @article{Hurkens1995, @@ -195,28 +141,6 @@ publisher = {Bibliopolis}, title = {{Intuitionistic type theory}}, year = {1984} } -@article{McBride2004, -author = {McBride, Conor}, -doi = {10.1017/S0956796803004829}, -file = {:home/bitonic/docs/papers/view-from-the-left.ps.gz:gz}, -journal = {Journal of Functional Programming}, -month = jan, -number = {1}, -pages = {69--111}, -title = {{The View from The Left}}, -url = {http://strictlypositive.org/view.ps.gz}, -volume = {14}, -year = {2004} -} -@phdthesis{Norell2007, -author = {Norell, Ulf}, -file = {:home/bitonic/docs/papers/ulf-thesis.pdf:pdf}, -isbn = {9789172919969}, -school = {Chalmers University of Technology and G\"{o}teborg University}, -title = {{Towards a practical programming language based on 
dependent type theory}}, -url = {http://www.cse.chalmers.se/~ulfn/papers/thesis.pdf}, -year = {2007} -} @article{Oury2008, address = {New York, New York, USA}, author = {Oury, Nicolas and Swierstra, Wouter}, @@ -230,27 +154,34 @@ title = {{The power of Pi}}, url = {http://portal.acm.org/citation.cfm?doid=1411204.1411213}, year = {2008} } -@article{Pierce2000, -author = {Pierce, Benjamin C. and Turner, David N.}, -doi = {10.1145/345099.345100}, -file = {:home/bitonic/docs/papers/local-type-inference.pdf:pdf}, -issn = {01640925}, -journal = {ACM Transactions on Programming Languages and Systems}, -month = jan, -number = {1}, -pages = {1--44}, -title = {{Local type inference}}, -url = {http://portal.acm.org/citation.cfm?doid=345099.345100}, -volume = {22}, -year = {2000} +@article{Chapman2010, +address = {New York, New York, USA}, +author = {Chapman, James and Dagand, Pierre-\'{E}variste and McBride, Conor and Morris, Peter}, +doi = {10.1145/1863543.1863547}, +file = {:home/bitonic/docs/papers/conor-levitation.pdf:pdf}, +isbn = {9781605587943}, +journal = {Proceedings of the 15th ACM SIGPLAN international conference on Functional programming - ICFP '10}, +pages = {3}, +publisher = {ACM Press}, +title = {{The gentle art of levitation}}, +url = {http://portal.acm.org/citation.cfm?doid=1863543.1863547}, +year = {2010} } -@article{Pollack1990, -author = {Pollack, Robert}, -file = {:home/bitonic/docs/papers/implicit-syntax.ps:ps}, -journal = {Informal Proceedings of First Workshop on Logical Frameworks}, -title = {{Implicit syntax}}, -url = {http://reference.kfupm.edu.sa/content/i/m/implicit\_syntax\_\_1183660.pdf}, -year = {1992} +@phdthesis{Norell2007, +author = {Norell, Ulf}, +file = {:home/bitonic/docs/papers/ulf-thesis.pdf:pdf}, +isbn = {9789172919969}, +school = {Chalmers University of Technology and G\"{o}teborg University}, +title = {{Towards a practical programming language based on dependent type theory}}, +url = {http://www.cse.chalmers.se/~ulfn/papers/thesis.pdf}, +year = {2007} +} +@article{Coquand1986, +author = {Coquand, Thierry and Huet, Gerard}, +file = {:home/bitonic/docs/papers/coc.pdf:pdf}, +title = {{The calculus of constructions}}, +url = {http://hal.inria.fr/docs/00/07/60/24/PDF/RR-0530.pdf}, +year = {1986} } @article{Reynolds1994, author = {Reynolds, John C.}, @@ -260,6 +191,51 @@ title = {{An introduction to the polymorphic lambda calculus}}, url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.7.9916\&rep=rep1\&type=pdf}, year = {1994} } +@article{Brady2012, +author = {Brady, Edwin}, +file = {:home/bitonic/docs/papers/idris-implementation.pdf:pdf}, +journal = {Unpublished draft}, +number = {November}, +title = {{Implementing General Purpose Dependently Typed Programming Languages}}, +url = {http://www.cs.st-andrews.ac.uk/~eb/drafts/impldtp.pdf}, +year = {2012} +} +@article{Church1936, +author = {Church, Alonzo}, +file = {:home/bitonic/docs/papers/church-lc.pdf:pdf}, +journal = {American journal of mathematics}, +number = {2}, +pages = {345--363}, +title = {{An unsolvable problem of elementary number theory}}, +url = {http://www.ams.org/leavingmsn?url=http://dx.doi.org/10.2307/2371045}, +volume = {58}, +year = {1936} +} +@article{Altenkirch2010, +author = {Altenkirch, Thorsten and Danielsson, N and L\"{o}h, A and Oury, Nicolas}, +file = {:home/bitonic/docs/papers/PiSigma.pdf:pdf}, +journal = {Functional and Logic \ldots}, +number = {Sheard 2005}, +title = {{$\Pi$$\Sigma$: dependent types without the sugar}}, +url = 
{http://www.springerlink.com/index/91W712G2806R575H.pdf}, +year = {2010} +} +@article{Pollack1990, +author = {Pollack, Robert}, +file = {:home/bitonic/docs/papers/implicit-syntax.ps:ps}, +journal = {Informal Proceedings of First Workshop on Logical Frameworks}, +title = {{Implicit syntax}}, +url = {http://reference.kfupm.edu.sa/content/i/m/implicit\_syntax\_\_1183660.pdf}, +year = {1992} +} +@article{Dybjer1991, +author = {Dybjer, Peter}, +file = {:home/bitonic/docs/papers/dybjer-inductive.ps:ps}, +journal = {Logical Frameworks}, +title = {{Inductive sets and families in Martin-L\"{o}f's type theory and their set-theoretic semantics}}, +url = {http://books.google.com/books?hl=en\&lr=\&id=X9wfWwslFQIC\&oi=fnd\&pg=PA280\&dq=Inductive+Sets+and+Families+in+Martin-L\%C3\%B6f\%27s+Type+Theory+and+Their+Set-Theoretic+Semantics\&ots=LewzM17GcW\&sig=vF4GgtlEBSf1uwRV1o\_unDtLats}, +year = {1991} +} @article{Sulzmann2007, address = {New York, New York, USA}, author = {Sulzmann, Martin and Chakravarty, Manuel M. T. and Jones, Simon Peyton and Donnelly, Kevin}, @@ -273,6 +249,41 @@ title = {{System F with type equality coercions}}, url = {http://portal.acm.org/citation.cfm?doid=1190315.1190324}, year = {2007} } +@article{Curry1934, +author = {Curry, Haskell B.}, +file = {:home/bitonic/docs/papers/curry-stlc.pdf:pdf}, +journal = {Proceedings of the National Academy of Sciences of the United States of America}, +number = {1930}, +pages = {584--590}, +title = {{Functionality in combinatory logic}}, +url = {http://www.ncbi.nlm.nih.gov/pmc/articles/pmc1076489/}, +volume = {511}, +year = {1934} +} +@article{Church1940, +author = {Church, Alonzo}, +file = {:home/bitonic/docs/papers/church-stlc.pdf:pdf}, +journal = {J. Symb. Log.}, +number = {2}, +pages = {56--68}, +title = {{A formulation of the simple theory of types}}, +url = {http://www.ams.org/leavingmsn?url=http://dx.doi.org/10.2307/2266170}, +volume = {5}, +year = {1940} +} +@article{McBride2004, +author = {McBride, Conor}, +doi = {10.1017/S0956796803004829}, +file = {:home/bitonic/docs/papers/view-from-the-left.ps.gz:gz}, +journal = {Journal of Functional Programming}, +month = jan, +number = {1}, +pages = {69--111}, +title = {{The View from The Left}}, +url = {http://strictlypositive.org/view.ps.gz}, +volume = {14}, +year = {2004} +} @article{Vytiniotis2011, author = {Vytiniotis, Dimitrios and Jones, Simon Peyton and Schrijvers, Tom and Sulzmann, Martin}, file = {:home/bitonic/docs/papers/outsidein.pdf:pdf}, @@ -284,16 +295,13 @@ url = {http://journals.cambridge.org/production/action/cjoGetFulltext?fulltextid volume = {21}, year = {2011} } -@article{Yorgey2012, -address = {New York, New York, USA}, -author = {Yorgey, Brent a. 
and Weirich, Stephanie and Cretin, Julien and {Peyton Jones}, Simon and Vytiniotis, Dimitrios and Magalh\~{a}es, Jos\'{e} Pedro}, -doi = {10.1145/2103786.2103795}, -file = {:home/bitonic/docs/papers/haskell-promotion.pdf:pdf}, -isbn = {9781450311205}, -journal = {Proceedings of the 8th ACM SIGPLAN workshop on Types in language design and implementation - TLDI '12}, -pages = {53}, -publisher = {ACM Press}, -title = {{Giving Haskell a promotion}}, -url = {http://dl.acm.org/citation.cfm?doid=2103786.2103795}, -year = {2012} +@article{Jacobs1997, +author = {Jacobs, Bart and Rutten, Jan}, +file = {:home/bitonic/docs/papers/coalgebra-coind.pdf:pdf}, +journal = {EATCS Bulletin}, +number = {1997}, +title = {{A tutorial on (co) algebras and (co) induction}}, +url = {http://synrc.com/publications/cat/Logic/CoinductionCoalgebrasTutorial.pdf}, +volume = {62}, +year = {1997} } diff --git a/docs/background.tex b/docs/InterimReport.tex similarity index 77% rename from docs/background.tex rename to docs/InterimReport.tex index 4d21c16..85eedd1 100644 --- a/docs/background.tex +++ b/docs/InterimReport.tex @@ -35,6 +35,7 @@ \usepackage{wasysym} \usepackage{turnstile} \usepackage{centernot} +\usepackage{stmaryrd} %% ----------------------------------------------------------------------------- %% Utils @@ -81,7 +82,7 @@ breaklinks=true, bookmarks=true, pdfauthor={Francesco Mazzoli }, - pdftitle={Observational Equality}, + pdftitle={Observational Equality - Interim Report}, colorlinks=false, pdfborder={0 0 0} } @@ -92,7 +93,7 @@ % avoid problems with \sout in headers with hyperref: \pdfstringdefDisableCommands{\renewcommand{\sout}{}} -\title{Observational Equality} +\title{Observational Equality - Interim Report} \author{Francesco Mazzoli \href{mailto:fm2209@ic.ac.uk}{\nolinkurl{}}} \date{December 2012} @@ -157,13 +158,15 @@ the rules are preceded by what the rule looks like and what it shows (for example \axdesc{typing}{\Gamma \vdash \termsyn : \tysyn}). In the languages presented I will also use different fonts for different things: -\begin{tabular}{c | l} - $\lccon{Sans}$ & Sans serif, capitalised, for type constructors. \\ - $\lccon{sans}$ & Sans serif, not capitalised, for data constructors. \\ - $\lcsyn{roman}$ & Roman, underlined, for the syntax of the language. \\ - $\lcfun{roman}$ & Roman, bold, for defined functions and values. \\ - $math$ & Math mode font for quantified variables and syntax elements. -\end{tabular} +\begin{center} + \begin{tabular}{c | l} + $\lccon{Sans}$ & Sans serif, capitalised, for type constructors. \\ + $\lccon{sans}$ & Sans serif, not capitalised, for data constructors. \\ + $\lcsyn{roman}$ & Roman, underlined, for the syntax of the language. \\ + $\lcfun{roman}$ & Roman, bold, for defined functions and values. \\ + $math$ & Math mode font for quantified variables and syntax elements. + \end{tabular} +\end{center} Moreover, I will from time to time give examples in the Haskell programming language as defined in \citep{Haskell2010}, which I will typeset in @@ -227,6 +230,7 @@ are said to be \emph{normalising}, and the `final' term (where no reductions are possible on the term or on its subterms) is called \emph{normal form}. \subsection{The simply typed $\lambda$-calculus} +\label{sec:stlc} \newcommand{\tya}{A} \newcommand{\tyb}{B} @@ -290,10 +294,11 @@ This typing system takes the name of `simply typed lambda calculus' (STLC), and enjoys a number of properties. 
Two of them are expected in most type systems \citep{Pierce2002}: \begin{description} -\item[Progress] A well-typed term is not stuck - either it is a value or it can - take a step according to the evaluation rules. With `value' we mean a term - whose subterms (including itself) don't appear to the left of the $\bred$ - relation. +\item[Progress] A well-typed term is not stuck - either it is a \emph{canonical} + value or it can take a step according to the evaluation rules. With canonical + value we indicate terms formed by canonical constructors, in this case only + $\lambda$\footnote{See section \ref{sec:fun-ext} for more information on + canonicity.}. \item[Preservation] If a well-typed term takes a step of evaluation, then the resulting term is also well typed. \end{description} @@ -321,8 +326,8 @@ $$ $$ \lcfix{x : \tya}{\termt} \bred \termt[\lcfix{x : \tya}{\termt}] $$ -Which will deprive us of normalisation. There is however a price to pay, which -will be made clear in the next section. +Which will deprive us of normalisation, which is a particularly bad thing if we +want to use the STLC as described in the next section. \subsection{The Curry-Howard correspondence} \label{sec:curry-howard} @@ -683,9 +688,7 @@ hierarchy are called `universes'. Theories where $\lcsetz : \lcsetz$ are inconsistent due to Girard's paradox \citep{Hurkens1995}, and thus lose their well-behavedness. Some impredicativity sometimes has its place, either because the theory retain good properties (normalization, consistency, etc.) anyway, -like in System F and CoC; or because we are at a stage at which we do not care - -we will see instances in section \ref{foo} -% TODO put citation here +like in System F and CoC; or because we are at a stage at which we do not care. Note that the Curry-Howard correspondence runs through ITT as it did with the STLC with the difference that ITT corresponds to an higher order propositional @@ -798,12 +801,10 @@ The introduction and elimination for $\top$ and $\lcbool$ are unsurprising. Note that in the $\lcite{\dotsb}{\dotsb}{\dotsb}$ construct the type of the branches are dependent on the value of the conditional. -% TODO: explain better here - The rules for $\lccon{W}$, on the other hand, are quite an eyesore. The idea behind $\lccon{W}$ types is to build up `trees' where the number of `children' -of each node is dependent on the value in the node. This is captured by the -$\lhd$ constructor, where the argument on the left is the value, and the +of each node is dependent on the value (`shape') in the node. This is captured +by the $\lhd$ constructor, where the argument on the left is the value, and the argument on the right is a function that returns a child for each possible value of $\tyb[\text{node value}]$, if $\lcw{x}{\tya}{\tyb}$. The recursor $\lcrec{\termt}{x}{\tyc}{\termp}$ uses $p$ to inductively prove that @@ -909,7 +910,7 @@ proof of equality will be a $\lccon{refl}$. We can use $\neg These elements conclude our presentation of a `core' type theory. For an extended example of a similar theory in use see Section 6.2 of -\cite{Thompson1991}\footnote{Note that while I attempted to formalise the proof +\citep{Thompson1991}\footnote{Note that while I attempted to formalise the proof in Agda, I found a bug in the book! See the errata for details: \url{http://www.cs.kent.ac.uk/people/staff/sjt/TTFP/errata.html}.}. 
The above
language and examples have been codified in Agda\footnote{More on Agda in the
  next section.}, see appendix \ref{app:agda-code}.

\section{A more useful language}
\label{sec:practical}

While our core type theory equipped with $\lccon{W}$ types is very useful
-conceptually as a simple but complete language, things get messy very fast. In
-this section I will present the elements that are usually included in theorem
-provers or programming languages to make them usable by mathematicians or
-programmers.
+conceptually as a simple but complete language, things get messy very fast,
+since handling $\lccon{W}$ types directly is incredibly cumbersome. In this
+section I will present the elements that are usually included in theorem provers
+or programming languages to make them usable by mathematicians or programmers.

All the features presented are present in the second version of the Agda system
\citep{Norell2007, Bove2009}. Agda follows a tradition of theorem provers based
@@ -937,22 +938,30 @@ programming languages, while still offering good tools to express
mathematics\footnote{In fact, currently, Agda is used almost exclusively to
  express mathematics, rather than to program.}. This is reflected in a series
of differences that I will not discuss here (most notably the absence of tactics
-but better support for pattern matching in Agda/Epigram). I will take the same
-approach as Agda/Epigram and will give a perspective focused on functional
-programming rather than theorem proving. Every feature will be presented as it
-is Agda.
+but better support for pattern matching in Agda/Epigram). Every feature will be
+presented as it is in Agda.

-\subsection{Bidirectional type checking}
+As previously, all the examples presented have been codified in Agda, see
+appendix \ref{app:agda-code}.

-Lastly, the theory I present is fully explicit in the sense that the user has to
-specify every type when forming abstractions, products, etc. This can be a
-great burden if one wants to use the theory directly. Complete inference is
-undecidable (which is hardly surprising considering the role that types play)
-but partial inference (also called `bidirectional type checking' in this
-context) in the style of \cite{Pierce2000} will have to be deployed in a
-practical system.
+\subsection{Type inference}
+
+The theory I presented is fully explicit in the sense that the user has to
+specify every type when forming abstractions, products, etc.
+
+For the programmer used to Hindley-Milner as in Haskell and SML (and for any
+human being), this is a great burden. Complete inference is undecidable - which
+is hardly surprising considering the role that types play - but partial
+inference in the style of \cite{Pierce2000}, also called `bidirectional type
+checking' in this context, will have to be deployed in a practical system.
+
+Agda gives users an explicit way to indicate which arguments should be implicit,
+by wrapping them in curly braces in type signatures: $\{A : \lcsetz\} \tyarr
+\dotsb$. It also allows the types of arguments to be omitted altogether, if they
+can be inferred from other arguments: $\{A\} \tyarr (x : A) \tyarr \dotsb$.

 \subsection{Inductive families}
+\label{sec:inductive-families}

 \newcommand{\lcdata}[1]{\lcsyn{data}\appsp #1}
 \newcommand{\lcdb}{\ |\ }

Inductive families were first introduced by \cite{Dybjer1991}.
For the reader familiar with the recent developments present in the GHC compiler for Haskell, inductive families will look similar to GADTs (Generalised Abstract Data Types) \citep[Section 7.4.7]{GHC}. -Haskell style data types provide \emph{parametric polymorphism}, so that we can +Haskell-style data types provide \emph{parametric polymorphism}, so that we can define types that range over type parameters: \begin{lstlisting} List a = Nil | Cons a (List a) \end{lstlisting} In this way we define the \texttt{List} type once while allowing elements to be of any type. In Haskell \texttt{List} will be a type constructor of kind -\texttt{* -> *}, while \texttt{Nil :: List a} and \texttt{Cons :: a -> List a -> List a}\footnote{Note that the \texttt{a}s are implicitly quantified type variables}. +\texttt{* -> *}, while \texttt{Nil :: List a} and \texttt{Cons :: a -> List a -> + List a}\footnote{Note that the \texttt{a}s are implicitly quantified type + variables.}. Inductive families bring this concept one step further by allowing some of the parameters to be constrained by constructors. We call these `variable' @@ -1008,14 +1019,9 @@ constructors, while indices' won't. In this $\lccon{Vec}$ example, when we form a new list the length is $\lccon{zero}$. When we append a new element to an existing list of length $n$, the new list is of length $\app{\lccon{suc}}{n}$, that is, one more than the -previous length. Also note that we are using the $\{n : \lcnat\} \tyarr \dotsb$ -syntax to indicate an argument that we will omit when using $\_::\_$. In the -future I shall also omit the type of these implicit parameters, in line with how -Agda works. - -Once we have $\lccon{Vec}$ we can do things much more safely than with normal -lists. For example, we can define an $\lcfun{head}$ function that returns the -first element of the list: +previous length. Once $\lccon{Vec}$ is defined we can do things much more +safely than with normal lists. For example, we can define an $\lcfun{head}$ +function that returns the first element of the list: \[ \begin{array}{l} \lcfun{head} : \{A\ n\} \tyarr \lcvec{A}{(\app{\lccon{suc}}{n})} \tyarr A @@ -1088,26 +1094,32 @@ This expresses the fact that the resulting type can be dependent on the number. In other words, we are proving that $P$ holds for all $n : \lcnat$. Naturally a reduction rule will be associated with each eliminator: -$$ +\[ \begin{array}{l c l} \app{\app{\app{\app{\lcfun{NatInd}}{P}}{z}}{f}}{\lccon{zero}} & \bred & z \\ \app{\app{\app{\app{\lcfun{NatInd}}{P}}{z}}{f}}{(\app{\lccon{suc}}{n})} & \bred & \app{\app{f}{n}}{(\app{\app{\app{\app{\lcfun{NatInd}}{P}}{z}}{f}}{n})} \end{array} -$$ +\] Which echoes the \texttt{natInd} function defined in Haskell. An extensive -account on combinators and inductive families can be found in \cite{McBride2004}. +account on combinators and inductive families can be found in \citep{McBride2004}. \subsubsection{Pattern matching and guarded recursion} However, combinators are far more cumbersome to use than the techniques usually employed in functional programming: pattern matching and recursion. -\emph{General} recursion cannot be added if we want to keep our theory free of -$\bot$. The common solution to this problem is to allow recursive calls only if -the arguments are structurally smaller than what the function received. Pattern -matching on the other hand gains considerable power with inductive families, -since when we match a constructor we are gaining information on the indices of -the family. 
Thus matching constructors will enable us to restrict patterns of -other arguments. +\emph{General} recursion (exemplified by the $\lcsyn{fix}$ combinator in section +\ref{sec:stlc}) cannot be added if we want to keep our theory free of $\bot$. +The common solution to this problem is to allow recursive calls only if the +arguments are structurally smaller than what the function received, what is +known as \emph{structural} recursion. For example, if we have a $\lccon{Tree}$ +family with a $\lccon{node}\appsp l \appsp r$ (and maybe others) constructor, +functions that work on $\lccon{Tree}$ will be able to make recursive calls on +$l$ and $r$. + +Pattern matching on the other hand gains considerable power with inductive +families, since when we match a constructor we are gaining information on the +indices of the family. Thus matching constructors will enable us to restrict +patterns of other arguments. Following this discipline defining $\lcfun{head}$ becomes easy: \[ @@ -1152,8 +1164,8 @@ typed, family. One option is to have a \emph{cumulative} theory, where $\lcset{n} : \lcset{m}$ iff $n < m$. Then we can have a sufficiently large level in our type signature and forget about it. Moreover, levels in this setting can be inferred -mechanically \citep{Pollack1990}, and thus we might lift the burden of universes -from the user. This is the approach taken by Epigram. +mechanically \citep{Pollack1990}, and thus we can lift the burden of specifying +universes from the user. This is the approach taken by Epigram. Another more expressive (but currently more cumbersome) way is to expose universes more, giving the user a way to quantify them and to take the least @@ -1168,8 +1180,10 @@ example we can define a level-polymorphic $\times$: Levels can be made implicit as shown and can be almost always inferred. However, having to decorate each type signature with quantified levels adds quite a lot of noise. An inference algorithm that automatically quantifies and -instantiates levels (much like Hindley-Milner for types) seems feasible, but -currently not implemented anywhere. +instantiates levels (much like Hindley-Milner for types) seems feasible, but is +currently not implemented anywhere. The ideal situation would be polymorphic, +cumulative levels, with an easy way to omit treatment of levels unless in the +(possibly few) cases where the inference algorithm breaks down. \subsection{Coinduction} @@ -1185,7 +1199,7 @@ fibs = 0 : 1 : zipWith (+) fibs (tail fibs) While we can clearly write useful programs of this kind, we need to be careful, since \texttt{length fibs}, for example, does not make much sense\footnote{Note that if instead of machine \texttt{Int}s we used naturals as defined - previously, getting the length of an infinite list would a productive + previously, getting the length of an infinite list would be a productive definition.}. In less informal terms, we need to distinguish between \emph{productive} and @@ -1195,12 +1209,15 @@ not, for example, for \texttt{let x = x in x :: [Int]}. It is very desirable to recover \emph{only} the productive definition so that total programs working with infinite data can be written. -This desire has lead to work on coindunction -% TODO finish +This desire has lead to separate the notion of (finite) data and \emph{codata}, +which can be worked on by \emph{coinduction} - an overview is given in +\citep{Jacobs1997}. Research is very active on this subject since coinduction +as implemented by Coq and Agda is not satisfactory in different ways. 
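
To make the distinction concrete, here is a small Haskell sketch (the
\texttt{Stream} type and the definitions below are purely illustrative, and
Haskell itself will happily accept all of them): \texttt{nats} is productive,
since every corecursive call is guarded by a constructor and any finite prefix
of the result can therefore be computed, while \texttt{bad} never produces
anything at all.
\begin{lstlisting}
-- A list with no `nil' case: conceptually, it never ends.
data Stream a = Cons a (Stream a)

-- Productive: the corecursive call sits under the constructor `Cons',
-- so the n-th element is always reachable in finitely many steps.
nats :: Integer -> Stream Integer
nats n = Cons n (nats (n + 1))

-- Not productive: unfolding the definition never reaches a constructor.
bad :: Stream a
bad = bad
\end{lstlisting}
Syntactic guardedness checks along these lines are what Coq and Agda use to
accept corecursive definitions; note that a direct translation of the
\texttt{fibs} definition above would be rejected by such a check, since its
corecursive calls are hidden inside \texttt{zipWith}, which illustrates why the
current treatment is often felt to be too restrictive.
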
\section{Many equalities} \subsection{Revision, and function extensionality} +\label{sec:fun-ext} \epigraph{\emph{Half of my time spent doing research involves thinking up clever schemes to avoid needing functional extensionality.}}{@larrytheliquid} @@ -1235,11 +1252,11 @@ process goes on `under the hood' and is outside the control of the user. Propositional equality, on the other hand, is available to the user to reason about equality, internalising it as a type. As we have seen in section \ref{sec:propeq} propositional equality is introduced by reflexivity and -eliminated with a `Leibnitz's law' style rule ($\lcfun{subst}$). Now that we -have inductive families and dependent pattern matching we do not need hard coded -rules to express this concepts\footnote{Here I use Agda notation, and thus I - cannot redefine $=$ and use subscripts, so I am forced to use $\equiv$ with - implicit types. After I will carry on using the old notation.}: +eliminated with a `Leibnitz's law' style rule ($\lcfun{subst}$). Note that now +that we have inductive families and dependent pattern matching we do not need +hard coded rules to express this concepts\footnote{Here I use Agda notation, and + thus I cannot redefine $=$ and use subscripts, so I am forced to use $\equiv$ + with implicit types. After I will carry on using the old notation.}: \[ \begin{array}{l} \lcdata{\lccon{\_\equiv\_}} : \{A : \lcsetz\} : A \tyarr A \tyarr \lcsetz \lcwhere \\ @@ -1252,7 +1269,19 @@ rules to express this concepts\footnote{Here I use Agda notation, and thus I \end{array} \] Here matching $\lccon{refl}$ tells the type checker that $t \defeq m$, and thus -$\app{B}{t} \defeq \app{B}{m}$, so we can just return $n$. +$\app{B}{t} \defeq \app{B}{m}$, so we can just return $n$. This shows the +connection between type families indices and propositional equality, also +highlighted in \citep{McBride2004}. + +It is worth noting that all $\lcfun{subst}$s, in ITT, are guaranteed to reduce +at the top level. This is because $\lccon{refl}$ is the only constructor for +propositional equality, and thus without false assumptions every top level proof +will have that shape. Extending this idea to other types, in ITT, at the top +level, expressions have \emph{canonical} values - a property known as +\emph{canonicity}. We call canonical those values formed by constructors: +$\lambda$, $(,)$, $\lhd$, etc. In other words a value is canonical if it's not +something that is supposed to reduced (an eliminator) but is stuck on some +variable. While propositional equality is a very useful construct, we can prove less terms equal than we would like to. For example, if we have the usual functions @@ -1293,9 +1322,9 @@ what is known as the `equality reflection' rule: \UnaryInfC{$\Gamma \vdash t \defeq m$} \end{prooftree} \end{center} -This jump from types to a metatheoretic relation has deep consequences. But -firstly, let's get extensionality out of the way. Given $\Gamma = \lcfun{eq} : -(x : A) \tyarr \app{f}{x} = \app{g}{x}$, we have: +This jump from types to a metatheoretic relation has deep consequences. +Firstly, let's get extensionality out of the way. Given $\Gamma = \Gamma'; +\lcfun{eq} : (x : A) \tyarr \app{f}{x} = \app{g}{x}$, we have: \begin{center} \begin{prooftree} \AxiomC{$\Gamma; x : A \vdash \app{\lcfun{eq}}{x} : \app{f}{x} = \app{g}{x}$} @@ -1311,7 +1340,7 @@ firstly, let's get extensionality out of the way. 
Given $\Gamma = \lcfun{eq} : \end{center} Since the above is possible, theories that include the equality reflection rule are often called `Extensional Type Theories', or ETTs. A notable exponent of -this discipline is the NuPRL system \citep{Constable86}. Moreover, equality +this discipline is the NuPRL system \citep{NuPRL}. Moreover, equality reflection simplifies $\lcfun{subst}$-like operations, since if we have $t = m$ and $\app{A}{t}$, then by equality reflection clearly $\app{A}{t} \defeq \app{A}{m}$. @@ -1329,15 +1358,36 @@ relying on terms only. \subsection{Observational equality} -A recent development by \cite{Altenkirch2007} promises to keep the well +\newcommand{\lcprop}{\lccon{Prop}} +\newcommand{\lcdec}[1]{\llbracket #1 \rrbracket} +\newcommand{\lcpropsyn}{\mathit{prop}} +\newcommand{\lcpropf}[3]{\forall #1 : #2.\appsp #3} +\newcommand{\lcparr}{\Rightarrow} + +A recent development by \citet{Altenkirch2007} promises to keep the well behavedness of ITT while being able to gain many useful equality proofs, -including function extensionality. Starting from a theory similar to the one -presented in section \ref{sec:itt} but with only $\lcset{0}$, a propositional -subuniverse of $\lcsetz$ is introduced, plus a `decoding' function: +including function extensionality. The main idea is to give the user the +possibility to \emph{coerce} (or transport) values from a type $A$ to a type +$B$, if the type checker can prove structurally that $A$ and $B$ are equal. -\newcommand{\lcprop}{\lccon{Prop}} +This said, starting from a theory similar to the one presented in section +\ref{sec:itt} but with only $\lcset{0}$ and without propositional equality, a +propositional subuniverse of $\lcsetz$ is introduced, plus a `decoding' function +$\lcdec{\_}$: \begin{center} + \axname{syntax} + $$ + \begin{array}{rcl} + \termsyn & ::= & \dotsb \separ \lcprop \separ \lcdec{\lcpropsyn} \\ + \lcpropsyn & ::= & \bot \separ \top \separ \lcpropsyn \wedge \lcpropsyn \separ \lcpropf{x}{\termsyn}{\lcpropsyn} + \end{array} + $$ + + \axname{typing} + + \vspace{0.5cm} + \begin{tabular}{c c c} \AxiomC{\phantom{1em}} \UnaryInfC{$\Gamma \vdash \lcprop : \lcsetz$} @@ -1361,21 +1411,177 @@ subuniverse of $\lcsetz$ is introduced, plus a `decoding' function: \vspace{0.5cm} - \begin{tabular}{c} + \begin{tabular}{c c} \AxiomC{$\Gamma \vdash S : \lcsetz$} - \AxiomC{$\Gamma \vdash q : \lcprop$} - \BinaryInfC{$\Gamma \vdash p \wedge q : \lcprop$} - \noLine - \UnaryInfC{\phantom{1em}} + \AxiomC{$\Gamma \vdash Q : \lcprop$} + \BinaryInfC{$\Gamma \vdash \lcpropf{x}{S}{Q} : \lcprop$} + \DisplayProof + & + \AxiomC{$\Gamma \vdash P : \lcprop$} + \UnaryInfC{$\Gamma \vdash \lcdec{P} : \lcsetz$} \DisplayProof \end{tabular} + + \vspace{0.5cm} + + \axdesc{reduction}{\termsyn \bred \termsyn} + \begin{eqnarray*} + \lcdec{\bot} & \bred & \bot \\ + \lcdec{\top} & \bred & \top \\ + \lcdec{P \wedge Q} & \bred & \lcdec{P} \times \lcdec{Q} \\ + \lcdec{\lcpropf{x}{S}{P}} & \bred & (x : S) \tyarr \lcdec{P} + \end{eqnarray*} \end{center} +I will use $P \lcparr Q$ as an abbreviation for $\lcpropf{\_}{P}{Q}$. Note that +$\lcprop$ has no `data', but only proofs. This has the consequence that code +using proofs, when compiled, will be able to safely erase every $\lcprop$, as +long as it doesn't compute under binder - and we most likely don't need to +compute under binders after we type checked and we just need to run the code. 
+Moreover, we can extend $\lcprop$ with other axioms while retaining canonicity, +since + +\newcommand{\lccoe}[4]{\lcfun{coe}\appsp#1\appsp#2\appsp#3\appsp#4} +\newcommand{\lccoh}[4]{\lcfun{coh}\appsp#1\appsp#2\appsp#3\appsp#4} + +Once we have $\lcprop$, we can define the operations that will let us form +equalities and coerce between types and values: +\begin{center} + \axname{typing} + + \vspace{0.5cm} + \begin{tabular}{c c} + \AxiomC{$\Gamma \vdash S : \lcsetz$} + \AxiomC{$\Gamma \vdash T : \lcsetz$} + \BinaryInfC{$\Gamma \vdash S = T : \lcprop$} + \DisplayProof + & + \AxiomC{$\Gamma \vdash Q : \lcdec{S = T}$} + \AxiomC{$\Gamma \vdash s : S$} + \BinaryInfC{$\Gamma \vdash \lccoe{S}{T}{Q}{s} : T$} + \DisplayProof + \end{tabular} + + \vspace{0.5cm} + + \begin{tabular}{c c} + \AxiomC{$\Gamma \vdash s : S$} + \AxiomC{$\Gamma \vdash t : T$} + \BinaryInfC{$\Gamma \vdash (s : S) = (t : T) : \lcprop$} + \DisplayProof + & + \AxiomC{$\Gamma \vdash Q : \lcdec{S = T}$} + \AxiomC{$\Gamma \vdash s : S$} + \BinaryInfC{$\Gamma \vdash \lccoh{S}{T}{Q}{s} : \lcdec{(s : S) = (\lccoe{S}{T}{Q}{s})}$} + \DisplayProof + \end{tabular} +\end{center} + +In the first row, $=$ forms equality between types, and $\lcfun{coe}$ (`coerce') +transports values of equal types. On the second row, $=$ forms equality between +values, and $\lcfun{coh}$ (`coherence') guarantees that all equalities are +really between equal things. Now the tricky part is to define reduction rules +to reduce the proofs of equality to something $\lcdec{\_}$ can reduce, so that +proof of equality will exist only between equal things, and that $\lcfun{coe}$ +will compute only when those proofs are derivable. + +Let's start with type-level $=$: +$$ +\begin{array}{r@{\ } c@{\ } l c l} + \bot & = & \bot & \bred & \top \\ + \top & = & \top & \bred & \top \\ + \lcbool & = & \lcbool & \bred & \top \\ + (s : S) \times T & = & (s' : S') \times T' & \bred & S = S' \wedge \lcpropf{s}{S}{\lcpropf{s'}{S'}{(s : S) = (s' : S') \lcparr T[x] = T'[x']}} \\ + (s : S) \tyarr T & = & (s' : S') \tyarr T' & \bred & S' = S \wedge \lcpropf{s'}{S'}{\lcpropf{s}{S}{(s' : S') = (s : S) \lcparr T[x] = T'[x']}} \\ + \lcw{s}{S}{T} & = & \lcw{s'}{S'}{T'} & \bred & S = S' \wedge \lcpropf{s}{S}{\lcpropf{s'}{S'}{(s : S) = (s' : S') \lcparr T'[x'] = T[x]}} \\ + S & = & T & \bred & \bot\ \text{for every other canonical sets $S$ and $T$} +\end{array} +$$ +The rule for $\times$ is unsurprising: it requires the left types to be equal, +and the right types to be equal when the left values are equal. 
The rules for
$\tyarr$ and $\lccon{W}$ are similar but with some twists to make the rules for
$\lcfun{coe}$ simpler:
$$
\begin{array}{r@{} l@{} l@{} l c l}
  \lccoe{&\bot}{&\bot}{&Q}{z} & \bred & z \\
  \lccoe{&\top}{&\top}{&Q}{u} & \bred & u \\
  \lccoe{&\lcbool}{&\lcbool}{&Q}{b} & \bred & b \\
  \lccoe{&((x : S) \times T)}{&((x' : S') \times T')}{&Q}{(s, t)} & \bred & \\
  \multicolumn{6}{l}{
    \lcind
    \begin{array}{l@{\ } l@{\ } c@{\ } l@{\ }}
      \lcsyn{let} & Q_S & \mapsto & \lcfst Q : \lcdec{S = S'} \\
                  & s'  & \mapsto & \lccoe{S}{S'}{Q_S}{s} : S' \\
                  & Q_T & \mapsto & \lcsnd Q \appsp s \appsp s' \appsp (\lccoh{S}{S'}{Q_S}{s}) : \lcdec{T[s] = T'[s']} \\
                  & t'  & \mapsto & \lccoe{T[s]}{T'[s']}{Q_T}{t} : T'[s'] \\
      \multicolumn{4}{l}{\lcsyn{in}\ (s', t')}
    \end{array}
  }\\
  \lccoe{&((x : S) \tyarr T)}{&((x' : S') \tyarr T')}{&Q}{f} & \bred & \dotsb \\
  \lccoe{&(\lcw{x}{S}{T})}{&(\lcw{x'}{S'}{T'})}{&Q}{(s \lhd f)} & \bred & \dotsb \\
  \lccoe{&S}{&T}{&Q}{x} & \bred & \lcabsurdd{T}{Q}
\end{array}
$$
The rule for $\times$ is hairy but straightforward: we are given a $(s, t) : (x
: S) \times T$ and, given the reduction rules specified before, a $$Q : \lcdec{S
  = S'} \times ((s : S) \tyarr (s' : S') \tyarr \lcdec{(s : S) = (s' : S')}
\tyarr \lcdec{T[x] = T'[x']})$$ We need to obtain an element of $(x' : S')
\times T'$. We can easily get the left component by coercing $s$ with the left
part of $Q$, and the right component with the help of $\lcfun{coh}$ and the
right part of $Q$. The rules for the other binders are similar but not
reproduced here for brevity. The reader can refer to the paper for more
details.

Now we are left with the rules for equality between values, which proceed in a
similarly structural fashion; again, rather than reproducing them here, I refer
the reader to the paper.

% TODO put the coind paper

\section{What to do}

My goal is to advance the practice of OTT. Conor McBride and other
collaborators have already implemented OTT, along with much else, as part of
the effort towards a new version of Epigram\footnote{Available at
  \url{http://www.e-pig.org/darcs/Pig09/web/}.}. However, the development is
stale right now and it is not clear when and if Epigram 2 will be released.

The first thing that would be very useful to do is to have a small core
language that supports OTT, on which a more expressive language can be built.
This follows an established tradition in functional programming and theorem
proving of layering several languages of increasing complexity, so that type
checking at the lower levels is much simpler and thus less prone to bugs. For
example, GHC Haskell uses an internal language, System F\textsubscript{C}
\citep{Sulzmann2007}, which includes only minimal features compared to Haskell.

I have already implemented a type theory as described in section
\ref{sec:itt}\footnote{Available at \url{https://github.com/bitonic/mfixed}.} to
make myself comfortable with how such systems work. From that, a good starting
point to build something more useful could be $\Pi\Sigma$
\citep{Altenkirch2010}, a core, partial language designed to be a good target
for high-level languages like Agda, with facilities to implement inductive
families and corecursion. Starting to think about how OTT would work in such a
language is my immediate goal.

If these attempts are successful, I can work towards understanding what a
higher-level language would look like and how it would be `elaborated' to the
lower-level core theory. Epigram 2 can certainly be an inspiration, although it
employs a sophisticated reflection system \citep{Chapman2010} that I do not plan
to implement.
In any case, there are many things to do, with the most salients +point being how to treat inductive families and pattern matching, corecursion, +and what a friendly interface for OTT would be from the +programmer/mathematician's perspective (or maybe whether it's feasible to always +infer coercions and/or equality proofs automatically). + +Interestingly, the mentioned System F\textsubscript{C} was introduced in GHC to +be able to implement GADTs, which as said in section +\ref{sec:inductive-families} bear many similarities to inductive families. To +do that it uses a system of coercions that is not too far away from OTT +coercions. This is not a coincidence, since indices and propositional equality +are often connected, and offers a lot of food for thought. For instance, GHC +Haskell automatically generates and applies equality proofs, and the inference +engine that serves this purpose is quite complex \citep{Vytiniotis2011} and +often very confusing to the user; we would like to have a less `magic' but +clearer system. \bibliographystyle{authordate1} -\bibliography{background} +\bibliography{InterimReport} \appendix \section{Agda code} @@ -1385,6 +1591,6 @@ subuniverse of $\lcsetz$ is introduced, plus a `decoding' function: xleftmargin=0pt } -\lstinputlisting{background.agda} +\lstinputlisting{InterimReport.agda} \end{document} diff --git a/docs/Makefile b/docs/Makefile index 52dfaec..1514e88 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -3,9 +3,9 @@ OBJECTS = $(patsubst %.tex, %.pdf, $(SOURCES)) all: $(OBJECTS) -background.pdf: background.tex background.bib background.agda +InterimReport.pdf: InterimReport.tex InterimReport.bib InterimReport.agda xelatex -halt-on-error $< -o $@ - bibtex background + bibtex InterimReport xelatex -halt-on-error $< -o $@ xelatex -halt-on-error $< -o $@ diff --git a/docs/background-notes.org b/docs/background-notes.org deleted file mode 100644 index 8d65233..0000000 --- a/docs/background-notes.org +++ /dev/null @@ -1,71 +0,0 @@ -* TODO [15/24] Papers - - [X] “Observational Equality, Now!” OTT2.pdf - - [X] “The Gentle Art of Levitation” conor-levitation.pdf - Offers a nice metalanguage - - [ ] “Dependently Typed Functional Programs and their Proofs” conor-thesis.pdf - His PhD thesis, probably relevant - - [ ] “Elaborating Inductive Definition” elaborating-inductive.pdf - Don’t really knof what it’s about yet, probably interesting - - [X] “Giving Haskell a Promotion” haskell-promotion.pdf - Could cite it as related developement in the mainstream. - - [X] “Implementing General Purpose Dependently Typed Programming Languages” idris-implementation.pdf - - [ ] “Indexed Containers” indexed-containers.pdf - Don’t really know what this is about yet, but is probably relevant - - [X] “Local Type Inference” local-type-inference.pdf - Will cite when talking about bidirectional type checking - - [ ] “Unification Under a Mixed Prefix” miller-unification.pdf - I think it’s needed to write Epigram/Agda style dep. pattern matching - - [X] “OutsideIn(X)” outsidein.pdf - Cite this as an example of how damn hard inference is... - - [ ] “A tutorial implementation of dynamic pattern unification” pattern-unification.pdf - - [X] “ΠΣ: Dependent Types without the Sugar” PiSigma.pdf - - [X] “The Power of Pi” powerofpi.pdf - - [ ] “Simply Easy!” simply-easy.pdf - - [ ] “Dependently Typed Programming with Singletons” singleton-types.pdf - As haskell-promotion.pdf, could cite it as related developement in the - mainstream. 
- - [X] “System F with Type Equality Coercions” systemf-coercions.pdf - Same as haskell-promotion.pdf, but even better - - [X] “Towards a practical programming language based on dependent type theory” ulf-thesis.pdf - Agda - - [X] “The View from the Left” view-from-the-left.ps.gz - - [X] “Introduction to Generalised Type Systems” lambda-cube.pdf - - [ ] “Computation and Reasoning: A Type Theory for Computer Science” - I can’t find this one. - - [X] “Intuitionistic Type Theory” martin-lof-tt.pdf - - [ ] “Implicit Syntax” implicit-syntax.pdf - - [X] “The Calculus of Constructions” coc.pdf - - [X] “An Extended Calculus of Constructions” luo-thesis.ps - -* Outline -** Simple and not-so-simple types -*** The untyped lambda-calculus -*** The STLC -*** The Curry-Howard isomorphism -** Intuitionistic Type Theory -*** A Core Type Theory -*** More than one equality -*** Data - A core type theory with 0, 1, 2, W -*** Inductive families -**** Dependent pattern matching -*** Corecursion -*** Quotient types -** Observational Type Theory -*** Extensionality is hard to get - The “naive” extensionality and the problems with it -*** A solution -*** The problems with W - See blog post -** Things to do -*** Usable, Observational TT implementation - In the style of ΠΣ, Mini-TT - Possible features: - - Corecursion - - Quotient types - - Total or non-total? -*** Inference for coercions - Specifically, what to do when we express inductive families indices as - equalities -*** Eliminators based calculus - In the style of ETT -- 2.30.2