finishing up the metacoq tour
MevenBertrand committed Apr 24, 2022
1 parent cc118c6 commit e4ede87
Showing 15 changed files with 537 additions and 157 deletions.
26 changes: 2 additions & 24 deletions Manuscript/code/PCUICCumulativitySpec.v
@@ -1,70 +1,57 @@
Inductive cumulSpec0 {cf : checker_flags} (Σ : global_env_ext) Γ (pb : conv_pb)
: term -> term -> Type :=

| cumul_Trans : forall t u v,
is_closed_context Γ -> is_open_term Γ u ->
Σ ;;; Γ ⊢ t ≤s[pb] u ->
Σ ;;; Γ ⊢ u ≤s[pb] v ->
Σ ;;; Γ ⊢ t ≤s[pb] v

| cumul_Sym : forall t u,
Σ ;;; Γ ⊢ t ≤s[Conv] u ->
Σ ;;; Γ ⊢ u ≤s[pb] t

| cumul_Refl : forall t,
Σ ;;; Γ ⊢ t ≤s[pb] t

(* Cumulativity rules *)

| cumul_Ind : forall i u u' args args',
cumul_Ind_univ Σ pb i #|args| u u' ->
All2 (fun t u => Σ ;;; Γ ⊢ t ≤s[Conv] u) args args' ->
Σ ;;; Γ ⊢ mkApps (tInd i u) args ≤s[pb] mkApps (tInd i u') args'

| cumul_Construct : forall i k u u' args args',
cumul_Construct_univ Σ pb i k #|args| u u' ->
All2 (fun t u => Σ ;;; Γ ⊢ t ≤s[Conv] u) args args' ->
Σ ;;; Γ ⊢ mkApps (tConstruct i k u) args ≤s[pb]
mkApps (tConstruct i k u') args'

| cumul_Sort : forall s s',
compare_universe pb Σ s s' ->
Σ ;;; Γ ⊢ tSort s ≤s[pb] tSort s'

| cumul_Const : forall c u u',
R_universe_instance (compare_universe Conv Σ) u u' ->
Σ ;;; Γ ⊢ tConst c u ≤s[pb] tConst c u'

(* congruence rules *)

| cumul_Evar : forall e args args',
All2 (fun t u => Σ ;;; Γ ⊢ t ≤s[Conv] u) args args' ->
Σ ;;; Γ ⊢ tEvar e args ≤s[pb] tEvar e args'

| cumul_App : forall t t' u u',
Σ ;;; Γ ⊢ t ≤s[pb] t' ->
Σ ;;; Γ ⊢ u ≤s[Conv] u' ->
Σ ;;; Γ ⊢ tApp t u ≤s[pb] tApp t' u'

| cumul_Lambda : forall na na' ty ty' t t',
eq_binder_annot na na' ->
Σ ;;; Γ ⊢ ty ≤s[Conv] ty' ->
Σ ;;; Γ ,, vass na ty ⊢ t ≤s[pb] t' ->
Σ ;;; Γ ⊢ tLambda na ty t ≤s[pb] tLambda na' ty' t'

| cumul_Prod : forall na na' a a' b b',
eq_binder_annot na na' ->
Σ ;;; Γ ⊢ a ≤s[Conv] a' ->
Σ ;;; Γ ,, vass na a ⊢ b ≤s[pb] b' ->
Σ ;;; Γ ⊢ tProd na a b ≤s[pb] tProd na' a' b'

| cumul_LetIn : forall na na' t t' ty ty' u u',
eq_binder_annot na na' ->
Σ ;;; Γ ⊢ t ≤s[Conv] t' ->
Σ ;;; Γ ⊢ ty ≤s[Conv] ty' ->
Σ ;;; Γ ,, vdef na t ty ⊢ u ≤s[pb] u' ->
Σ ;;; Γ ⊢ tLetIn na t ty u ≤s[pb] tLetIn na' t' ty' u'

| cumul_Case indn : forall p p' c c' brs brs',
cumul_predicate (fun Γ t u => Σ ;;; Γ ⊢ t ≤s[Conv] u)
Γ (compare_universe Conv Σ) p p' ->
@@ -74,11 +61,9 @@ Inductive cumulSpec0 {cf : checker_flags} (Σ : global_env_ext) Γ (pb : conv_pb
Σ ;;; Γ ,,, inst_case_branch_context p br ⊢ bbody br ≤s[Conv] bbody br'
) brs brs' ->
Σ ;;; Γ ⊢ tCase indn p c brs ≤s[pb] tCase indn p' c' brs'

| cumul_Proj : forall p c c',
Σ ;;; Γ ⊢ c ≤s[Conv] c' ->
Σ ;;; Γ ⊢ tProj p c ≤s[pb] tProj p c'

| cumul_Fix : forall mfix mfix' idx,
All2 (fun x y =>
Σ ;;; Γ ⊢ x.(dtype) ≤s[Conv] y.(dtype) ×
@@ -87,7 +72,6 @@ Inductive cumulSpec0 {cf : checker_flags} (Σ : global_env_ext) Γ (pb : conv_pb
eq_binder_annot x.(dname) y.(dname)
) mfix mfix' ->
Σ ;;; Γ ⊢ tFix mfix idx ≤s[pb] tFix mfix' idx

| cumul_CoFix : forall mfix mfix' idx,
All2 (fun x y =>
Σ ;;; Γ ⊢ x.(dtype) ≤s[Conv] y.(dtype) ×
@@ -102,25 +86,21 @@ Inductive cumulSpec0 {cf : checker_flags} (Σ : global_env_ext) Γ (pb : conv_pb
(** Beta and iota reduction *)
| cumul_beta : forall na t b a,
Σ ;;; Γ ⊢ tApp (tLambda na t b) a ≤s[pb] b {0 := a}

| cumul_iota : forall ci c u args p brs br,
nth_error brs c = Some br ->
#|args| = (ci.(ci_npar) + context_assumptions br.(bcontext))%nat ->
Σ ;;; Γ ⊢ tCase ci p (mkApps (tConstruct ci.(ci_ind) c u) args) brs ≤s[pb]
iota_red ci.(ci_npar) p args br

| cumul_proj : forall i pars narg args u arg,
nth_error args (pars + narg) = Some arg ->
Σ ;;; Γ ⊢ tProj (i, pars, narg) (mkApps (tConstruct i 0 u) args) ≤s[pb] arg

(** Definitions *)
| cumul_zeta : forall na b t b',
Σ ;;; Γ ⊢ tLetIn na b t b' ≤s[pb] b' {0 := b}

| cumul_rel i body :
option_map decl_body (nth_error Γ i) = Some (Some body) ->
Σ ;;; Γ ⊢ tRel i ≤s[pb] lift0 (S i) body

| cumul_delta : forall c decl body (isdecl : declared_constant Σ c decl) u,
decl.(cst_body) = Some body ->
Σ ;;; Γ ⊢ tConst c u ≤s[pb] body@[u]
@@ -130,12 +110,10 @@ Inductive cumulSpec0 {cf : checker_flags} (Σ : global_env_ext) Γ (pb : conv_pb
unfold_fix mfix idx = Some (narg, fn) ->
is_constructor narg args = true ->
Σ ;;; Γ ⊢ mkApps (tFix mfix idx) args ≤s[pb] mkApps fn args

| cumul_cofix_case : forall ip p mfix idx args narg fn brs,
unfold_cofix mfix idx = Some (narg, fn) ->
Σ ;;; Γ ⊢ tCase ip p (mkApps (tCoFix mfix idx) args) brs
≤s[pb] tCase ip p (mkApps fn args) brs

| cumul_cofix_proj : forall p mfix idx args narg fn,
unfold_cofix mfix idx = Some (narg, fn) ->
Σ ;;; Γ ⊢ tProj p (mkApps (tCoFix mfix idx) args)
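
For reference, the notation ≤s[pb] used throughout this file denotes the declarative cumulativity relation being defined, with the conversion problem pb selecting between conversion (Conv) and cumulativity (Cumul). A sketch of its declaration (exact level, scope and implicit-argument annotations omitted):

(* Sketch (not part of this commit): notation for declarative cumulativity. *)
Notation " Σ ;;; Γ ⊢ t ≤s[ pb ] u " := (cumulSpec0 Σ Γ pb t u)
  (at level 50, Γ, t, u at next level).
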
7 changes: 7 additions & 0 deletions Manuscript/code/PCUICGuard.v
@@ -0,0 +1,7 @@
Inductive FixCoFix : Type := Fix | CoFix.

Axiom guard : FixCoFix -> global_env_ext -> context
-> mfixpoint term -> Prop.

Definition fix_guard := guard Fix.
Definition cofix_guard := guard CoFix.
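
These axioms keep Coq's syntactic guard checking abstract, so that the meta-theory is parametric in the actual guard predicate. As an illustration of how they are meant to be used (a hypothetical sketch, not the literal MetaCoq rule), fix_guard appears as a premise of the typing rule for fixed points:

(* Hypothetical sketch: the actual type_Fix rule in PCUICTyping carries
   further premises (well-formedness of the types and bodies in mfix, ...). *)
(*
| type_Fix : forall mfix n decl,
    fix_guard Σ Γ mfix ->
    nth_error mfix n = Some decl ->
    Σ ;;; Γ |- tFix mfix n : decl.(dtype)
*)
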
16 changes: 16 additions & 0 deletions Manuscript/code/PCUICReduction.v
@@ -0,0 +1,16 @@
Inductive red1 (Σ : global_env) (Γ : context) : term -> term -> Type :=
(** Reductions *)
| red_beta na t b a :
Σ ;;; Γ |- tApp (tLambda na t b) a ~> b {0 := a}

| red_zeta na b t b' :
Σ ;;; Γ |- tLetIn na b t b' ~> b' {0 := b}

(** Congruences *)
| app_red_l M1 N1 M2 : Σ ;;; Γ |- M1 ~> N1 -> Σ ;;; Γ |- tApp M1 M2 ~> tApp N1 M2
| app_red_r M2 N2 M1 : Σ ;;; Γ |- M2 ~> N2 -> Σ ;;; Γ |- tApp M1 M2 ~> tApp M1 N2
where " Σ ;;; Γ |- t ~> u " := (red1 Σ Γ t u).

Definition red Σ Γ := clos_refl_trans (fun t u : term => Σ ;;; Γ |- t ~> u).
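
As a minimal sanity check, and assuming the standard constructors rt_step, rt_refl and rt_trans of clos_refl_trans, every single step is in particular a multi-step reduction; a sketch:

(* Sketch (not part of this commit): [red] contains [red1] via [rt_step]. *)
Example red_of_beta Σ Γ na t b a :
  red Σ Γ (tApp (tLambda na t b) a) (b {0 := a}).
Proof.
  apply rt_step.  (* a single step suffices *)
  apply red_beta. (* the beta rule above *)
Qed.
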
5 changes: 5 additions & 0 deletions Manuscript/code/PCUICSN.v
@@ -0,0 +1,5 @@
(* [Σ] denotes an ambient global environment, bound in the surrounding
   context of the source file. *)
Axiom normalisation :
  wf_ext Σ ->
  forall Γ t,
    welltyped Σ Γ t ->
    Acc (cored Σ Γ) t.
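
Stating normalisation through the accessibility predicate Acc is what licenses well-founded recursion on reduction, as used for instance in the safe conversion checker. The recursion principle it provides is essentially Acc_rect from the standard library; a self-contained sketch:

(* Sketch (not part of this commit): recursion on R-smaller elements is
   total on accessible elements. Instantiated with R := cored Σ Γ, the
   normalisation axiom thus makes reduction-based recursion terminate on
   well-typed terms. *)
Lemma acc_rec (A : Type) (R : A -> A -> Prop) (P : A -> Type)
  (step : forall x, (forall y, R y x -> P y) -> P x) :
  forall x, Acc R x -> P x.
Proof.
  induction 1 as [x hacc ih]. (* induction on the accessibility proof *)
  apply step; exact ih.
Qed.
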
4 changes: 2 additions & 2 deletions Manuscript/gradual-elab.tex
@@ -639,7 +639,7 @@ \subsection{System Definition}
\[\begin{tikzcd}
\tcol{s} \arrow[d, dashed, "\star" very near end] \arrow[r, phantom, "\cons" description] &
\tcol{t} \arrow[d, dashed, "\star" very near end] \\
-\tcol{s'} \arrow[r,phantom,"\acons" description] & \tcol{s'}
+\tcol{s'} \arrow[r,phantom,"\acons" description] & \tcol{t'}
\end{tikzcd}\]
\caption{\kl{Consistent conversion}, as a diagram}
\end{marginfigure}
@@ -1565,7 +1565,7 @@ \subsection{Simulation}
%
For the case of structural precision, the hardest point is of course that of
\kl(red){top-level}, where we use \cref{lem:catchup-lambda,lem:catchup-cons},
-to show that a similar reductions can also happen in $\tcol{t'}$, once the destructed
+to show that a similar reduction can also happen in $\tcol{t'}$, once the destructed
term has properly caught up.

We must also take care when handling the premises of precision where typing is involved.
2 changes: 1 addition & 1 deletion Manuscript/gradual-intro.tex
@@ -52,7 +52,7 @@

In this part, we go over a collaboration with Kenji Maillard, Éric Tanter and Nicolas
Tabareau to address the challenge of gradualizing a full-blown dependently-typed language:
-\kl{CIC} \sidetextcite{LennonBertrand2022}.
+\kl{CIC} \sidecite{LennonBertrand2022}.
\cref{chap:gradual-dependent} gives a general overview of the challenges and trade-offs
involved in \kl(typ){gradual} \kl(typ){dependent} types, culminating with the
\kl{Fire Triangle of Graduality}, which identifies an irreconcilable tension between
2 changes: 2 additions & 0 deletions Manuscript/header.tex
@@ -156,6 +156,8 @@
%----------------------------------------------------------------------------------------
% TABLE OF CONTENTS & LIST OF FIGURES/TABLES
%----------------------------------------------------------------------------------------
\cleardoubleevenemptypage

\knowledgeconfigure{protect links}

\begingroup % Local scope for the following commands
19 changes: 19 additions & 0 deletions Manuscript/knowledge_declarations.tex
@@ -145,6 +145,9 @@

% Chapter 3

\knowledge{notion}
| substitution

% Section 3.2

\knowledge{notion}
@@ -229,6 +232,7 @@
\knowledge{notion}
| algorithmic conversion
| algorithmic@conv
| Algorithmic@conv
| conversion@algo

\knowledge{notion}
@@ -244,6 +248,7 @@
\knowledge{notion}
| one-step reduction
| one-step@red
| One-step reduction

\knowledge{notion}
| weak-head reduction
@@ -302,6 +307,7 @@
\knowledge{notion}
| confluence
| Confluence
| confluent

\knowledge{notion}
| normal form
@@ -341,6 +347,8 @@
| logical consistency
| logically consistent
| Logical consistency
| consistency@log
| consistent@log

\knowledge{notion}
| closed term
@@ -493,8 +501,19 @@
\knowledge{notion}
| conversion problem

\knowledge{notion}
| well-scoped
| well-scoping

\knowledge{notion}
| parallel reduction
| Parallel reduction

\knowledge{notion}
| diamond property

\knowledge{notion}
| best parallel reduct

% Gradual CIC

6 changes: 3 additions & 3 deletions Manuscript/main.tex
@@ -3,7 +3,7 @@
french,english,
fontsize=10pt, % Base font size
twoside=true, % Use different layouts for even and odd pages (in particular, if twoside=true, the margin column will be always on the outside)
-open=any, % If twoside=true, uncomment this to force new chapters to start on any page, not only on right (odd) pages
+%open=any, % If twoside=true, uncomment this to force new chapters to start on any page, not only on right (odd) pages
secnumdepth=2, % How deep to number headings. Defaults to 1 (sections)
numbers=enddot,
]{kaobook/kaobook}
@@ -36,7 +36,7 @@
\usepackage{styles/macros}
\usepackage{styles/alectryon-minted}

-% \includeonly{header,intro-fr,toc,intro-en,technical-intro}
+% \includeonly{technical-intro,metacoq-tour}

\graphicspath{{./figures/}} % Paths where images are looked for

@@ -63,7 +63,7 @@
\include{technical-intro}

\pagelayout{wide} % No margins
-\addpart{Bidirectional typing for dependent types}
+\addpart{Bidirectional Calculus of Inductive Constructions}
\label{part:bidir}
\pagelayout{margin} % Restore margins

38 changes: 22 additions & 16 deletions Manuscript/metacoq-intro.tex
@@ -1,23 +1,27 @@
\kl{Coq} is a very complex tool. Even its \kl{kernel}, which is but a very small
-fraction of the tool, is already quite complex, relying on subtle implicit invariants which
+fraction of it, is already quite complex: it relies on subtle implicit invariants, which
might not be properly maintained, especially when the code evolves.
In practice, around one critical bug is found every year.%
\sidenote{A \href{https://github.com/coq/coq/blob/master/dev/doc/critical-bugs}{compilation}
of those is maintained by \kl{Coq}’s development team.}
-Although it is not easy to exploit these to actually derive an inconsistency, even less
-so inadvertedly, simply relying on the \kl{De Bruijn criterion} is not enough if
-one wants to have a maximal trust in the kernel – and thus \kl{Coq} as a whole.
+Although it is in practice generally difficult to exploit these
+and actually derive an inconsistency,
+even less so inadvertently, simply relying on the \kl{De Bruijn criterion}%
+\sidenote{Keeping a small, trusted kernel that is the only one responsible for the validity
+of proofs.}
+is not enough if one wants to trust \kl{Coq}.

Although \kl{CIC} is well-understood and has been widely studied,
this is much less true of the type theory which is actually implemented, \kl{PCUIC}.
This is why bugs usually creep in with the extra level of complexity added by \kl{PCUIC},
which is rarely handled in detail in paper proofs.%
\sidenote{This is for instance the case of the completeness issue
explained in \cref{sec:bidir-pcuic-inductives}.}

-This all begs for a precise investigation of \kl{PCUIC}, from the subtleties of the
+This begs for a precise investigation of \kl{PCUIC}, from the heights of the
type system’s meta-theory, all the way down to the sophisticated details of the implementation.
-Due to the level of complexity of the endeavour, it is not feasible on paper. Nor is it
-desirable: if in the end we wish to write down a certified kernel, it is natural to do so
+Due to the complexity of the endeavour, it is not feasible on paper. Nor is it
+desirable: if in the end we wish to implement a certified kernel, it is natural to do so
in a proof assistant, so that we can run that certified implementation.
Instead, the natural framework is the \kl{MetaCoq} project,
which aims at giving tools to reify and manipulate \kl{Coq} terms%
@@ -32,25 +36,27 @@
The first pertains to Gödel’s second incompleteness
theorem. Because of it, it is impossible to prove \kl{Coq}’s consistency inside \kl{Coq} itself,
meaning that the meta-theoretical study can only be partial, since otherwise it would allow
-a proof of consistency contradicting Gödel’s theorem. Indeed, \kl{MetaCoq} relies on a single
+a proof of consistency contradicting Gödel’s theorem. Indeed, \kl{MetaCoq} relies on an
axiom, asserting the \kl{normalization} of \kl{PCUIC},
-which is the blind spot we must allow in order to circumvent this limitation.
+which is the blind spot we must allow in order to circumvent Gödel’s limitation.
A second is that writing down a certified kernel is not enough. Indeed, executing directly
-such a kernel in \kl{Coq} would be much too slow to actually be able to type-check
-any reasonable term. Rather, we must rely on extraction, a procedure which erases the
+such a kernel in \kl{Coq} would be much too slow to actually type-check
+any reasonably-sized term. Rather, we must rely on extraction, a procedure which erases the
proof-related content of a certified program to only keep the algorithmically relevant one.
As this erasure itself is a complex transformation, \kl{MetaCoq} also incorporates a certified
erasure procedure.

-In this part, I shall describe the portion of \kl{MetaCoq} which is relevant to the thesis.
-\Cref{chap:metacoq-general} gives a general overview of the meta-theoretical study of \kl{PCUIC},
-with the main definitions and properties.
+In this part of the thesis,
+I shall describe the portion of \kl{MetaCoq} which is relevant to it.
+\Cref{chap:metacoq-general} gives a general overview of
+the meta-theory of \kl{PCUIC}, with the main definitions, properties, and proof ideas.
My technical contributions to this part of the development are relatively minor,
-mainly consisting of small patches. However, since I reuse quite a lot of those properties
+mainly consisting of small patches. However, since I rely on that formalization
in my main contributions, it seems fitting to at least say a word on it.

\Cref{chap:kernel-correctness} concentrates on the formalization of bidirectional typing, as
presented in \arefpart{bidir}, and on the proof of correctness and completeness of
-the kernel implementation based on it. It contains my main technical
+the kernel implementation based on it. This is my main technical
contributions to the \kl{MetaCoq} project.

Throughout the part, source files of the \kl{MetaCoq} project