Implement better entrypoint
Ailrun committed Sep 5, 2024
1 parent 6f9246f commit dad711c
Showing 17 changed files with 193 additions and 87 deletions.
16 changes: 10 additions & 6 deletions driver/dune
@@ -1,13 +1,17 @@
(include_subdirs no)

(library
(name mcltt)
(modules main parser lexer test)
(inline_tests)
(preprocess
(pps ppx_inline_test)))
(name Mcltt)
(modules main lexer)
(libraries MclttExtracted)
; (inline_tests)
; (preprocess
; (pps ppx_inline_test))
)

(env
(dev
(flags
(:standard -w -67 -w -32 -w -39))))
(:standard -w -67 -w -32 -w -33 -w -39))))

(ocamllex lexer)
3 changes: 3 additions & 0 deletions driver/extracted/.gitignore
@@ -0,0 +1,3 @@
# Generated Files
*.ml
*.mli
2 changes: 2 additions & 0 deletions driver/extracted/dune
@@ -0,0 +1,2 @@
(library
(name MclttExtracted))
49 changes: 33 additions & 16 deletions driver/lexer.mll
@@ -1,22 +1,39 @@
{
open Parser
open Lexing
open MclttExtracted.Parser

let get_position lexbuf = (lexbuf.lex_start_p, lexbuf.lex_curr_p)
}

let string = ['a'-'z']+
let string = ['a'-'z''A'-'Z']+

rule read =
parse
| '(' { LPAREN () }
| ')' { RPAREN () }
| '.' { DOT () }
| ':' { COLON () }
| "zero" { ZERO () }
| "succ " { SUCC () }
| "fun" { LAMBDA () }
| "pi" { PI () }
| [' ' '\t' '\n' ] { read lexbuf }
| "Nat" { NAT () }
| ['0'-'9']+ as lxm { INT (int_of_string lxm) }
| "Type" { TYPE () }
| string { VAR (Lexing.lexeme lexbuf) }
| eof { EOF () }
| "->" { ARROW (get_position lexbuf) }
| '@' { AT (get_position lexbuf) }
| '|' { BAR (get_position lexbuf) }
| ':' { COLON (get_position lexbuf) }
| ',' { COMMA (get_position lexbuf) }
| "=>" { DARROW (get_position lexbuf) }
| "(*" { comment lexbuf }
| '(' { LPAREN (get_position lexbuf) }
| ')' { RPAREN (get_position lexbuf) }
| "zero" { ZERO (get_position lexbuf) }
| "succ" { SUCC (get_position lexbuf) }
| "rec" { REC (get_position lexbuf) }
| "return" { RETURN (get_position lexbuf) }
| "end" { END (get_position lexbuf) }
| "fun" { LAMBDA (get_position lexbuf) }
| "forall" { PI (get_position lexbuf) }
| [' ' '\t'] { read lexbuf }
| ['\n'] { new_line lexbuf; read lexbuf }
| "Nat" { NAT (get_position lexbuf) }
| ['0'-'9']+ as lxm { INT (get_position lexbuf, int_of_string lxm) }
| "Type" { TYPE (get_position lexbuf) }
| string { VAR (get_position lexbuf, Lexing.lexeme lexbuf) }
| eof { EOF (get_position lexbuf) }
| _ as c { failwith (Printf.sprintf "unexpected character: %C at line %d, column %d" c lexbuf.lex_start_p.pos_lnum (lexbuf.lex_start_p.pos_cnum - lexbuf.lex_start_p.pos_bol)) }
and comment =
parse
| "*)" { read lexbuf }
| _ { comment lexbuf }
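
The reworked lexer attaches source positions to every token, tracks line numbers when skipping newlines, and consumes (* ... *) comments. A minimal sketch, not part of this commit, of draining it over an in-memory string; the helper name tokens_of_string is made up, and it assumes the token constructors such as EOF are reachable through MclttExtracted.Parser as the lexer header above suggests:

(* Collect all tokens of a string until EOF; illustrative only. *)
let tokens_of_string (text : string) =
  let lexbuf = Lexing.from_string text in
  let rec go acc =
    match Lexer.read lexbuf with
    | MclttExtracted.Parser.EOF _ as tok -> List.rev (tok :: acc)
    | tok -> go (tok :: acc)
  in
  go []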
18 changes: 16 additions & 2 deletions driver/main.ml
@@ -1,11 +1,25 @@
open Parser.MenhirLibParser.Inter
module Parser = MclttExtracted.Parser
module Entrypoint = MclttExtracted.Entrypoint
open Parser
open MenhirLibParser.Inter
open Entrypoint

let parse text =
(* Before parsing, we must generate a token stream from a lexer buffer,
which we then feed into the parser. *)
let rec loop lexbuf = lazy (Buf_cons (Lexer.read lexbuf, loop lexbuf)) in
let token_stream = loop (Lexing.from_string text) in
match Parser.prog 50 token_stream with
| Parsed_pr (e, _) -> Some e
| Parsed_pr ((exp, _typ), _) -> Some exp
| Fail_pr_full (_, _) -> None
| _ -> None

let main () =
print_string "File path to load: ";
let fp = read_line () in
let chan = open_in fp in
(* Before parsing, we must generate a token stream from a lexer buffer,
which we then feed into the parser. *)
let rec loop lexbuf = lazy (Buf_cons (Lexer.read lexbuf, loop lexbuf)) in
let token_stream = loop (Lexing.from_channel chan) in
Entrypoint.main 50 token_stream
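
The driver now builds the Menhir token buffer lazily from a channel and hands everything to the extracted Entrypoint.main rather than stopping at parsing. A sketch, not part of this commit, of reusing the same plumbing for in-memory input; stream_of_string is a hypothetical helper, and Buf_cons is the constructor brought into scope from MenhirLibParser.Inter above:

(* Build the same lazy token stream from a string; illustrative only. *)
let stream_of_string text =
  let lexbuf = Lexing.from_string text in
  let rec loop () = lazy (Buf_cons (Lexer.read lexbuf, loop ())) in
  loop ()

(* e.g. Entrypoint.main 50 (stream_of_string some_program_text) *)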
2 changes: 1 addition & 1 deletion driver/test.ml
@@ -1,7 +1,7 @@
(* Unit test cases for parsing *)

open Main
open Parser.Cst
open Entrypoint.Cst

let x, y, z = ("x", "y", "z")

2 changes: 1 addition & 1 deletion dune-project
@@ -1,3 +1,3 @@
(lang dune 3.4)
(lang dune 3.16)
(using menhir 2.1)
(name mcltt)
21 changes: 8 additions & 13 deletions theories/CoqMakefile.mk.local-late
@@ -6,24 +6,19 @@ MENHIR := menhir
COQMAKEFILE := CoqMakefile.mk
COQPROJECTFILE := _CoqProject
PARSERBASE := parser.ml
POSTPARSERBASE := entrypoint.ml
PARSERFILE := ../driver/$(PARSERBASE)
PARSEREXTRACTIONDIR := Frontend
PARSEREXTRACTIONCOQFILE := $(PARSEREXTRACTIONDIR)/ParserExtraction.v
PARSEREXTRACTIONRESULTFILE := $(PARSEREXTRACTIONDIR)/$(PARSERBASE)
POSTPARSERFILE := ../driver/$(POSTPARSERBASE)
FRONTENDDIR := Frontend
PARSEREXTRACTIONCOQFILE := $(FRONTENDDIR)/ParserExtraction.v
PARSEREXTRACTIONRESULTFILE := $(FRONTENDDIR)/$(PARSERBASE)
POSTPARSEREXTRACTIONCOQFILE := $(FRONTENDDIR)/PostParserExtraction.v
POSTPARSEREXTRACTIONRESULTFILE := $(FRONTENDDIR)/$(POSTPARSERBASE)
COQPARSERFILE := $(patsubst %.vy,%.v,$(shell find ./ -name '*.vy'))
COQFILES := $(sort $(shell find ./ -name '*.v') $(COQPARSERFILE))

post-all:: $(COQPARSERFILE)
@+$(MAKE) "$(PARSERFILE)"
@echo "Separate Extraction main." | $(COQTOP) $(COQFLAGS) $(COQLIBS) -topfile Entrypoint.v -l Entrypoint.v

$(COQPARSERFILE): %.v : %.vy
$(MENHIR) --coq "$?"

$(PARSERBASE): $(PARSEREXTRACTIONCOQFILE)
@+$(MAKE) -f "$(COQMAKEFILE)" -B "$(patsubst %.v,%.vo,$?)"

$(PARSERFILE): $(PARSEREXTRACTIONCOQFILE)
@+$(MAKE) "$(PARSERBASE)"
@echo "MOVE $(PARSERBASE) => $@"
@mv "$(PARSERBASE)" "$@"
@mv "$(patsubst %.ml,%.mli,$(PARSERBASE))" "$(patsubst %.ml,%.mli,$@)"
2 changes: 1 addition & 1 deletion theories/Core/Completeness/NatCases.v
@@ -561,7 +561,7 @@ Proof.
apply_relation_equivalence.
(on_all_hyp: fun H => directed destruct (H _ _ HinΔℕ)).
destruct_by_head per_univ.
unshelve epose proof (IHequiv_m_m' _ _ equiv_ρ_ρ' _ _) as [? [? [? []]]]; shelve_unifiable; only 4: solve [mauto]; eauto.
unshelve epose proof (IHequiv_m_m' _ _ equiv_ρ_ρ' _ _ _ _ _ _ _) as [? [? [? []]]]; shelve_unifiable; only 4: solve [mauto]; eauto.
handle_per_univ_elem_irrel.
match goal with
| _: {{ rec m ⟦return A | zero -> MZ | succ -> MS end⟧ ~_ ↘ ~?r0 }},
13 changes: 8 additions & 5 deletions theories/Core/Soundness/FunctionCases.v
@@ -280,10 +280,13 @@ Proof.
match_by_head per_univ_elem ltac:(fun H => directed invert_per_univ_elem H).
inversion_clear_by_head pi_glu_exp_pred.
match goal with
| _: {{ ⟦ A ⟧ ρ ↘ ~?a' }},
| _: glu_univ_elem i ?P' ?El' a,
_: {{ ⟦ A ⟧ ρ ↘ ~?a' }},
_: {{ ⟦ N ⟧ ρ ↘ ~?n' }} |- _ =>
rename a' into a;
rename n' into n
rename n' into n;
rename P' into Pa;
rename El' into Ela
end.
assert {{ Dom a ≈ a ∈ per_univ i }} as [] by mauto 3.
handle_per_univ_elem_irrel.
@@ -302,15 +305,15 @@ Proof.
assert {{ DG b ∈ glu_univ_elem i ↘ OP n equiv_n ↘ OEl n equiv_n }} by mauto 3.
handle_functional_glu_univ_elem.
assert {{ Δ ⊢w Id : Δ }} by mauto 3.
assert {{ Δ ⊢ IT[Id] ® P1 }} by mauto 2.
assert {{ Δ ⊢ IT[Id] ® Pa }} by mauto 2.
assert {{ Δ ⊢ IT[Id] : Type@i }} by (eapply glu_univ_elem_univ_lvl; revgoals; eassumption).
assert {{ Δ ⊢ IT : Type@i }} by mauto 2.
assert {{ Δ ⊢ IT[Id] ≈ A[σ] : Type@i }} as HAeq by (eapply glu_univ_elem_typ_unique_upto_exp_eq'; revgoals; eassumption).
assert {{ Δ ⊢ N[σ] : IT[Id] ® n ∈ El1 }} by (rewrite HAeq; eassumption).
assert {{ Δ ⊢ N[σ] : IT[Id] ® n ∈ Ela }} by (rewrite HAeq; eassumption).
assert (exists mn : domain, {{ $| m & n |↘ mn }} /\ {{ Δ ⊢ M[σ][Id] N[σ] : OT[Id,,N[σ]] ® mn ∈ OEl n equiv_n }}) as [] by mauto 2.
destruct_conjs.
functional_eval_rewrite_clear.
assert {{ Δ ⊢ N[σ] : A[σ][Id] ® n ∈ El1 }} by (autorewrite with mcltt; eassumption).
assert {{ Δ ⊢ N[σ] : A[σ][Id] ® n ∈ Ela }} by (autorewrite with mcltt; eassumption).
assert {{ Δ ⊢s σ∘Id,,N[σ] ® ρ ↦ n ∈ SbΓA }} as Hcons by (unfold SbΓA; mauto 2).
(on_all_hyp: destruct_glu_rel_by_assumption SbΓA).
simplify_evals.
10 changes: 5 additions & 5 deletions theories/Core/Soundness/LogicalRelation/Lemmas.v
@@ -522,9 +522,9 @@ Proof.
simpl in *.
autorewrite with mcltt in *; mauto 3.
}
assert {{ Δ ⊢ IT[σ] ≈ IT'[σ] : Type@i }} by mauto 4 using glu_univ_elem_per_univ_typ_escape.
assert {{ Δ ⊢ N : IT[σ] ® n ∈ IEl }} by (simpl; bulky_rewrite1; eassumption).
assert (exists mn : domain, {{ $| m & n |↘ mn }} /\ {{Δ ⊢ M[σ] N : OT[σ,,N] ® mn ∈ OEl n equiv_n }}) by mauto 3.
assert {{ Δ ⊢ IT'[σ] ≈ IT[σ] : Type@i }} by (symmetry; mauto 4 using glu_univ_elem_per_univ_typ_escape).
assert {{ Δ ⊢ N : IT'[σ] ® n ∈ IEl }} by (simpl; bulky_rewrite1; eassumption).
assert (exists mn : domain, {{ $| m & n |↘ mn }} /\ {{Δ ⊢ M[σ] N : OT'[σ,,N] ® mn ∈ OEl n equiv_n }}) by mauto 3.
destruct_conjs.
eexists; split; mauto 3.
match_by_head per_univ_elem ltac:(fun H => directed invert_per_univ_elem H).
@@ -538,9 +538,9 @@ Proof.
end.
assert {{ DG b ∈ glu_univ_elem i ↘ OP n equiv_n ↘ OEl n equiv_n }} by mauto 3.
assert {{ DG b' ∈ glu_univ_elem i ↘ OP' n equiv_n ↘ OEl' n equiv_n }} by mauto 3.
assert {{ Δ ⊢ OT[σ,,N] ® OP n equiv_n }} by (eapply glu_univ_elem_trm_typ; mauto 3).
assert {{ Δ ⊢ OT'[σ,,N] ® OP n equiv_n }} by (eapply glu_univ_elem_trm_typ; mauto 3).
assert {{ Sub b <: b' at i }} by mauto 3.
assert {{ Δ ⊢ OT[σ,,N] ⊆ OT'[σ,,N] }} by mauto 3.
assert {{ Δ ⊢ OT'[σ,,N] ⊆ OT[σ,,N] }} by mauto 3.
intuition.
Qed.

4 changes: 2 additions & 2 deletions theories/Core/Soundness/Realizability.v
@@ -48,7 +48,7 @@ Proof.
apply wf_sub_id_inversion in Hτ.
pose proof (wf_ctx_sub_length _ _ Hτ).
transitivity {{{ #(length Γ1)[Id] }}}; [mauto 3 |].
replace (length Γ) with (length (Γ1 ++ {{{ Γ2, A0 }}})) by eassumption.
replace (length Γ) with (length (Γ1 ++ {{{ Γ2, A0 }}})) by lia.
rewrite var_arith, H.
bulky_rewrite.
- pose proof (app_ctx_vlookup _ _ _ _ HΔ0 eq_refl) as Hvar.
@@ -65,7 +65,7 @@ Proof.
rewrite <- @exp_eq_sub_compose_typ; mauto 2.
deepexec wf_ctx_sub_ctx_lookup ltac:(fun H => destruct H as [Γ1' [? [Γ2' [? [-> [? [-> []]]]]]]]).
repeat rewrite List.app_length in *.
replace (length Γ1) with (length Γ1') in * by eassumption.
replace (length Γ1) with (length Γ1') in * by lia.
clear_refl_eqs.
replace (length Γ2) with (length Γ2') by (simpl in *; lia).

Expand Down
53 changes: 53 additions & 0 deletions theories/Entrypoint.v
@@ -0,0 +1,53 @@
From Coq Require Extraction.
From Coq Require Import ExtrOcamlBasic ExtrOcamlNatInt ExtrOcamlNativeString.

From Equations Require Import Equations.

From Mcltt.Core Require Import Base.
From Mcltt.Core.Syntactic Require Import SystemOpt.
From Mcltt.Extraction Require Import TypeCheck.
From Mcltt.Frontend Require Import Elaborator Parser.
Import MenhirLibParser.Inter.
Import Syntax_Notations.

Variant main_result :=
| AllGood : forall A M, {{ ⋅ ⊢ M : A }} -> main_result
| TypeCheckingFailure : forall A M, ~ {{ ⋅ ⊢ M : A }} -> main_result
| ElaborationFailure : forall cst, elaborate cst nil = None -> main_result
| ParserFailure : Aut.state -> Aut.Gram.token -> main_result
| ParserTimeout : nat -> main_result
.

Definition inspect {A} (x : A) : { y | x = y } := exist _ x eq_refl.
Extraction Inline inspect.

#[local]
Ltac impl_obl_tac := try eassumption.
#[tactic="impl_obl_tac"]
Equations main (log_fuel : nat) (buf : buffer) : main_result :=
| log_fuel, buf with Parser.prog log_fuel buf => {
| Parsed_pr (cst_exp, cst_typ) _ with inspect (elaborate cst_typ nil) => {
| exist _ (Some A) _ with inspect (elaborate cst_exp nil) => {
| exist _ (Some M) _ with type_check_closed A _ M _ => {
| left _ => AllGood A M _
| right _ => TypeCheckingFailure A M _
}
| exist _ None _ => ElaborationFailure cst_exp _
}
| exist _ None _ => ElaborationFailure cst_typ _
}
| Fail_pr_full s t => ParserFailure s t
| Timeout_pr => ParserTimeout log_fuel
}
.
Next Obligation.
eapply elaborator_gives_user_exp; eassumption.
Qed.
Next Obligation.
eapply elaborator_gives_user_exp; eassumption.
Qed.

Extraction Language OCaml.

Set Extraction Flag 1007.
Set Extraction Output Directory "../driver/extracted".
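
After extraction, main_result arrives in the driver as an ordinary OCaml variant, so the driver can report the outcome by pattern matching. The sketch below is not part of this commit: it assumes the constructors are exposed through MclttExtracted.Entrypoint, uses wildcard patterns because the constructor arities after proof erasure are not pinned down here, and relies on ExtrOcamlNatInt mapping the fuel to an OCaml int.

(* Hypothetical reporting helper for the extracted result; illustrative only. *)
let report (r : MclttExtracted.Entrypoint.main_result) =
  let open MclttExtracted.Entrypoint in
  match r with
  | AllGood _ -> print_endline "type checking succeeded"
  | TypeCheckingFailure _ -> print_endline "term does not have the given type"
  | ElaborationFailure _ -> print_endline "elaboration failed"
  | ParserFailure _ -> print_endline "parse error"
  | ParserTimeout fuel -> Printf.printf "parser ran out of fuel (%d)\n" fuel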
28 changes: 13 additions & 15 deletions theories/Frontend/Elaborator.v
@@ -39,7 +39,7 @@ Fixpoint elaborate (cst : Cst.obj) (ctx : list string) : option exp :=
| None => None
end
| Cst.natrec n mx m z sx sr s =>
match elaborate m (mx :: ctx), elaborate z ctx, elaborate s (sx :: sr :: ctx), elaborate n ctx with
match elaborate m (mx :: ctx), elaborate z ctx, elaborate s (sr :: sx :: ctx), elaborate n ctx with
| Some m, Some z, Some s, Some n => Some (a_natrec m z s n)
| _, _, _, _ => None
end
@@ -180,19 +180,17 @@ Lemma lookup_bound s : forall ctx m, lookup s ctx = Some m -> m < (List.length c
+ simpl in H.
destruct string_dec in H.
* contradiction n.
* destruct (lookup s ctx).
--inversion H.
rewrite H1.
simpl.
pose (IHctx (m-1)).
rewrite <- H1 in l.
rewrite (Nat.add_sub n1 1) in l.
rewrite <- H1.
rewrite Nat.add_1_r.
apply (Arith_prebase.lt_n_S_stt (n1) (List.length ctx)).
apply l.
reflexivity.
--discriminate H.
* destruct (lookup s ctx);
try discriminate.
inversion H.
rewrite H1.
simpl.
pose (IHctx (m-1)).
rewrite <- H1 in l.
rewrite (Nat.add_sub n1 1) in l.
rewrite <- H1.
specialize (l eq_refl).
lia.
Qed.

Import StrSProp.Dec.
@@ -221,7 +219,7 @@ Proof.
- assert (cst_variables cst1 [<=] StrSProp.of_list ctx) by fsetdec.
assert (cst_variables cst2 [<=] StrSProp.of_list (s :: ctx)) by (simpl; fsetdec).
assert (cst_variables cst3 [<=] StrSProp.of_list ctx) by fsetdec.
assert (cst_variables cst4 [<=] StrSProp.of_list (s0 :: s1 :: ctx)) by (simpl; fsetdec).
assert (cst_variables cst4 [<=] StrSProp.of_list (s1 :: s0 :: ctx)) by (simpl; fsetdec).
destruct (IHcst1 _ H0) as [ast [-> ?]];
destruct (IHcst2 _ H1) as [ast' [-> ?]];
destruct (IHcst3 _ H2) as [ast'' [-> ?]];