From 3c2a1df721a3419f9428062ac15dd877a7a931c2 Mon Sep 17 00:00:00 2001
From: Pierre Rousselin <119015057+Villetaneuse@users.noreply.github.com>
Date: Fri, 1 Dec 2023 20:44:19 +0100
Subject: [PATCH] Adapt to Coq/Coq#18164 (#103)

---
 Bedrock/Word.v | 1 -
 src/ADTRefinement/FixedPoint.v | 5 +-
 .../Extraction/External/Lists.v | 4 +-
 .../QueryStructures/CallRules/TupleList.v | 4 +-
 .../QueryStructures/QueryStructures.v | 2 +-
 src/Common.v | 7 +-
 src/Common/BoundedLookup.v | 4 +-
 src/Common/Enumerable.v | 4 +-
 src/Common/Equality.v | 7 +-
 src/Common/FixedPoints.v | 4 +-
 src/Common/Frame.v | 12 +-
 src/Common/Le.v | 11 +-
 src/Common/List/FlattenList.v | 2 +-
 src/Common/List/ListFacts.v | 34 ++---
 src/Common/List/Operations.v | 2 +-
 src/Common/NatFacts.v | 31 ++--
 src/Common/StringFacts.v | 35 ++---
 src/Common/Wf.v | 13 +-
 src/Computation/Decidable.v | 4 +-
 src/Computation/ListComputations.v | 10 +-
 src/Narcissus/Automation/Decision.v | 8 +-
 .../AbstractInterpretation/NonTerminalMap.v | 2 +-
 src/Parsers/BaseTypesLemmas.v | 6 +-
 src/Parsers/BooleanRecognizerOptimized.v | 57 ++++----
 src/Parsers/BooleanRecognizerTests.v | 2 +-
 src/Parsers/ContextFreeGrammar/Carriers.v | 2 +-
 .../ContextFreeGrammar/Fix/Definitions.v | 2 +-
 src/Parsers/ContextFreeGrammar/Fix/Fix.v | 6 +-
 src/Parsers/ContextFreeGrammar/Fold.v | 2 +-
 src/Parsers/ContextFreeGrammar/Properties.v | 2 +-
 .../ContextFreeGrammar/ValidReflective.v | 3 +-
 src/Parsers/ExtrOcamlParsers.v | 2 +-
 src/Parsers/GenericCorrectnessBaseTypes.v | 2 +-
 src/Parsers/GenericRecognizer.v | 17 +--
 src/Parsers/GenericRecognizerMin.v | 130 ++++++++---------
 src/Parsers/GenericRecognizerOptimized.v | 19 ++-
 .../GenericRecognizerOptimizedTactics.v | 16 +-
 src/Parsers/MinimalParseOfParse.v | 66 ++++-----
 src/Parsers/ParserImplementation.v | 4 +-
 .../All/MinimalReachableOfReachable.v | 36 ++---
 .../Reachable/MaybeEmpty/MinimalOfCore.v | 32 ++--
 src/Parsers/Reachable/MaybeEmpty/OfParse.v | 4 +-
 .../OnlyFirst/MinimalReachableOfReachable.v | 36 ++---
 .../OnlyLast/MinimalReachableOfReachable.v | 36 ++---
 src/Parsers/Reachable/ParenBalanced/Core.v | 4 +-
 .../Reachable/ParenBalanced/MinimalOfCore.v | 24 +--
 src/Parsers/Reachable/ParenBalanced/OfParse.v | 2 +-
 .../ParenBalancedHiding/MinimalOfCore.v | 28 ++--
 src/Parsers/RecognizerPreOptimized.v | 31 ++--
 .../Refinement/BinOpBrackets/BinOpRules.v | 42 +++---
 .../Refinement/BinOpBrackets/MakeBinOpTable.v | 16 +-
 .../Refinement/BinOpBrackets/ParenBalanced.v | 12 +-
 .../BinOpBrackets/ParenBalancedGrammar.v | 4 +-
 .../BinOpBrackets/ParenBalancedLemmas.v | 4 +-
 src/Parsers/Refinement/DisjointLemmas.v | 5 +-
 src/Parsers/Refinement/DisjointRules.v | 20 +--
 src/Parsers/Refinement/DisjointRulesRev.v | 4 +-
 src/Parsers/Refinement/EmptyLemmas.v | 6 +-
 src/Parsers/Refinement/FixedLengthLemmas.v | 18 +--
 ...IndexedAndAtMostOneNonTerminalReflective.v | 137 +++++++++---------
 ...exedAndAtMostOneNonTerminalReflectiveOpt.v | 4 +-
 src/Parsers/Refinement/PreTactics.v | 9 +-
 src/Parsers/Reflective/LogicalRelations.v | 2 +-
 src/Parsers/Reflective/ParserSemantics.v | 7 +-
 src/Parsers/Reflective/PartialUnfold.v | 2 +-
 src/Parsers/Reflective/Reify.v | 2 +-
 src/Parsers/Reflective/Semantics.v | 2 +-
 .../Reflective/SyntaxEquivalenceReflective.v | 2 +-
 src/Parsers/SimpleRecognizerCorrect.v | 6 +-
 src/Parsers/SplitterFromParserADT.v | 6 +-
 src/Parsers/Splitters/BruteForce.v | 6 +-
 src/Parsers/Splitters/RDPList.v | 48 +++---
 src/Parsers/StringLike/FirstChar.v | 7 +-
 src/Parsers/StringLike/FirstCharSuchThat.v | 9 +-
 src/Parsers/StringLike/ForallChars.v | 5
+- src/Parsers/StringLike/LastChar.v | 5 +- src/Parsers/StringLike/LastCharSuchThat.v | 17 +-- src/Parsers/StringLike/OcamlString.v | 29 ++-- src/Parsers/StringLike/Properties.v | 77 +++++----- src/Parsers/StringLike/String.v | 22 +-- .../Constraints/ConstraintChecksRefinements.v | 14 +- .../DataStructures/Bags/BagsProperties.v | 2 +- .../DataStructures/Bags/CountingListBags.v | 4 +- .../DataStructures/Bags/ListBags.v | 2 +- .../DataStructures/Bags/RangeTreeBags.v | 9 +- .../DataStructures/Bags/TreeBags.v | 9 +- .../DataStructures/Bags/TrieBags.v | 30 ++-- .../Operations/General/InsertRefinements.v | 1 - .../Representation/QueryStructure.v | 2 +- 89 files changed, 688 insertions(+), 697 deletions(-) diff --git a/Bedrock/Word.v b/Bedrock/Word.v index 8f5729642..0e8b8ab88 100644 --- a/Bedrock/Word.v +++ b/Bedrock/Word.v @@ -2,7 +2,6 @@ Require Export Fiat.Common.Coq__8_4__8_5__Compat. (** Fixed precision machine words *) Require Import Coq.Arith.Arith - Coq.Arith.Div2 Coq.NArith.NArith Coq.Bool.Bool Coq.ZArith.ZArith. diff --git a/src/ADTRefinement/FixedPoint.v b/src/ADTRefinement/FixedPoint.v index c32e56230..2c2138438 100644 --- a/src/ADTRefinement/FixedPoint.v +++ b/src/ADTRefinement/FixedPoint.v @@ -1,6 +1,8 @@ Require Export Fiat.Common.Coq__8_4__8_5__Compat. Require Import Fiat.ADT Fiat.ADTNotation. Require Export Fiat.Computation.FixComp. +Require Import Coq.ZArith.ZArith. +Require Import Coq.Arith.PeanoNat. Import LeastFixedPointFun. @@ -113,9 +115,8 @@ Lemma length_wf' : forall A len l, Proof. induction len; simpl; intros; constructor; intros. - contradiction (NPeano.Nat.nlt_0_r _ H). + contradiction (Nat.nlt_0_r _ H). apply IHlen. - Require Import Coq.ZArith.ZArith. omega. Qed. diff --git a/src/CertifiedExtraction/Extraction/External/Lists.v b/src/CertifiedExtraction/Extraction/External/Lists.v index 06ac5a7ea..71f11a836 100644 --- a/src/CertifiedExtraction/Extraction/External/Lists.v +++ b/src/CertifiedExtraction/Extraction/External/Lists.v @@ -5,7 +5,7 @@ Require Export Bedrock.Platform.Facade.examples.QsADTs. Lemma CompileEmpty_helper {A}: forall (lst : list A) (w : W), Programming.propToWord (lst = @nil A) w -> - ret (bool2w (EqNat.beq_nat (Datatypes.length lst) 0)) ↝ w. + ret (bool2w (Nat.eqb (Datatypes.length lst) 0)) ↝ w. Proof. unfold Programming.propToWord, IF_then_else in *. destruct lst; simpl in *; destruct 1; repeat cleanup; try discriminate; fiat_t. @@ -15,7 +15,7 @@ Hint Resolve CompileEmpty_helper : call_helpers_db. Lemma ListEmpty_helper : forall {A} (seq: list A), - (EqNat.beq_nat (Datatypes.length seq) 0) = match seq with + (Nat.eqb (Datatypes.length seq) 0) = match seq with | nil => true | _ :: _ => false end. diff --git a/src/CertifiedExtraction/Extraction/QueryStructures/CallRules/TupleList.v b/src/CertifiedExtraction/Extraction/QueryStructures/CallRules/TupleList.v index 5fbbe9308..003d2d156 100644 --- a/src/CertifiedExtraction/Extraction/QueryStructures/CallRules/TupleList.v +++ b/src/CertifiedExtraction/Extraction/QueryStructures/CallRules/TupleList.v @@ -123,7 +123,7 @@ Module ADTListCompilation GLabelMap.MapsTo fpointer (Axiomatic Empty) env -> {{ tenv }} Call vtest fpointer (vlst :: nil) - {{ [[`vtest ->> (bool2w (EqNat.beq_nat (Datatypes.length lst) 0)) as _]] :: (DropName vtest tenv) }} ∪ {{ ext }} // env. + {{ [[`vtest ->> (bool2w (Nat.eqb (Datatypes.length lst) 0)) as _]] :: (DropName vtest tenv) }} ∪ {{ ext }} // env. Proof. repeat (SameValues_Facade_t_step || facade_cleanup_call || LiftPropertyToTelescope_t || PreconditionSet_t || injections). 
@@ -144,7 +144,7 @@ Module ADTListCompilation GLabelMap.MapsTo fpointer (Axiomatic Empty) env -> {{ tenv }} Call vtest fpointer (vlst :: nil) - {{ [[`vtest ->> (bool2w (EqNat.beq_nat (Datatypes.length (rev lst)) 0)) as _]] :: tenv }} ∪ {{ ext }} // env. + {{ [[`vtest ->> (bool2w (Nat.eqb (Datatypes.length (rev lst)) 0)) as _]] :: tenv }} ∪ {{ ext }} // env. Proof. intros. setoid_rewrite rev_length. diff --git a/src/CertifiedExtraction/Extraction/QueryStructures/QueryStructures.v b/src/CertifiedExtraction/Extraction/QueryStructures/QueryStructures.v index fa288da04..0d3a597c8 100644 --- a/src/CertifiedExtraction/Extraction/QueryStructures/QueryStructures.v +++ b/src/CertifiedExtraction/Extraction/QueryStructures/QueryStructures.v @@ -154,7 +154,7 @@ Ltac _compile_length := match_ProgOk ltac:(fun prog pre post ext env => match constr:((pre, post)) with - | (?pre, Cons ?k (ret (bool2w (EqNat.beq_nat (Datatypes.length (rev ?seq)) 0))) (fun _ => ?pre')) => + | (?pre, Cons ?k (ret (bool2w (Nat.eqb (Datatypes.length (rev ?seq)) 0))) (fun _ => ?pre')) => let vlst := find_fast (wrap (FacadeWrapper := WrapInstance (H := QS_WrapFiatWTupleList)) seq) ext in match vlst with | Some ?vlst => eapply (WTupleListCompilation.CompileEmpty_spec (idx := 3) (vlst := vlst)) diff --git a/src/Common.v b/src/Common.v index 2d265ddeb..e3ea01bfa 100644 --- a/src/Common.v +++ b/src/Common.v @@ -1,5 +1,4 @@ Require Import Coq.Lists.List. -Require Import Coq.Numbers.Natural.Peano.NPeano. Require Import Coq.ZArith.ZArith Coq.Lists.SetoidList. Require Export Coq.Setoids.Setoid Coq.Classes.RelationClasses Coq.Program.Program Coq.Classes.Morphisms. @@ -1330,7 +1329,7 @@ Definition path_prod' {A B} {x x' : A} {y y' : B} Lemma lt_irrefl' {n m} (H : n = m) : ~n < m. Proof. - subst; apply Lt.lt_irrefl. + subst; apply Nat.lt_irrefl. Qed. Lemma or_not_l {A B} (H : A \/ B) (H' : ~A) : B. @@ -1627,7 +1626,7 @@ Module opt. Definition snd {A B} := Eval compute in @snd A B. Definition andb := Eval compute in andb. Definition orb := Eval compute in orb. - Definition beq_nat := Eval compute in EqNat.beq_nat. + Definition beq_nat := Eval compute in Nat.eqb. Module Export Notations. Delimit Scope opt_bool_scope with opt_bool. @@ -1641,7 +1640,7 @@ Module opt2. Definition snd {A B} := Eval compute in @snd A B. Definition andb := Eval compute in andb. Definition orb := Eval compute in orb. - Definition beq_nat := Eval compute in EqNat.beq_nat. + Definition beq_nat := Eval compute in Nat.eqb. Definition leb := Eval compute in Compare_dec.leb. Module Export Notations. diff --git a/src/Common/BoundedLookup.v b/src/Common/BoundedLookup.v index 29092d83f..cd6dd889a 100644 --- a/src/Common/BoundedLookup.v +++ b/src/Common/BoundedLookup.v @@ -141,7 +141,7 @@ Section BoundedIndex. Fixpoint fin_beq {m n} (p : Fin.t m) (q : Fin.t n) := match p, q with - | @Fin.F1 m', @Fin.F1 n' => EqNat.beq_nat m' n' + | @Fin.F1 m', @Fin.F1 n' => Nat.eqb m' n' | Fin.FS _ _, Fin.F1 _ => false | Fin.F1 _, Fin.FS _ _ => false | Fin.FS _ p', Fin.FS _ q' => fin_beq p' q' @@ -153,7 +153,7 @@ Section BoundedIndex. Proof. intros; pattern n, p, q; eapply Fin.rect2; simpl; intuition; try (congruence || discriminate). - - symmetry; eapply beq_nat_refl. + - eapply Nat.eqb_refl. - eauto using Fin.FS_inj. Qed. diff --git a/src/Common/Enumerable.v b/src/Common/Enumerable.v index 7dd3cee3c..df38dc10b 100644 --- a/src/Common/Enumerable.v +++ b/src/Common/Enumerable.v @@ -20,7 +20,7 @@ Section enum. 
Definition enumerate_fun_beq (enumerate : list A) (f g : A -> B) : bool - := EqNat.beq_nat (List.length (List.filter (fun x => negb (beq (f x) (g x))) enumerate)) 0. + := Nat.eqb (List.length (List.filter (fun x => negb (beq (f x) (g x))) enumerate)) 0. Definition enumerate_fun_bl_in (bl : forall x y, beq x y = true -> x = y) @@ -29,7 +29,7 @@ Section enum. Proof. unfold enumerate_fun_beq. intros H x H'. - apply EqNat.beq_nat_true in H. + apply Nat.eqb_eq in H. apply bl. destruct (beq (f x) (g x)) eqn:Heq; [ reflexivity | exfalso ]. apply Bool.negb_true_iff in Heq. diff --git a/src/Common/Equality.v b/src/Common/Equality.v index 450be0d26..ac9498281 100644 --- a/src/Common/Equality.v +++ b/src/Common/Equality.v @@ -1,5 +1,6 @@ Require Import Coq.Lists.List Coq.Lists.SetoidList. Require Import Coq.Bool.Bool. +Require Import Coq.Arith.PeanoNat. Require Import Coq.Strings.Ascii. Require Import Coq.Strings.String. Require Import Coq.Logic.Eqdep_dec. @@ -379,10 +380,10 @@ Definition internal_prod_dec_lb {A B} eq_A eq_B A_lb B_lb x y H (surjective_pairing _))). -Global Instance nat_BoolDecR : BoolDecR nat := EqNat.beq_nat. -Global Instance nat_BoolDec_bl : BoolDec_bl (@eq nat) := @EqNat.beq_nat_true. +Global Instance nat_BoolDecR : BoolDecR nat := Nat.eqb. +Global Instance nat_BoolDec_bl : BoolDec_bl (@eq nat) := fun n m => (proj1 (Nat.eqb_eq n m)). Global Instance nat_BoolDec_lb : BoolDec_lb (@eq nat) := - fun x y => proj2 (@EqNat.beq_nat_true_iff x y). + fun x y => proj2 (@Nat.eqb_eq x y). Lemma unit_bl {x y} : unit_beq x y = true -> x = y. Proof. apply internal_unit_dec_bl. Qed. diff --git a/src/Common/FixedPoints.v b/src/Common/FixedPoints.v index 3c6472f4f..2477f162b 100644 --- a/src/Common/FixedPoints.v +++ b/src/Common/FixedPoints.v @@ -62,7 +62,7 @@ Section fixedpoints. | _ => omega | [ H : forall a, _ -> _ = _ |- _ ] => rewrite H by omega | [ H : _ < _ |- _ ] => hnf in H - | [ H : S _ <= S _ |- _ ] => apply Le.le_S_n in H + | [ H : S _ <= S _ |- _ ] => apply Nat.succ_le_mono in H | [ H : sizeof ?x <= ?y, H' : ?y <= sizeof (step ?x) |- _ ] => let H'' := fresh in assert (H'' : sizeof (step x) <= sizeof x) by apply step_monotonic; @@ -95,7 +95,7 @@ Section listpair. Lemma step_monotonic lss : sizeof_pair (step lss) <= sizeof_pair lss. Proof. unfold step, sizeof_pair; simpl; - apply Plus.plus_le_compat; + apply Nat.add_le_mono; apply length_filter. Qed. diff --git a/src/Common/Frame.v b/src/Common/Frame.v index e2dafcb70..65681af97 100644 --- a/src/Common/Frame.v +++ b/src/Common/Frame.v @@ -1,7 +1,7 @@ Require Import SetoidClass - Coq.Arith.Max - Coq.Classes.Morphisms. + Coq.Classes.Morphisms + Coq.Arith.PeanoNat. Require Export Fiat.Common.Coq__8_4__8_5__Compat. Generalizable All Variables. @@ -212,7 +212,7 @@ Module PreO. Qed. Definition Nat : t le. - Proof. constructor; [ apply Le.le_refl | apply Le.le_trans ]. + Proof. constructor; [ apply Nat.le_refl | apply Nat.le_trans ]. Qed. Definition discrete (A : Type) : t (@Logic.eq A). @@ -408,7 +408,7 @@ Module PO. constructor; intros. - apply PreO.Nat. - solve_proper. - - apply Le.le_antisym; assumption. + - apply Nat.le_antisymm; assumption. Qed. Definition discrete (A : Type) : t (@Logic.eq A) Logic.eq. @@ -585,8 +585,8 @@ Module JoinLat. Proof. constructor; intros. - apply PO.Nat. - solve_proper. - - constructor. simpl. apply Max.le_max_l. apply Max.le_max_r. - apply Max.max_lub. + - constructor. simpl. apply Nat.le_max_l. apply Nat.le_max_r. + apply Nat.max_lub. Qed. 
(** Max for propositions is the propositional OR, i.e., disjunction *) diff --git a/src/Common/Le.v b/src/Common/Le.v index 08b893fc1..980bb9c12 100644 --- a/src/Common/Le.v +++ b/src/Common/Le.v @@ -1,6 +1,5 @@ (** * Common facts about [≤] *) Require Export Fiat.Common.Coq__8_4__8_5__Compat. -Require Import Coq.Arith.Le. Require Import Coq.ZArith.ZArith. Require Import Coq.Classes.Morphisms. Require Import Coq.Program.Basics. @@ -46,7 +45,7 @@ Proof. induction n; simpl; try reflexivity. rewrite IHn; clear IHn. edestruct le_canonical; [ exfalso | reflexivity ]. - { eapply le_Sn_n; eassumption. } + { eapply Nat.nle_succ_diag_l; eassumption. } Qed. Lemma le_canonical_nS {n m pf} (H : le_canonical n m = left pf) @@ -103,13 +102,13 @@ Proof. omega. Qed. Global Instance le_lt_Proper : Proper (Basics.flip le ==> le ==> impl) lt. Proof. repeat intro. - eapply lt_le_trans; [ | eassumption ]. - eapply le_lt_trans; eassumption. + eapply Nat.lt_le_trans; [ | eassumption ]. + eapply Nat.le_lt_trans; eassumption. Qed. Global Instance le_lt_Proper' : Proper (le ==> Basics.flip le ==> Basics.flip impl) lt. Proof. repeat intro. - eapply lt_le_trans; [ | eassumption ]. - eapply le_lt_trans; eassumption. + eapply Nat.lt_le_trans; [ | eassumption ]. + eapply Nat.le_lt_trans; eassumption. Qed. diff --git a/src/Common/List/FlattenList.v b/src/Common/List/FlattenList.v index 42eebb587..74c46cd3e 100644 --- a/src/Common/List/FlattenList.v +++ b/src/Common/List/FlattenList.v @@ -92,7 +92,7 @@ Section FlattenList. - auto with arith. - unfold compose; rewrite app_length, <- IHseq. - rewrite plus_comm, <- plus_assoc; auto with arith. + rewrite Nat.add_comm, <- Nat.add_assoc; auto with arith. Qed. Lemma length_flatten : diff --git a/src/Common/List/ListFacts.v b/src/Common/List/ListFacts.v index e54018e02..09c578432 100644 --- a/src/Common/List/ListFacts.v +++ b/src/Common/List/ListFacts.v @@ -351,8 +351,8 @@ Section ListFacts. clear IHseq; revert a default; induction seq; simpl; intros; auto with arith. rewrite <- IHseq. - rewrite Plus.plus_comm, <- Plus.plus_assoc; f_equal. - rewrite Plus.plus_comm; reflexivity. + rewrite Nat.add_comm, <- Nat.add_assoc; f_equal. + rewrite Nat.add_comm; reflexivity. Qed. Lemma map_snd {A B C} : @@ -433,7 +433,7 @@ Section ListFacts. Proof. induction seq; simpl; intros; eauto with arith. rewrite IHseq; f_equal; eauto with arith. - repeat rewrite <- Plus.plus_assoc; f_equal; auto with arith. + repeat rewrite <- Nat.add_assoc; f_equal; auto with arith. Qed. Lemma length_flat_map : @@ -603,7 +603,7 @@ Section ListFacts. { destruct y; simpl. { destruct x; simpl; repeat (f_equal; []); try reflexivity; omega. } { rewrite IHls; destruct x; simpl; repeat (f_equal; []); try reflexivity. - rewrite NPeano.Nat.add_succ_r; reflexivity. } } + rewrite Nat.add_succ_r; reflexivity. } } Qed. Lemma in_map_iffT' {A B} @@ -639,7 +639,7 @@ Section ListFacts. (f : A -> nat) (ls : list A) (y : nat) : iffT (In y (map f ls)) { x : A | f x = y /\ In x ls }. Proof. - apply in_map_iffT, NPeano.Nat.eq_dec. + apply in_map_iffT, Nat.eq_dec. Defined. Lemma nth_take_1_drop {A} (ls : list A) n a @@ -670,7 +670,7 @@ Section ListFacts. : drop 1 (drop y ls) = drop (S y) ls. Proof. rewrite drop_drop. - rewrite NPeano.Nat.add_1_r. + rewrite Nat.add_1_r. reflexivity. Qed. @@ -967,7 +967,7 @@ Section ListFacts. 
| [ H : context[option_map _ ?x] |- _ ] => destruct x eqn:?; unfold option_map in H | _ => solve [ repeat (esplit || eassumption) ] | [ H : context[nth_error (_::_) ?x] |- _ ] => is_var x; destruct x; simpl nth_error in H - | [ H : S _ < S _ |- _ ] => apply lt_S_n in H + | [ H : S _ < S _ |- _ ] => apply (proj2 (Nat.succ_lt_mono _ _)) in H | _ => solve [ eauto with nocore ] | [ |- context[if ?b then _ else _] ] => destruct b eqn:? | [ H : ?A -> ?B |- _ ] => let H' := fresh in assert (H' : A) by (assumption || omega); specialize (H H'); clear H' @@ -975,7 +975,7 @@ Section ListFacts. | _ => progress simpl in * | [ H : forall x, ?f x = ?f ?y -> _ |- _ ] => specialize (H _ eq_refl) | [ H : forall x, ?f ?y = ?f x -> _ |- _ ] => specialize (H _ eq_refl) - | [ H : forall n, S n < S _ -> _ |- _ ] => specialize (fun n pf => H n (lt_n_S _ _ pf)) + | [ H : forall n, S n < S _ -> _ |- _ ] => specialize (fun n pf => H n (proj1 (Nat.succ_lt_mono _ _) pf)) | [ H : nth_error nil ?x = Some _ |- _ ] => is_var x; destruct x | [ H : forall m x, nth_error (_::_) m = Some _ -> _ |- _ ] => pose proof (H 0); specialize (fun m => H (S m)) | [ H : or _ _ |- _ ] => destruct H @@ -1106,11 +1106,11 @@ Section ListFacts. induction ls as [|x xs IHxs]. { simpl; intros; destruct (n - offset); reflexivity. } { simpl; intros. - destruct (beq_nat n offset) eqn:H'; - [ apply beq_nat_true in H' - | apply beq_nat_false in H' ]; + destruct (Nat.eqb n offset) eqn:H'; + [ apply Nat.eqb_eq in H' + | apply Nat.eqb_neq in H' ]; subst; - rewrite ?minus_diag; trivial. + rewrite ?Nat.sub_diag; trivial. destruct (n - offset) eqn:H''. { omega. } { rewrite IHxs by omega. @@ -1997,7 +1997,7 @@ Section ListFacts. List.In n ns -> f n <= fold_right (fun n acc => max (f n) acc) 0 ns. Proof. induction ns; intros; inversion H; subst; simpl; - apply NPeano.Nat.max_le_iff; [ left | right ]; auto. + apply Nat.max_le_iff; [ left | right ]; auto. Qed. Lemma fold_right_higher_is_higher {A} @@ -2006,7 +2006,7 @@ Section ListFacts. fold_right (fun n acc => max (f n) acc) 0 ns <= x. Proof. induction ns; simpl; intros; [ apply le_0_n | ]. - apply NPeano.Nat.max_lub. + apply Nat.max_lub. apply H; left; auto. apply IHns; intros; apply H; right; auto. Qed. @@ -2023,8 +2023,8 @@ Section ListFacts. (* if a list is empty, the result of filtering the list with anything will still be empty *) Lemma filter_nil_is_nil {A} : forall (l : list A) (pred : A -> bool), - beq_nat (Datatypes.length l) 0 = true - -> beq_nat (Datatypes.length (filter pred l)) 0 = true. + Nat.eqb (Datatypes.length l) 0 = true + -> Nat.eqb (Datatypes.length (filter pred l)) 0 = true. Proof. induction l; intros; simpl; try inversion H. reflexivity. @@ -2374,7 +2374,7 @@ Section ListFacts. = option_map (fun v => (start + idx, v)) (nth_error ls idx). Proof. revert start idx; induction ls as [|l ls IHls], idx as [|idx]; try reflexivity. - { simpl; rewrite NPeano.Nat.add_0_r; reflexivity. } + { simpl; rewrite Nat.add_0_r; reflexivity. } { simpl; rewrite <- plus_n_Sm, IHls; reflexivity. } Qed. diff --git a/src/Common/List/Operations.v b/src/Common/List/Operations.v index 5669d0fae..e3100b877 100644 --- a/src/Common/List/Operations.v +++ b/src/Common/List/Operations.v @@ -74,7 +74,7 @@ Module Export List. Fixpoint nth'_helper {A} (n : nat) (ls : list A) (default : A) (offset : nat) := match ls with | nil => default - | x::xs => if EqNat.beq_nat n offset + | x::xs => if Nat.eqb n offset then x else nth'_helper n xs default (S offset) end. 
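(* Illustrative sketch (not part of this patch): the hunks up to this point all
   apply the same renaming -- the deprecated [EqNat.beq_nat] and its lemmas are
   replaced by [Nat.eqb] with [Nat.eqb_eq] / [Nat.eqb_neq] / [Nat.eqb_refl]
   from [Coq.Arith.PeanoNat].  A minimal, self-contained example of the new
   spellings; the lemma names [eqb_refl_example] and [eqb_true_example] are
   invented for illustration only. *)
Require Import Coq.Arith.PeanoNat.

Lemma eqb_refl_example (n : nat) : Nat.eqb n n = true.
Proof.
  (* [Nat.eqb_refl] replaces the old [beq_nat_refl], whose statement was the
     symmetric [true = beq_nat n n] (hence the dropped [symmetry] above). *)
  apply Nat.eqb_refl.
Qed.

Lemma eqb_true_example (n m : nat) : Nat.eqb n m = true -> n = m.
Proof.
  (* [proj1 (Nat.eqb_eq n m)] replaces [EqNat.beq_nat_true], exactly as in the
     rewritten instances and tactics in the hunks above. *)
  exact (proj1 (Nat.eqb_eq n m)).
Qed.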
diff --git a/src/Common/NatFacts.v b/src/Common/NatFacts.v index 1a33852cb..724dfb4cd 100644 --- a/src/Common/NatFacts.v +++ b/src/Common/NatFacts.v @@ -1,12 +1,11 @@ Require Export Fiat.Common.Coq__8_4__8_5__Compat. Require Import Coq.Classes.Morphisms. -Require Import Coq.Numbers.Natural.Peano.NPeano. Require Import Coq.ZArith.ZArith. Lemma min_def {x y} : min x y = x - (x - y). -Proof. apply Min.min_case_strong; omega. Qed. +Proof. apply Nat.min_case_strong; omega. Qed. Lemma max_def {x y} : max x y = x + (y - x). -Proof. apply Max.max_case_strong; omega. Qed. +Proof. apply Nat.max_case_strong; omega. Qed. Ltac coq_omega := omega. Ltac handle_min_max_for_omega := repeat match goal with @@ -19,8 +18,8 @@ Ltac handle_min_max_for_omega_case := repeat match goal with | [ H : context[min _ _] |- _ ] => revert H | [ H : context[max _ _] |- _ ] => revert H - | [ |- context[min _ _] ] => apply Min.min_case_strong - | [ |- context[max _ _] ] => apply Max.max_case_strong + | [ |- context[min _ _] ] => apply Nat.min_case_strong + | [ |- context[max _ _] ] => apply Nat.max_case_strong end; intros. Ltac omega_with_min_max := @@ -59,9 +58,9 @@ Section NatFacts. Lemma beq_nat_eq_nat_dec : forall x y, - beq_nat x y = if eq_nat_dec x y then true else false. + Nat.eqb x y = if eq_nat_dec x y then true else false. Proof. - intros; destruct (eq_nat_dec _ _); [ apply beq_nat_true_iff | apply beq_nat_false_iff ]; assumption. + intros; destruct (eq_nat_dec _ _); [ apply Nat.eqb_eq | apply Nat.eqb_neq ]; assumption. Qed. Lemma min_minus_l x y @@ -110,7 +109,7 @@ Section dec_prod. Proof. destruct max as [|max]; [ clear dec_stabalize' | specialize (dec_stabalize' max) ]. - { destruct (Hdec 0 (le_refl _)) as [Hd|Hd]; [ left | right ]. + { destruct (Hdec 0 (Nat.le_refl _)) as [Hd|Hd]; [ left | right ]. { intro n. induction n as [|n IHn]. { assumption. } @@ -140,7 +139,7 @@ Section dec_prod. Proof. destruct max as [|max]; [ clear dec_stabalize | specialize (dec_stabalize max) ]. - { destruct (Hdec 0 (le_refl _)) as [Hd|Hd]; [ left | right ]. + { destruct (Hdec 0 (Nat.le_refl _)) as [Hd|Hd]; [ left | right ]. { exists 0; split; [ reflexivity | assumption ]. } { intros n Pn. apply Hd. clear -Pn Hstable. @@ -196,8 +195,8 @@ Lemma min_case_strong_r n m (P : nat -> Type) : (n <= m -> P n) -> (m < n -> P m) -> P (min n m). Proof. destruct (Compare_dec.le_lt_dec n m); - first [ rewrite Min.min_r by omega - | rewrite Min.min_l by omega ]; + first [ rewrite Nat.min_r by omega + | rewrite Nat.min_l by omega ]; auto. Qed. @@ -205,19 +204,19 @@ Lemma min_case_strong_l n m (P : nat -> Type) : (n < m -> P n) -> (m <= n -> P m) -> P (min n m). Proof. destruct (Compare_dec.le_lt_dec m n); - first [ rewrite Min.min_r by omega - | rewrite Min.min_l by omega ]; + first [ rewrite Nat.min_r by omega + | rewrite Nat.min_l by omega ]; auto. Qed. Lemma beq_0_1_leb x -: (EqNat.beq_nat x 1 || EqNat.beq_nat x 0)%bool = Compare_dec.leb x 1. +: (Nat.eqb x 1 || Nat.eqb x 0)%bool = Compare_dec.leb x 1. Proof. destruct x as [|[|]]; simpl; reflexivity. Qed. Lemma beq_S_leb x n -: (EqNat.beq_nat x (S n) || Compare_dec.leb x n)%bool = Compare_dec.leb x (S n). +: (Nat.eqb x (S n) || Compare_dec.leb x n)%bool = Compare_dec.leb x (S n). Proof. revert x; induction n as [|n IHn]; simpl. { intros [|[|]]; reflexivity. } @@ -246,7 +245,7 @@ Lemma min_subr_same {x y} : (min x y - x)%natr = 0. Proof. clear; rewrite minusr_minus; omega *. Qed. Lemma beq_nat_min_0 {x y} - : EqNat.beq_nat (min x y) 0 = orb (EqNat.beq_nat x 0) (EqNat.beq_nat y 0). 
+ : Nat.eqb (min x y) 0 = orb (Nat.eqb x 0) (Nat.eqb y 0). Proof. destruct x, y; simpl; reflexivity. Qed. Lemma max_min_n {x y} : max (min x y) y = y. diff --git a/src/Common/StringFacts.v b/src/Common/StringFacts.v index e14e4f912..10b7aca73 100644 --- a/src/Common/StringFacts.v +++ b/src/Common/StringFacts.v @@ -1,8 +1,8 @@ Require Export Fiat.Common.Coq__8_4__8_5__Compat. Require Import Coq.ZArith.ZArith. +Require Import Coq.Arith.PeanoNat. Require Import Coq.Strings.Ascii. Require Import Coq.Strings.String. -Require Import Coq.Numbers.Natural.Peano.NPeano. Require Import Fiat.Common.List.Operations. Require Import Fiat.Common.StringOperations. @@ -23,10 +23,10 @@ Proof. induction s; simpl. { intros; destruct m; trivial. } { intros; destruct m. - { apply Lt.lt_n_0 in H. +{ apply Nat.nlt_0_r in H. destruct H. } { rewrite IHs; trivial. - apply Le.le_S_n; trivial. } } + apply Nat.succ_le_mono; trivial. } } Qed. Lemma substring_correct3' {s : string} @@ -47,7 +47,7 @@ Proof. revert n m H; induction s; simpl; intros n m H. { destruct n, m; trivial. } { destruct n, m; trivial. - { apply le_Sn_0 in H; destruct H. } + { apply Nat.nlt_0_r in H; destruct H. } { rewrite IHs; eauto with arith. } { rewrite IHs; eauto with arith. } } Qed. @@ -60,14 +60,15 @@ Proof. induction s; simpl in *. { intros; destruct m, m', n; trivial. } { intros; destruct m, m', n; trivial; simpl in *; - try (apply Lt.lt_n_0 in H; destruct H); - try (apply Lt.lt_n_0 in H'; destruct H'); - try apply Le.le_S_n in H; - try apply Le.le_S_n in H'; - try solve [ try rewrite plus_comm in H; - try rewrite plus_comm in H'; + try (apply Nat.nlt_0_r in H; destruct H); + try (apply Nat.nlt_0_r in H'; destruct H'); + try apply le_S_n in H; + try apply le_S_n in H'; + try solve [ try rewrite Nat.add_comm in H; + try rewrite Nat.add_comm in H'; simpl in *; rewrite !substring_correct0' by auto with arith; trivial ]. + { apply f_equal. apply IHs; trivial. } { apply IHs; trivial. } } @@ -90,7 +91,7 @@ Proof. induction s; simpl; intros. { destruct (y + z), x, y, z; reflexivity. } { destruct x, y, z; try reflexivity; simpl; - rewrite ?plus_0_r, ?substring_correct0, ?string_concat_empty_r; + rewrite ?Nat.add_0_r, ?substring_correct0, ?string_concat_empty_r; try reflexivity. { apply f_equal. apply IHs. } @@ -151,14 +152,14 @@ Proof. induction s; intros. { destruct n, m, n', m'; reflexivity. } { destruct n', m'; - rewrite <- ?plus_n_O, <- ?minus_n_O, ?Min.min_0_r, ?Min.min_0_l; + rewrite <- ?Nat.add_0_r, <- ?Nat.sub_0_r, ?Nat.min_0_r, ?Nat.min_0_l; destruct n, m; trivial; simpl; - rewrite ?IHs, ?substring_correct0, <- ?plus_n_O, <- ?minus_n_O; + rewrite ?IHs, ?substring_correct0, <- ?plus_n_O, ?Nat.sub_0_r; simpl; try reflexivity. - rewrite (plus_comm _ (S _)); simpl. - rewrite (plus_comm n n'). + rewrite (Nat.add_comm _ (S _)); simpl. + rewrite (Nat.add_comm n n'). reflexivity. } Qed. @@ -178,7 +179,7 @@ Lemma substring_min {x x' y y' z str} (H : substring x y str = substring x' y' s Proof. pose proof (fun y x => @substring_substring str 0 z x y) as H'; simpl in *. setoid_rewrite Nat.sub_0_r in H'. - setoid_rewrite Min.min_comm in H'. + setoid_rewrite Nat.min_comm in H'. rewrite <- !H', H; reflexivity. Qed. @@ -232,7 +233,7 @@ Qed. Lemma substring_min_length str x y : substring x (min y (length str)) str = substring x y str. Proof. - apply Min.min_case_strong; try reflexivity. + apply Nat.min_case_strong; try reflexivity. intro H. apply substring_correct4; omega. Qed. 
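(* Illustrative sketch (not part of this patch): the NatFacts.v and
   StringFacts.v hunks above follow the same mapping from the deprecated
   Coq.Arith modules to the [Nat] module of [Coq.Arith.PeanoNat], e.g.
   [plus_comm] -> [Nat.add_comm], [Min.min_l] -> [Nat.min_l], and
   [Min.min_case_strong] -> [Nat.min_case_strong].  A small self-contained
   example in the new spelling; the lemma names are invented for illustration. *)
Require Import Coq.Arith.PeanoNat.

Lemma add_comm_example (x y : nat) : min x y + y = y + min x y.
Proof.
  (* [Nat.add_comm] replaces [plus_comm] from the deprecated Coq.Arith.Plus. *)
  apply Nat.add_comm.
Qed.

Lemma min_le_example (x y : nat) : min x y <= y.
Proof.
  (* [Nat.min_case_strong] replaces [Min.min_case_strong]; both subgoals are
     closed by [auto] using the [le] hypothesis it provides. *)
  apply Nat.min_case_strong; intros; auto.
Qed.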
diff --git a/src/Common/Wf.v b/src/Common/Wf.v index d097c4682..9be4543dc 100644 --- a/src/Common/Wf.v +++ b/src/Common/Wf.v @@ -1,6 +1,7 @@ (** * Miscellaneous Well-Foundedness Facts *) Require Import Coq.Setoids.Setoid Coq.Program.Program Coq.Program.Wf Coq.Arith.Wf_nat Coq.Classes.Morphisms Coq.Init.Wf. Require Import Coq.Lists.SetoidList. +Require Import Coq.Arith.PeanoNat. Require Export Fiat.Common.Coq__8_4__8_5__Compat. Set Implicit Arguments. @@ -447,8 +448,8 @@ Section wf. | S n' => prod_relationA eqA R (@iterated_prod_relationA n') end. - Fixpoint nat_eq_transfer (P : nat -> Type) (n m : nat) : P n -> (P m) + (EqNat.beq_nat n m = false) - := match n, m return P n -> (P m) + (EqNat.beq_nat n m = false) with + Fixpoint nat_eq_transfer (P : nat -> Type) (n m : nat) : P n -> (P m) + (Nat.eqb n m = false) + := match n, m return P n -> (P m) + (Nat.eqb n m = false) with | 0, 0 => fun x => inl x | S n', S m' => @nat_eq_transfer (fun v => P (S v)) n' m' | _, _ => fun _ => inr eq_refl @@ -461,11 +462,11 @@ Section wf. end. Fixpoint nat_eq_transfer_neq (P : nat -> Type) (n m : nat) - : forall v : P n, (if EqNat.beq_nat n m as b return ((P m) + (b = false)) -> Prop + : forall v : P n, (if Nat.eqb n m as b return ((P m) + (b = false)) -> Prop then fun _ => True else fun v => v = inr eq_refl) (nat_eq_transfer P n m v) - := match n, m return forall v : P n, (if EqNat.beq_nat n m as b return ((P m) + (b = false)) -> Prop + := match n, m return forall v : P n, (if Nat.eqb n m as b return ((P m) + (b = false)) -> Prop then fun _ => True else fun v => v = inr eq_refl) (nat_eq_transfer P n m v) @@ -521,8 +522,8 @@ Section wf. cbv beta in *; generalize dependent (nat_eq_transfer iterated_prod n1' n0'); let Heq := fresh in - destruct (EqNat.beq_nat n1' n0') eqn:Heq; - [ apply EqNat.beq_nat_true_iff in Heq; subst; rewrite <- EqNat.beq_nat_refl in H; + destruct (Nat.eqb n1' n0') eqn:Heq; + [ apply Nat.eqb_eq in Heq; subst; rewrite Nat.eqb_refl in H; exfalso; clear -H; congruence | ] | [ |- context[@nat_eq_transfer iterated_prod n0' n1' _] ] diff --git a/src/Computation/Decidable.v b/src/Computation/Decidable.v index b3f7929f7..027d6d4c6 100644 --- a/src/Computation/Decidable.v +++ b/src/Computation/Decidable.v @@ -150,9 +150,9 @@ Obligation 1. t; destruct (Ascii.ascii_dec n m); auto; discriminate. Qed. Require Import Coq.Arith.Arith. Global Program Instance nat_eq_Decidable {n m : nat} : Decidable (n = m) := { - Decidable_witness := beq_nat n m + Decidable_witness := Nat.eqb n m }. -Obligation 1. t' beq_nat_true_iff. Qed. +Obligation 1. t' Nat.eqb_eq. Qed. Global Program Instance le_Decidable {n m} : Decidable (Nat.le n m) := { Decidable_witness := Compare_dec.leb n m diff --git a/src/Computation/ListComputations.v b/src/Computation/ListComputations.v index e1a1ff0e8..8f160c4b3 100644 --- a/src/Computation/ListComputations.v +++ b/src/Computation/ListComputations.v @@ -107,14 +107,14 @@ Section UpperBound. Definition find_UpperBound (f : A -> nat) (ns : list A) : list A := let max := fold_right (fun n acc => max (f n) acc) O ns in - filter (fun n => NPeano.Nat.leb max (f n)) ns. + filter (fun n => Nat.leb max (f n)) ns. Lemma find_UpperBound_highest_length : forall (f : A -> nat) ns n, List.In n (find_UpperBound f ns) -> forall n', List.In n' ns -> (f n) >= (f n'). Proof. unfold ge, find_UpperBound; intros. - apply filter_In in H; destruct H. apply NPeano.Nat.leb_le in H1. + apply filter_In in H; destruct H. apply Nat.leb_le in H1. rewrite <- H1; clear H1 H n. apply fold_right_max_is_max; auto. Qed. 
@@ -127,9 +127,9 @@ Instance DecideableEnsembleUpperBound {A} (ns : list A) : DecideableEnsemble (UpperBound (fun a a' => f a <= f a') ns). Proof. -refine {| dec n := NPeano.Nat.leb (fold_right (fun n acc => max (f n) acc) O ns) (f n) |}. - unfold UpperBound, ge; intros; rewrite NPeano.Nat.leb_le; intuition. - - remember (f a); clear Heqn; subst; eapply le_trans; +refine {| dec n := Nat.leb (fold_right (fun n acc => max (f n) acc) O ns) (f n) |}. + unfold UpperBound, ge; intros; rewrite Nat.leb_le; intuition. + - remember (f a); clear Heqn; subst; eapply Nat.le_trans; [ apply fold_right_max_is_max; apply H0 | assumption ]. - eapply fold_right_higher_is_higher; eauto. Defined. diff --git a/src/Narcissus/Automation/Decision.v b/src/Narcissus/Automation/Decision.v index f0c9dfb07..583b75148 100644 --- a/src/Narcissus/Automation/Decision.v +++ b/src/Narcissus/Automation/Decision.v @@ -50,12 +50,12 @@ Qed. Lemma decides_nat_eq : forall (n n' : nat), - decides (EqNat.beq_nat n n') (n = n'). + decides (Nat.eqb n n') (n = n'). Proof. unfold decides, If_Then_Else; intros. - destruct (EqNat.beq_nat n n') eqn: ? ; - try eapply EqNat.beq_nat_true_iff; - try eapply EqNat.beq_nat_false_iff; eauto. + destruct (Nat.eqb n n') eqn: ? ; + try eapply Nat.eqb_true_iff; + try eapply Nat.eqb_false_iff; eauto. Qed. Lemma decides_word_eq {sz}: diff --git a/src/Parsers/AbstractInterpretation/NonTerminalMap.v b/src/Parsers/AbstractInterpretation/NonTerminalMap.v index 6696da88f..5887df386 100644 --- a/src/Parsers/AbstractInterpretation/NonTerminalMap.v +++ b/src/Parsers/AbstractInterpretation/NonTerminalMap.v @@ -16,7 +16,7 @@ Definition positive_to_nonterminal (nt : positive) : default_nonterminal_carrier Lemma positive_to_nonterminal_to_positive nt : nonterminal_to_positive (positive_to_nonterminal nt) = nt. Proof. unfold nonterminal_to_positive, positive_to_nonterminal. - erewrite <- S_pred by apply Pos2Nat.is_pos. + erewrite Nat.lt_succ_pred by apply Pos2Nat.is_pos. rewrite Pos2Nat.id. reflexivity. Qed. diff --git a/src/Parsers/BaseTypesLemmas.v b/src/Parsers/BaseTypesLemmas.v index da64a787c..340a95630 100644 --- a/src/Parsers/BaseTypesLemmas.v +++ b/src/Parsers/BaseTypesLemmas.v @@ -112,13 +112,13 @@ Section recursive_descent_parser. Lemma nonempty_nonterminals {ls nt} (H : is_valid_nonterminal ls nt) : 0 < nonterminals_length ls. Proof. - eapply Lt.le_lt_trans; - [ apply Le.le_0_n + eapply Nat.le_lt_trans; + [ apply Nat.le_0_l | exact (remove_nonterminal_dec ls nt H) ]. Qed. Lemma nonempty_nonterminals' {ls nt} (H : is_valid_nonterminal ls nt) - : negb (EqNat.beq_nat (nonterminals_length ls) 0). + : negb (Nat.eqb (nonterminals_length ls) 0). Proof. pose proof (nonempty_nonterminals H). destruct (nonterminals_length ls); simpl; try reflexivity; try omega. diff --git a/src/Parsers/BooleanRecognizerOptimized.v b/src/Parsers/BooleanRecognizerOptimized.v index eb562e417..c22d1c655 100644 --- a/src/Parsers/BooleanRecognizerOptimized.v +++ b/src/Parsers/BooleanRecognizerOptimized.v @@ -1,7 +1,7 @@ Require Export Fiat.Common.Coq__8_4__8_5__Compat. (** * Definition of a boolean-returning CFG parser-recognizer *) Require Import Coq.Lists.List Coq.Strings.String. -Require Import Coq.Numbers.Natural.Peano.NPeano Coq.Arith.Compare_dec Coq.Arith.Wf_nat. +Require Import Coq.Arith.Compare_dec Coq.Arith.Wf_nat Coq.Arith.PeanoNat. Require Import Fiat.Common.List.Operations. Require Import Fiat.Common.List.ListMorphisms. Require Import Fiat.Parsers.ContextFreeGrammar.Core. @@ -33,7 +33,6 @@ Require Export Fiat.Common.NatFacts. 
Require Export Fiat.Common.Sigma. Require Import Fiat.Parsers.StringLike.Core. Require Import Fiat.Parsers.StringLike.Properties. -Import NPeano. Set Implicit Arguments. Local Open Scope string_like_scope. @@ -58,7 +57,7 @@ Module Export opt. Definition hd {A} := Eval compute in @hd A. Definition tl {A} := Eval compute in @tl A. Definition nth {A} := Eval compute in @nth A. - Definition nth' {A} := Eval cbv beta iota zeta delta -[EqNat.beq_nat] in @nth' A. + Definition nth' {A} := Eval cbv beta iota zeta delta -[Nat.eqb] in @nth' A. Definition fst {A B} := Eval compute in @fst A B. Definition snd {A B} := Eval compute in @snd A B. Definition list_caset {A} := Eval compute in @list_caset A. @@ -68,7 +67,7 @@ Module Export opt. Definition pred := Eval compute in pred. Definition minusr := Eval compute in minusr. Definition id {A} := Eval compute in @id A. - Definition beq_nat := Eval compute in EqNat.beq_nat. + Definition beq_nat := Eval compute in Nat.eqb. Definition leb := Eval compute in Compare_dec.leb. Definition interp_RCharExpr {Char idata} := Eval cbv -[andb orb negb Compare_dec.leb] in @interp_RCharExpr Char idata. End opt. @@ -94,7 +93,7 @@ Module Export opt2. Definition hd {A} := Eval compute in @hd A. Definition tl {A} := Eval compute in @tl A. Definition nth {A} := Eval compute in @nth A. - Definition nth' {A} := Eval cbv beta iota zeta delta -[EqNat.beq_nat] in @nth' A. + Definition nth' {A} := Eval cbv beta iota zeta delta -[Nat.eqb] in @nth' A. Definition fst {A B} := Eval compute in @fst A B. Definition snd {A B} := Eval compute in @snd A B. Definition list_caset {A} := Eval compute in @list_caset A. @@ -104,7 +103,7 @@ Module Export opt2. Definition pred := Eval compute in pred. Definition minusr := Eval compute in minusr. Definition id {A} := Eval compute in @id A. - Definition beq_nat := Eval compute in EqNat.beq_nat. + Definition beq_nat := Eval compute in Nat.eqb. Definition leb := Eval compute in Compare_dec.leb. Definition interp_RCharExpr {Char idata} := Eval cbv -[andb orb negb Compare_dec.leb] in @interp_RCharExpr Char idata. End opt2. @@ -130,7 +129,7 @@ Module Export opt3. Definition hd {A} := Eval compute in @hd A. Definition tl {A} := Eval compute in @tl A. Definition nth {A} := Eval compute in @nth A. - Definition nth' {A} := Eval cbv beta iota zeta delta -[EqNat.beq_nat] in @nth' A. + Definition nth' {A} := Eval cbv beta iota zeta delta -[Nat.eqb] in @nth' A. Definition fst {A B} := Eval compute in @fst A B. Definition snd {A B} := Eval compute in @snd A B. Definition list_caset {A} := Eval compute in @list_caset A. @@ -140,7 +139,7 @@ Module Export opt3. Definition pred := Eval compute in pred. Definition minusr := Eval compute in minusr. Definition id {A} := Eval compute in @id A. - Definition beq_nat := Eval compute in EqNat.beq_nat. + Definition beq_nat := Eval compute in Nat.eqb. Definition leb := Eval compute in Compare_dec.leb. Definition interp_RCharExpr {Char idata} := Eval cbv -[andb orb negb Compare_dec.leb] in @interp_RCharExpr Char idata. End opt3. @@ -271,9 +270,9 @@ Section recursive_descent_parser. | [ H : (_ && _)%bool = true |- _ ] => apply Bool.andb_true_iff in H | [ H : _ = in_left |- _ ] => clear H | [ H : _ /\ _ |- _ ] => destruct H - | [ H : context[negb (EqNat.beq_nat ?x ?y)] |- _ ] => destruct (EqNat.beq_nat x y) eqn:? 
- | [ H : EqNat.beq_nat _ _ = false |- _ ] => apply EqNat.beq_nat_false in H - | [ H : EqNat.beq_nat _ _ = true |- _ ] => apply EqNat.beq_nat_true in H + | [ H : context[negb (Nat.eqb ?x ?y)] |- _ ] => destruct (Nat.eqb x y) eqn:? + | [ H : Nat.eqb _ _ = false |- _ ] => apply (fun n m => proj1 (Nat.eqb_neq n m)) in H + | [ H : Nat.eqb _ _ = true |- _ ] => apply (fun n m => proj1 (Nat.eqb_eq n m)) in H | [ H : snd ?x = _ |- _ ] => is_var x; destruct x | _ => progress simpl negb in * | [ H : false = true |- _ ] => inversion H @@ -425,7 +424,7 @@ Section recursive_descent_parser. | [ |- _ = 0 ] => reflexivity | [ |- _ = 1 ] => reflexivity | [ |- _ = None ] => reflexivity - | [ |- _ = EqNat.beq_nat _ _ ] => apply f_equal2 + | [ |- _ = Nat.eqb _ _ ] => apply f_equal2 | [ |- _ = Compare_dec.leb _ _ ] => apply f_equal2 | [ |- _ = S _ ] => apply f_equal | [ |- _ = string_beq _ _ ] => apply f_equal2 @@ -447,9 +446,9 @@ Section recursive_descent_parser. Local Ltac misc_opt' := idtac; match goal with - | _ => progress rewrite ?max_min_n, ?Minus.minus_diag, ?Nat.sub_0_r, ?uneta_bool, ?beq_nat_min_0(*, ?bool_rect_flatten*) - | _ => rewrite Min.min_l by assumption - | _ => rewrite Min.min_r by assumption + | _ => progress rewrite ?max_min_n, ?Nat.sub_diag, ?Nat.sub_0_r, ?uneta_bool, ?beq_nat_min_0(*, ?bool_rect_flatten*) + | _ => rewrite Nat.min_l by assumption + | _ => rewrite Nat.min_r by assumption | [ |- context[if ?ltb ?x ?y then _ else _] ] => rewrite if_to_min | [ |- context[min ?x ?y - ?x] ] => rewrite min_sub_same | [ |- context[(min ?x ?y - ?x)%natr] ] => rewrite min_subr_same @@ -1159,7 +1158,7 @@ Section recursive_descent_parser. (fun _ => _) (N0 a' b' c') (list_rect P0 (fun _ _ _ => true) C0 ls' a' b' c') - (EqNat.beq_nat (List.length ls') 0)) + (Nat.eqb (List.length ls') 0)) = list_rect P0 N0 C0 ls' a' b' c') _ _ @@ -1192,12 +1191,12 @@ Section recursive_descent_parser. => is_var e; destruct e | _ => progress simpl | [ |- ?x = ?x ] => reflexivity - | _ => progress rewrite ?Bool.andb_true_r, ?Min.min_idempotent, ?Minus.minus_diag - | [ H : EqNat.beq_nat _ _ = true |- _ ] - => apply EqNat.beq_nat_true in H + | _ => progress rewrite ?Bool.andb_true_r, ?Nat.min_idempotent, ?Nat.sub_diag + | [ H : Nat.eqb _ _ = true |- _ ] + => apply (fun n m => proj1 (Nat.eqb_eq n m)) in H | _ => progress subst - | [ |- context[EqNat.beq_nat ?x ?y] ] - => is_var x; destruct (EqNat.beq_nat x y) eqn:? + | [ |- context[Nat.eqb ?x ?y] ] + => is_var x; destruct (Nat.eqb x y) eqn:? | [ H := _ |- _ ] => subst H | [ |- context[orb _ false] ] => rewrite Bool.orb_false_r | _ => rewrite minusr_minus @@ -1246,13 +1245,13 @@ Section recursive_descent_parser. { misc_opt. rewrite <- andbr_andb. apply (f_equal2 andbr); [ | reflexivity ]. - rewrite Min.min_idempotent. + rewrite Nat.min_idempotent. reflexivity. } { apply (f_equal2 andb); [ | reflexivity ]. step_opt'; []. apply (f_equal2 andb); [ | reflexivity ]. match goal with - | [ |- _ = EqNat.beq_nat (min ?v ?x) ?v ] + | [ |- _ = Nat.eqb (min ?v ?x) ?v ] => refine (_ : Compare_dec.leb v x = _) end. match goal with @@ -1418,13 +1417,13 @@ Section recursive_descent_parser. rewrite bool_rect_andb; simpl. rewrite Bool.andb_true_r. 
match goal with - | [ |- _ = (orb (negb (EqNat.beq_nat ?x 0)) (andb (EqNat.beq_nat ?x 0) ?y)) ] + | [ |- _ = (orb (negb (Nat.eqb ?x 0)) (andb (Nat.eqb ?x 0) ?y)) ] => let z := fresh in let y' := fresh in set (z := x); set (y' := y); refine (_ : orb (Compare_dec.leb 1 x) y = _); - change (orb (Compare_dec.leb 1 z) y' = orb (negb (EqNat.beq_nat z 0)) (andb (EqNat.beq_nat z 0) y')); + change (orb (Compare_dec.leb 1 z) y' = orb (negb (Nat.eqb z 0)) (andb (Nat.eqb z 0) y')); destruct z, y'; reflexivity end. } etransitivity_rev _. @@ -1627,13 +1626,13 @@ Section recursive_descent_parser. { rewrite bool_rect_andb. rewrite Bool.andb_true_r. match goal with - | [ |- _ = (orb (negb (EqNat.beq_nat ?x 0)) (andb (EqNat.beq_nat ?x 0) ?y)) ] + | [ |- _ = (orb (negb (Nat.eqb ?x 0)) (andb (Nat.eqb ?x 0) ?y)) ] => let z := fresh in let y' := fresh in set (z := x); set (y' := y); refine (_ : orb (Compare_dec.leb 1 x) y = _); - change (orb (Compare_dec.leb 1 z) y' = orb (negb (EqNat.beq_nat z 0)) (andb (EqNat.beq_nat z 0) y')); + change (orb (Compare_dec.leb 1 z) y' = orb (negb (Nat.eqb z 0)) (andb (Nat.eqb z 0) y')); destruct z, y'; reflexivity end. } apply (f_equal2 orb); fin_step_opt; []. @@ -1749,7 +1748,7 @@ Section recursive_descent_parser. | [ |- context G[snd (of_string ?str')] ] => let G' := context G[opt.id (opt.snd (of_string str'))] in change G' - | [ |- context G[EqNat.beq_nat (opt2.id ?x) 0] ] + | [ |- context G[Nat.eqb (opt2.id ?x) 0] ] => let G' := context G[opt2.id (opt2.beq_nat x 0)] in change G' | [ |- context G[(opt2.id ?x, 0)] ] @@ -1758,7 +1757,7 @@ Section recursive_descent_parser. | [ |- context G[(opt2.id ?x, opt2.id ?y)] ] => let G' := context G[opt2.id (x, y)] in change G' - | [ |- context G[EqNat.beq_nat (opt.id ?x) 0] ] + | [ |- context G[Nat.eqb (opt.id ?x) 0] ] => let G' := context G[opt.id (opt.beq_nat x 0)] in change G' | [ |- context G[S (opt2.id ?x)] ] diff --git a/src/Parsers/BooleanRecognizerTests.v b/src/Parsers/BooleanRecognizerTests.v index 469abe443..db21575e7 100644 --- a/src/Parsers/BooleanRecognizerTests.v +++ b/src/Parsers/BooleanRecognizerTests.v @@ -1,6 +1,6 @@ (** * Some simple examples with the boolean-returning CFG parser-recognizer *) Require Import Coq.Lists.List Coq.Strings.String. -Require Import Coq.Numbers.Natural.Peano.NPeano Coq.Arith.Compare_dec Coq.Arith.Wf_nat. +Require Import Coq.Arith.PeanoNat Coq.Arith.Compare_dec Coq.Arith.Wf_nat. Require Import Fiat.Parsers.Grammars.Trivial Fiat.Parsers.Grammars.ABStar. Require Import Fiat.Parsers.Splitters.RDPList Fiat.Parsers.Splitters.BruteForce. Require Import Fiat.Parsers.ContextFreeGrammar.Core. diff --git a/src/Parsers/ContextFreeGrammar/Carriers.v b/src/Parsers/ContextFreeGrammar/Carriers.v index c06ae52dc..f73bbb4f5 100644 --- a/src/Parsers/ContextFreeGrammar/Carriers.v +++ b/src/Parsers/ContextFreeGrammar/Carriers.v @@ -106,7 +106,7 @@ Section grammar. { simpl. rewrite (string_lb eq_refl); trivial. } { simpl; intros H'. - specialize (IHxs idx (Le.le_S_n _ _ H')). + specialize (IHxs idx (proj2 (Nat.succ_le_mono _ _) H')). rewrite first_index_helper_first_index_error, IHxs by omega. apply first_index_error_Some_correct in IHxs. 
repeat match goal with diff --git a/src/Parsers/ContextFreeGrammar/Fix/Definitions.v b/src/Parsers/ContextFreeGrammar/Fix/Definitions.v index dd8f542c1..ee64a2684 100644 --- a/src/Parsers/ContextFreeGrammar/Fix/Definitions.v +++ b/src/Parsers/ContextFreeGrammar/Fix/Definitions.v @@ -233,7 +233,7 @@ Definition positive_to_nonterminal (nt : positive) : default_nonterminal_carrier Lemma positive_to_nonterminal_to_positive nt : nonterminal_to_positive (positive_to_nonterminal nt) = nt. Proof. unfold nonterminal_to_positive, positive_to_nonterminal. - erewrite <- S_pred by apply Pos2Nat.is_pos. + erewrite Nat.lt_succ_pred by apply Pos2Nat.is_pos. rewrite Pos2Nat.id. reflexivity. Qed. diff --git a/src/Parsers/ContextFreeGrammar/Fix/Fix.v b/src/Parsers/ContextFreeGrammar/Fix/Fix.v index f37c3ac93..08296970e 100644 --- a/src/Parsers/ContextFreeGrammar/Fix/Fix.v +++ b/src/Parsers/ContextFreeGrammar/Fix/Fix.v @@ -80,8 +80,8 @@ Section grammar_fixedpoint. Qed. Create HintDb aggregate_step_db discriminated. - Hint Rewrite PositiveMap.fold_1 PositiveMap.gmapi nonterminal_to_positive_to_nonterminal positive_to_nonterminal_to_positive PositiveMap.gempty PositiveMapAdditionalFacts.gsspec (@state_beq_refl _ gdata) orb_true_iff orb_true_r orb_false_iff (@state_le_bottom_eq_bottom _ gdata) (@no_state_lt_bottom _ gdata) (@state_le_bottom_eq_bottom _ gdata) (@state_ge_top_eq_top _ gdata) (@bottom_lub_r _ gdata) (@bottom_lub_l _ gdata) (@top_lub_r _ gdata) (@top_lub_l _ gdata) (fun a b => @least_upper_bound_correct_l _ gdata a b : _ = true) (fun a b => @least_upper_bound_correct_r _ gdata a b : _ = true) (fun s => @bottom_bottom _ gdata s : _ = true) (fun s => @top_top _ gdata s : _ = true) beq_nat_true_iff @PositiveMapExtensions.lift_brelation_iff : aggregate_step_db. - Hint Rewrite <- beq_nat_refl : aggregate_step_db. + Hint Rewrite PositiveMap.fold_1 PositiveMap.gmapi nonterminal_to_positive_to_nonterminal positive_to_nonterminal_to_positive PositiveMap.gempty PositiveMapAdditionalFacts.gsspec (@state_beq_refl _ gdata) orb_true_iff orb_true_r orb_false_iff (@state_le_bottom_eq_bottom _ gdata) (@no_state_lt_bottom _ gdata) (@state_le_bottom_eq_bottom _ gdata) (@state_ge_top_eq_top _ gdata) (@bottom_lub_r _ gdata) (@bottom_lub_l _ gdata) (@top_lub_r _ gdata) (@top_lub_l _ gdata) (fun a b => @least_upper_bound_correct_l _ gdata a b : _ = true) (fun a b => @least_upper_bound_correct_r _ gdata a b : _ = true) (fun s => @bottom_bottom _ gdata s : _ = true) (fun s => @top_top _ gdata s : _ = true) Nat.eqb_eq @PositiveMapExtensions.lift_brelation_iff : aggregate_step_db. + Hint Rewrite Nat.eqb_refl : aggregate_step_db. Hint Rewrite PositiveMapExtensions.map2_1bis_for_rewrite using reflexivity : aggregate_step_db. Hint Rewrite PositiveMapExtensions.fold_andb_true : aggregate_step_db. @@ -486,7 +486,7 @@ Section grammar_fixedpoint. { apply orb_true_iff; intuition. } { do 2 match goal with | [ H : is_true (orb _ _) |- _ ] => apply orb_true_iff in H - | [ H : _ |- _ ] => setoid_rewrite beq_nat_true_iff in H + | [ H : _ |- _ ] => setoid_rewrite Nat.eqb_eq in H end. repeat intuition (congruence || subst || (autorewrite with aggregate_step_db in * ) || eauto). } } } Qed. diff --git a/src/Parsers/ContextFreeGrammar/Fold.v b/src/Parsers/ContextFreeGrammar/Fold.v index ae285bb31..7c7c6a65c 100644 --- a/src/Parsers/ContextFreeGrammar/Fold.v +++ b/src/Parsers/ContextFreeGrammar/Fold.v @@ -283,7 +283,7 @@ Section fold_correctness. { apply sub_nonterminals_listT_remove_2; assumption. } { apply IH. 
{ apply sub_nonterminals_listT_remove_2; assumption. } - { apply Le.le_S_n. + { apply (fun n m => proj2 (Nat.succ_le_mono n m)). etransitivity; [ | exact Hlen ]. apply (remove_nonterminal_dec valid0 (of_nonterminal nt) Hvalid). } } } { apply Pnt_redundant; subst; assumption. } } diff --git a/src/Parsers/ContextFreeGrammar/Properties.v b/src/Parsers/ContextFreeGrammar/Properties.v index d82f34ab6..e28fe2688 100644 --- a/src/Parsers/ContextFreeGrammar/Properties.v +++ b/src/Parsers/ContextFreeGrammar/Properties.v @@ -1,5 +1,5 @@ (** * Properties about Context Free Grammars *) -Require Import Coq.Numbers.Natural.Peano.NPeano Coq.Lists.List. +Require Import Coq.Lists.List Coq.Arith.PeanoNat. Require Import Fiat.Common Fiat.Common.UIP. Require Import Fiat.Parsers.ContextFreeGrammar.Core. Require Import Fiat.Parsers.ContextFreeGrammar.Equality. diff --git a/src/Parsers/ContextFreeGrammar/ValidReflective.v b/src/Parsers/ContextFreeGrammar/ValidReflective.v index 95d7f36a6..0c2dec518 100644 --- a/src/Parsers/ContextFreeGrammar/ValidReflective.v +++ b/src/Parsers/ContextFreeGrammar/ValidReflective.v @@ -1,5 +1,6 @@ (** * Definition of Context Free Grammars *) Require Import Coq.Strings.String Coq.Lists.List. +Require Import Coq.Arith.PeanoNat. Require Export Fiat.Parsers.StringLike.Core. Require Import Fiat.Parsers.BaseTypes. Require Import Fiat.Parsers.Splitters.RDPList. @@ -59,7 +60,7 @@ Section cfg. | [ H : List.Forall _ (_::_) |- _ ] => inversion H; clear H | _ => progress subst | _ => congruence - | [ H : EqNat.beq_nat _ _ = true |- _ ] => apply EqNat.beq_nat_true in H + | [ H : Nat.eqb _ _ = true |- _ ] => apply (proj1 (Nat.eqb_eq _ _)) in H | [ H : Equality.string_beq _ _ = true |- _ ] => apply Equality.string_bl in H | [ H : or _ _ |- _ ] => destruct H | [ H : forall x, @?A x \/ @?B x -> _ |- _ ] diff --git a/src/Parsers/ExtrOcamlParsers.v b/src/Parsers/ExtrOcamlParsers.v index 579b325c0..d74f9ee2b 100644 --- a/src/Parsers/ExtrOcamlParsers.v +++ b/src/Parsers/ExtrOcamlParsers.v @@ -93,7 +93,7 @@ Definition sample_variance (ls : list Ocaml.float) : Ocaml.float Definition median (ls : list Ocaml.float) : Ocaml.float := let ls' := Ocaml.List.sort Pervasives.compare ls in let len := List.length ls in - ((List.nth (Div2.div2 (len - 1)) ls' 0 + List.nth (Div2.div2 len) ls' 0) + ((List.nth (Nat.div2 (len - 1)) ls' 0 + List.nth (Nat.div2 len) ls' 0) / 2)%ocaml_float. Parameter display_info : forall (sum median mean sample_variance : Ocaml.float) diff --git a/src/Parsers/GenericCorrectnessBaseTypes.v b/src/Parsers/GenericCorrectnessBaseTypes.v index d209c8e18..59a7eb459 100644 --- a/src/Parsers/GenericCorrectnessBaseTypes.v +++ b/src/Parsers/GenericCorrectnessBaseTypes.v @@ -63,7 +63,7 @@ Section correctness. ret_Terminal_true_correct : forall str offset len ch, len = 0 \/ offset + len <= length str - -> (beq_nat len 1 && char_at_matches offset str ch)%bool = true + -> (Nat.eqb len 1 && char_at_matches offset str ch)%bool = true -> parse_item_is_correct (substring offset len str) (Terminal ch) true (ret_Terminal_true (unsafe_get offset str)); diff --git a/src/Parsers/GenericRecognizer.v b/src/Parsers/GenericRecognizer.v index e9fd7a2ec..c346bd20e 100644 --- a/src/Parsers/GenericRecognizer.v +++ b/src/Parsers/GenericRecognizer.v @@ -6,7 +6,6 @@ Require Import Fiat.Parsers.ContextFreeGrammar.Core. Require Import Fiat.Parsers.BaseTypes. Require Import Fiat.Parsers.GenericBaseTypes. Require Import Fiat.Common.Wf. -Require Import Coq.Numbers.Natural.Peano.NPeano. Set Implicit Arguments. 
Local Open Scope string_like_scope. @@ -25,7 +24,7 @@ Section recursive_descent_parser. (it : item Char) : parse_item_T := match it with - | Terminal P => if EqNat.beq_nat len 1 && char_at_matches offset str P + | Terminal P => if Nat.eqb len 1 && char_at_matches offset str P then ret_Terminal_true (unsafe_get offset str) else ret_Terminal_false (unsafe_get offset str) | NonTerminal nt => if is_valid_nonterminal initial_nonterminals_data (of_nonterminal nt) @@ -59,7 +58,7 @@ Section recursive_descent_parser. parse_production_T) ((** 0-length production, only accept empty *) fun _ offset len0_minus_len - => if beq_nat (len0 - len0_minus_len) 0 + => if Nat.eqb (len0 - len0_minus_len) 0 then ret_production_nil_true else ret_production_nil_false) (fun it its parse_production' idx offset len0_minus_len @@ -117,12 +116,12 @@ Section recursive_descent_parser. (offset : nat) (len : nat), len <= fst p -> nonterminal_carrierT -> parse_nt_T). - Lemma pred_lt_beq {x} : negb (beq_nat x 0) = true -> pred x < x. + Lemma pred_lt_beq {x} : negb (Nat.eqb x 0) = true -> pred x < x. Proof. destruct x; simpl; try congruence; try omega. Qed. Lemma pred_lt_beq_helper {valid nt x} - : (negb (beq_nat x 0) && is_valid_nonterminal valid nt)%bool = true -> pred x < x. + : (negb (Nat.eqb x 0) && is_valid_nonterminal valid nt)%bool = true -> pred x < x. Proof. intro is_valid. generalize (proj1 (proj1 (Bool.andb_true_iff _ _) is_valid)). @@ -162,7 +161,7 @@ Section recursive_descent_parser. initial_nonterminals_data offset' (len - len0_minus_len') - (le_minus _ _))) + (Nat.le_sub_l _ _))) (fun _ => (** [str] didn't get smaller, so we cache the fact that we've hit this nonterminal already *) sumbool_rect (fun _ => option (forall (offset' : nat) (len0_minus_len' : nat), nonterminal_carrierT -> parse_nt_T)) @@ -174,11 +173,11 @@ Section recursive_descent_parser. (remove_nonterminal valid nt) offset' (len0 - len0_minus_len') - (le_minus _ _))) + (Nat.le_sub_l _ _))) (fun _ => (** oops, we already saw this nonterminal in the past. ABORT! *) None) - (Sumbool.sumbool_of_bool (negb (EqNat.beq_nat valid_len 0) && is_valid_nonterminal valid nt))) + (Sumbool.sumbool_of_bool (negb (Nat.eqb valid_len 0) && is_valid_nonterminal valid nt))) (lt_dec len len0))); first [ assumption | apply le_minus @@ -211,7 +210,7 @@ Section recursive_descent_parser. Proof. refine (@parse_nonterminal_or_abort p valid offset (fst p - len0_minus_len) _). - clear; try apply le_minus; abstract omega. + clear; try apply Nat.le_sub_l; abstract omega. Defined. Definition parse_nonterminal' diff --git a/src/Parsers/GenericRecognizerMin.v b/src/Parsers/GenericRecognizerMin.v index 579837c1a..e113d90f0 100644 --- a/src/Parsers/GenericRecognizerMin.v +++ b/src/Parsers/GenericRecognizerMin.v @@ -23,7 +23,6 @@ Require Import Fiat.Common.NatFacts. Require Import Fiat.Common.UIP. Require Import Fiat.Common. Import ListNotations. -Import NPeano. Set Implicit Arguments. Local Open Scope string_like_scope. @@ -182,14 +181,14 @@ Section recursive_descent_parser. try unfold x'; try unfold y' end. - Local Hint Resolve beq_nat_true : generic_parser_correctness. + Local Hint Resolve (fun n m => proj1 (Nat.eqb_eq n m)) : generic_parser_correctness. 
Local Ltac eq_t' := first [ progress subst_le_proof | progress subst_nat_eq_proof | progress subst_bool_eq_proof | solve [ eauto with generic_parser_correctness nocore ] - | rewrite sub_twice, Min.min_r by assumption + | rewrite sub_twice, Nat.min_r by assumption | rewrite !@min_max_sub | rewrite Nat.sub_max_distr_l | rewrite <- Nat.sub_add_distr @@ -313,9 +312,9 @@ Section recursive_descent_parser. transitivity G'; [ | symmetry; exact H ] | let G' := context G[false] in transitivity G'; [ | symmetry; exact H ] ] - | context G[beq_nat ?x ?y] + | context G[Nat.eqb ?x ?y] => let H := fresh in - destruct (Utils.dec (beq_nat x y)) as [H|H]; + destruct (Utils.dec (Nat.eqb x y)) as [H|H]; [ let G' := context G[true] in transitivity G'; [ | symmetry; exact H ] | let G' := context G[false] in @@ -773,7 +772,7 @@ Section recursive_descent_parser. let H1 := fresh in destruct H as [H0 H1]; try clear H | [ H : or _ _ |- _ ] => let H0 := fresh in destruct H as [H0|H0]; try clear H - | [ H : beq_nat _ _ = true |- _ ] => apply Nat.eqb_eq in H + | [ H : Nat.eqb _ _ = true |- _ ] => apply (fun n m => proj1 (Nat.eqb_eq n m)) in H | [ H : ?x = 0, H' : context[?x] |- _ ] => rewrite H in H' | _ => progress subst | _ => progress simpl in * @@ -792,17 +791,17 @@ Section recursive_descent_parser. apply length_singleton in H | [ H : context[length (substring _ 0 _)] |- _ ] => rewrite take_length in H - | [ H : beq_nat ?len 1 = false, + | [ H : Nat.eqb ?len 1 = false, H' : ?offset + ?len <= length ?str, H'' : is_true (is_char (substring ?offset ?len ?str) _) |- _ ] => apply length_singleton in H''; rewrite substring_length in H'' - | [ H : context[min] |- _ ] => rewrite Min.min_l in H by omega - | [ H : context[min] |- _ ] => rewrite Min.min_r in H by omega + | [ H : context[min] |- _ ] => rewrite Nat.min_l in H by omega + | [ H : context[min] |- _ ] => rewrite Nat.min_r in H by omega | [ H : _ |- _ ] => rewrite Nat.add_sub in H - | [ H : andb (beq_nat _ 1) (char_at_matches _ _ _) = false |- False ] => contradict H + | [ H : andb (Nat.eqb _ 1) (char_at_matches _ _ _) = false |- False ] => contradict H | [ |- _ <> false ] => apply Bool.not_false_iff_true - | [ |- andb (beq_nat _ 1) (char_at_matches _ _ _) = true ] => apply char_at_matches_is_char + | [ |- andb (Nat.eqb _ 1) (char_at_matches _ _ _) = true ] => apply char_at_matches_is_char | [ |- ex _ ] => eexists; split; eassumption | [ H : context[to_nonterminal (of_nonterminal _)] |- _ ] => rewrite to_of_nonterminal in H by assumption @@ -840,7 +839,7 @@ Section recursive_descent_parser. : dec (minimal_parse_of_item (G := G) len0 valid (substring offset (len0 - len0_minus_len) str) it). Proof. refine (match it return dec (minimal_parse_of_item len0 valid (substring offset _ str) it) with - | Terminal P => if Sumbool.sumbool_of_bool (EqNat.beq_nat (len0 - len0_minus_len) 1 && char_at_matches offset str P)%bool + | Terminal P => if Sumbool.sumbool_of_bool (Nat.eqb (len0 - len0_minus_len) 1 && char_at_matches offset str P)%bool then inl (match get offset str as g return get offset str = g -> _ with | Some ch => fun H => MinParseTerminal _ _ _ ch _ _ _ | None => fun _ => ! @@ -904,7 +903,7 @@ Section recursive_descent_parser. : length (substring offset len str) = len. Proof. destruct Hlen; subst; rewrite substring_length; simpl; - apply Min.min_case_strong; omega. + apply Nat.min_case_strong; omega. Qed. Lemma dec_in_helper {ls it its offset len0_minus_len} @@ -922,7 +921,7 @@ Section recursive_descent_parser. 
split; first [ intros [n [[H0 H1] H2]] | intros [n [H0 [H1 H2]]] ]. { destruct (le_lt_dec (len0 - len0_minus_len) n) as [pf|pf]. - { rewrite Min.min_l in H0 by assumption. + { rewrite Nat.min_l in H0 by assumption. clear -H0 H1 H2 rdata cdata pf HSLP. induction ls as [|x xs IHxs]; destruct_head_hnf False. destruct (le_lt_dec (len0 - len0_minus_len) x). @@ -931,14 +930,14 @@ Section recursive_descent_parser. { left; reflexivity. } { eapply expand_minimal_parse_of_item_beq; [ .. | eassumption ]. rewrite take_take, <- Nat.sub_min_distr_l. - rewrite !Min.min_r by omega. + rewrite !Nat.min_r by omega. reflexivity. } { eapply expand_minimal_parse_of_production_beq; [ .. | eassumption ]. rewrite drop_take, StringLike.drop_drop. rewrite Nat.sub_add_distr. - apply bool_eq_empty; rewrite substring_length; apply Min.min_case_strong; generalize dependent (len0 - len0_minus_len); intros; omega. } } + apply bool_eq_empty; rewrite substring_length; apply Nat.min_case_strong; generalize dependent (len0 - len0_minus_len); intros; omega. } } { simpl in *. - rewrite Min.min_r in H0 by omega. + rewrite Nat.min_r in H0 by omega. destruct IHxs as [n' [IH0 [IH1 IH2]]]. { destruct H0; try omega; assumption. } { exists n'; repeat split; try assumption. @@ -949,9 +948,9 @@ Section recursive_descent_parser. | _ => progress destruct_head ex | _ => progress destruct_head and | [ H : context[min ?x ?y] |- _ ] - => rewrite (Min.min_r x y) in H by omega + => rewrite (Nat.min_r x y) in H by omega | _ => progress subst - | [ H : min ?x ?y < ?x |- _ ] => revert H; apply (Min.min_case_strong x y) + | [ H : min ?x ?y < ?x |- _ ] => revert H; apply (Nat.min_case_strong x y) | _ => intro | _ => omega | _ => assumption @@ -959,21 +958,21 @@ Section recursive_descent_parser. { eapply expand_minimal_parse_of_item_beq; [ .. | eassumption ]. rewrite take_take. rewrite <- Nat.sub_min_distr_l, sub_twice. - rewrite (Min.min_r len0) by omega. + rewrite (Nat.min_r len0) by omega. reflexivity. } { eapply expand_minimal_parse_of_production_beq; [ .. | eassumption ]. rewrite drop_take, StringLike.drop_drop. - rewrite (plus_comm offset), Nat.sub_add_distr; reflexivity. } } } + rewrite (Nat.add_comm offset), Nat.sub_add_distr; reflexivity. } } } { exists n; repeat split; try assumption. { apply in_map; assumption. } { eapply expand_minimal_parse_of_item_beq; [ .. | eassumption ]. rewrite take_take. rewrite <- Nat.sub_min_distr_l, sub_twice. - rewrite (Min.min_comm len0), <- !Min.min_assoc, (Min.min_r len0) by omega. + rewrite (Nat.min_comm len0), <- !Nat.min_assoc, (Nat.min_r len0) by omega. reflexivity. } { eapply expand_minimal_parse_of_production_beq; [ .. | eassumption ]. rewrite drop_take, StringLike.drop_drop. - rewrite (plus_comm offset), Nat.sub_add_distr. + rewrite (Nat.add_comm offset), Nat.sub_add_distr. reflexivity. } } Defined. @@ -997,7 +996,7 @@ Section recursive_descent_parser. Proof. subst; assumption. Defined. Lemma min_le_r {x y z} (H : y <= z) : min x y <= z. - Proof. apply Min.min_case_strong; omega. Qed. + Proof. apply Nat.min_case_strong; omega. Qed. Lemma lift_le {offset len n length_str} (H : len = 0 \/ offset + len <= length_str) : len - n = 0 \/ offset + n + (len - n) <= length_str. @@ -1011,7 +1010,7 @@ Section recursive_descent_parser. Lemma lift_le_min {offset n len length_str} (H : len = 0 \/ offset + len <= length_str) : min n len = 0 \/ offset + min n len <= length_str. Proof. - apply Min.min_case_strong; [ | intro; assumption ]. + apply Nat.min_case_strong; [ | intro; assumption ]. 
destruct H; subst; [ left | right ]; omega. Qed. @@ -1036,21 +1035,21 @@ Section recursive_descent_parser. destruct H as [pi pp]; split. { eapply expand_minimal_parse_of_item_beq; [ | eassumption ]. rewrite take_take, <- Nat.sub_min_distr_l, sub_twice. - rewrite (Min.min_comm len0), <- !Min.min_assoc, min_minus_r. + rewrite (Nat.min_comm len0), <- !Nat.min_assoc, min_minus_r. reflexivity. } { eapply expand_minimal_parse_of_production_beq; [ | eassumption ]. - rewrite drop_take, StringLike.drop_drop, (plus_comm a offset), Nat.sub_add_distr. + rewrite drop_take, StringLike.drop_drop, (Nat.add_comm a offset), Nat.sub_add_distr. reflexivity. } Defined. Local Ltac parse_production'_for_t' := idtac; match goal with - | [ H : (beq_nat _ _) = true |- _ ] => apply EqNat.beq_nat_true in H + | [ H : (Nat.eqb _ _) = true |- _ ] => apply (fun n m => proj1 (Nat.eqb_eq n m)) in H | _ => progress subst | _ => solve [ constructor; assumption | constructor; - rewrite substring_length; apply Min.min_case_strong; omega ] + rewrite substring_length; apply Nat.min_case_strong; omega ] | [ H : minimal_parse_of_production _ _ _ nil |- _ ] => (inversion H; clear H) | [ H : minimal_parse_of_production _ _ _ (_::_) |- _ ] => (inversion H; clear H) | [ H : ?x = 0, H' : context[?x] |- _ ] => rewrite H in H' @@ -1058,12 +1057,12 @@ Section recursive_descent_parser. | _ => discriminate | [ H : forall x, (_ * _)%type -> _ |- _ ] => specialize (fun x y z => H x (y, z)) | _ => solve [ eauto with nocore ] - | _ => solve [ apply Min.min_case_strong; omega ] + | _ => solve [ apply Nat.min_case_strong; omega ] | _ => omega | [ H : or _ _ |- _ ] => let H0 := fresh in destruct H as [H0|H0]; try clear H | [ H : length (substring _ _ _) = 0 |- _ ] => rewrite substring_length in H - | [ H : context[min] |- _ ] => rewrite Min.min_l in H by omega - | [ H : context[min] |- _ ] => rewrite Min.min_r in H by omega + | [ H : context[min] |- _ ] => rewrite Nat.min_l in H by omega + | [ H : context[min] |- _ ] => rewrite Nat.min_r in H by omega | [ H : _ |- _ ] => rewrite Nat.add_sub in H end. Local Ltac parse_production'_for_t := repeat parse_production'_for_t'. @@ -1124,7 +1123,7 @@ Section recursive_descent_parser. Lemma substring_length_le_helper {offset len0_minus_len} : length (substring offset (len0 - len0_minus_len) str) <= len0. Proof. - rewrite substring_length; apply Min.min_case_strong; omega. + rewrite substring_length; apply Nat.min_case_strong; omega. Qed. Lemma Hlen_sub_more {offset n len0_minus_len} @@ -1132,14 +1131,14 @@ Section recursive_descent_parser. -> len0 - max (len0 - n) len0_minus_len = 0 \/ offset + (len0 - max (len0 - n) len0_minus_len) <= length str. Proof. - clear; intros [Hlen|Hlen]; [ left | right ]; apply Max.max_case_strong; omega. + clear; intros [Hlen|Hlen]; [ left | right ]; apply Nat.max_case_strong; omega. Qed. Lemma Hlen_sub_some {n len0_minus_len offset} : len0 - len0_minus_len = 0 \/ offset + (len0 - len0_minus_len) <= length str -> len0 - max (len0 - n) len0_minus_len <= len0. Proof. - apply Max.max_case_strong; omega. + apply Nat.max_case_strong; omega. Qed. Lemma Hlen_sub_helper {offset n len0_minus_len} @@ -1183,7 +1182,7 @@ Section recursive_descent_parser. 
dec (minimal_parse_of_production (G := G) len0 valid (substring offset (len0 - len0_minus_len) str) ps)) ((** 0-length production, only accept empty *) fun idx Hidx Hreachable Hvalid offset len0_minus_len Hlen - => match Utils.dec (beq_nat (len0 - len0_minus_len) 0) with + => match Utils.dec (Nat.eqb (len0 - len0_minus_len) 0) with | left H => inl _ | right H => inr (fun p => _) end) @@ -1529,9 +1528,9 @@ Section recursive_descent_parser. | [ H : is_true ?e, H' : context[?e] |- _ ] => rewrite H in H' | [ H : context[andb _ true] |- _ ] => rewrite Bool.andb_true_r in H | [ H : negb _ = false |- _ ] => apply Bool.negb_false_iff in H - | [ H : beq_nat _ _ = true |- _ ] => apply beq_nat_true in H - | [ H : context[beq_nat ?x 0] |- context[pred ?x] ] => is_var x; destruct x - | [ H : _ <= 0 |- _ ] => apply le_n_0_eq in H + | [ H : Nat.eqb _ _ = true |- _ ] => apply (fun n m => proj1 (Nat.eqb_eq n m)) in H + | [ H : context[Nat.eqb ?x 0] |- context[pred ?x] ] => is_var x; destruct x + | [ H : _ <= 0 |- _ ] => apply (fun n Hle => eq_sym (proj1 (Nat.le_0_r n) Hle)) in H | [ H : 0 = _ |- _ ] => symmetry in H | [ H : nonterminals_length ?v = 0, H' : context[is_valid_nonterminal ?v ?nt] |- _ ] => rewrite nonterminals_length_zero in H' by assumption @@ -1555,24 +1554,24 @@ Section recursive_descent_parser. | [ H : or _ _ |- _ ] => let H0 := fresh in destruct H as [H0|H0]; try clear H | [ |- context[length (substring _ _ _)] ] => rewrite substring_length - | _ => apply Min.min_case_strong; omega + | _ => apply Nat.min_case_strong; omega | [ H : ?x = 0 \/ ?T |- _ ] => destruct (Compare_dec.zerop x); [ clear H | assert T by (destruct H; try assumption; omega); clear H ] | [ |- context[min ?x ?y - ?y] ] - => rewrite <- Nat.sub_min_distr_r, minus_diag, Min.min_0_r + => rewrite <- Nat.sub_min_distr_r, Nat.sub_diag, Nat.min_0_r | _ => rewrite Nat.add_sub - | _ => rewrite Min.min_r by omega - | _ => rewrite Min.min_l by omega + | _ => rewrite Nat.min_r by omega + | _ => rewrite Nat.min_l by omega | [ H : context[length (substring _ 0 _)] |- _ ] => rewrite take_length in H | [ H : context[length (substring _ _ _)] |- _ ] - => rewrite substring_length, Min.min_r, Nat.add_sub in H by omega + => rewrite substring_length, Nat.min_r, Nat.add_sub in H by omega | [ H : context[?x - (?x - _)] |- _ ] => rewrite sub_twice in H - | [ H : context[min ?x ?y] |- _ ] => rewrite (Min.min_r x y) in H by assumption - | [ H : context[min ?x ?y] |- _ ] => rewrite (Min.min_l x y) in H by assumption - | [ H : context[min ?x ?x] |- _ ] => rewrite Min.min_idempotent in H - | [ H : context[?x - ?x] |- _ ] => rewrite minus_diag in H + | [ H : context[min ?x ?y] |- _ ] => rewrite (Nat.min_r x y) in H by assumption + | [ H : context[min ?x ?y] |- _ ] => rewrite (Nat.min_l x y) in H by assumption + | [ H : context[min ?x ?x] |- _ ] => rewrite Nat.min_idempotent in H + | [ H : context[?x - ?x] |- _ ] => rewrite Nat.sub_diag in H | [ H : context[?x - 0] |- _ ] => rewrite Nat.sub_0_r in H end. Local Ltac p_step := repeat p_step_t'. @@ -1612,7 +1611,7 @@ Section recursive_descent_parser. (or_introl pf') initial_nonterminals_data (reflexivity _) - offset (len - len0_minus_len) Hlen (le_minus _ _) nt) + offset (len - len0_minus_len) Hlen (Nat.le_sub_l _ _) nt) offset (len - len) (Hlen_helper_sub_sub Hlen) (nonterminal_to_production nt)) @@ -1628,7 +1627,7 @@ Section recursive_descent_parser. 
(fun _ => _) (fun is_valid => _) (fun is_valid => _) - (Sumbool.sumbool_of_bool (negb (EqNat.beq_nat valid_len 0) && is_valid_nonterminal valid nt))); + (Sumbool.sumbool_of_bool (negb (Nat.eqb valid_len 0) && is_valid_nonterminal valid nt))); [ ((** It was valid, so we can remove it *) edestruct (fun pf'' pf''' => @parse_productions' @@ -1640,7 +1639,7 @@ Section recursive_descent_parser. (or_intror (conj eq_refl pf'')) (remove_nonterminal valid nt) pf''' offset (len0 - len0_minus_len) - Hlen (le_minus _ _)) + Hlen (Nat.le_sub_l _ _)) offset (len0 - len) (Hlen_helper_sub_sub Hlen) (nonterminal_to_production nt)) @@ -1794,7 +1793,7 @@ Section recursive_descent_parser. : dec (minimal_parse_of_nonterminal (G := G) (length str) initial_nonterminals_data (substring 0 (length str - 0) str) (to_nonterminal nt)). Proof. destruct (parse_nonterminal'_substring nt) as [p|p]; [ left | right ]; - rewrite <- minus_n_O; + rewrite Nat.sub_0_r; exact p. Defined. @@ -1819,7 +1818,7 @@ Section recursive_descent_parser. Proof. rewrite <- drop_0 at 1. erewrite <- take_long at 1 by reflexivity. - rewrite drop_length, <- minus_n_O. + rewrite drop_length, Nat.sub_0_r. expand_once. destruct (Utils.dec (is_valid_nonterminal initial_nonterminals_data nt)) as [H|H]; repeat eq_t'. @@ -1842,8 +1841,7 @@ Section recursive_descent_parser. Proof. R_etransitivity_eq; [ eapply parse_nonterminal'_substring_correct | ]. unfold parse_nonterminal'_substring_minus. - edestruct parse_nonterminal'_substring; - destruct (minus_n_O (length str)); reflexivity. + edestruct parse_nonterminal'_substring; reflexivity. Qed. Lemma parse_nonterminal'_correct @@ -1969,7 +1967,7 @@ Section recursive_descent_parser. erewrite <- take_long at 1 by reflexivity; rewrite drop_length(*, <- minus_n_O at 1*). Local Ltac substring_to_str := - repeat rewrite <- minus_n_O at 1; rewrite drop_0, take_long at 1 by reflexivity. + repeat rewrite Nat.sub_0_r at 1; rewrite drop_0, take_long at 1 by reflexivity. Lemma Hlen0 {lenstr} : lenstr - 0 = 0 \/ 0 + (lenstr - 0) <= lenstr. Proof. omega. Qed. @@ -1987,7 +1985,7 @@ Section recursive_descent_parser. destruct parse_item_substring as [p|np]; [ left | right; intro p; apply np; clear np ]; (eapply expand_minimal_parse_of_item_beq; [ | eassumption ]); - clear -HSLP; abstract (rewrite <- minus_n_O, substring_correct3'; reflexivity). + clear -HSLP; abstract (rewrite Nat.sub_0_r, substring_correct3'; reflexivity). Defined. Lemma parse_item_substring_correct @@ -1998,7 +1996,7 @@ Section recursive_descent_parser. Proof. str_to_substring. unfold GenericRecognizer.parse_item. - rewrite (minus_n_O (length str)) at 6; + rewrite <-(Nat.sub_0_r (length str)) at 6; apply parse_item'_all_correct; intro; substring_to_str. apply parse_nonterminal'_substring_minus_correct. Qed. @@ -2024,17 +2022,17 @@ Section recursive_descent_parser. Definition parse_production_substring_minus : dec (minimal_parse_of_production (G := G) (length str) initial_nonterminals_data (substring 0 (length str - 0) str) (to_production p)). Proof. - eapply parse_production'; [ | right; clear; apply le_minus | reflexivity.. | assumption | assumption ]. + eapply parse_production'; [ | right; clear; apply Nat.le_sub_l | reflexivity.. | assumption | assumption ]. intros. eapply (@parse_nonterminal_or_abort (length str, _)); - simpl; try reflexivity; subst; try assumption; apply le_minus. + simpl; try reflexivity; subst; try assumption; apply Nat.le_sub_l. Defined. 
Definition parse_production_substring : dec (minimal_parse_of_production (G := G) (length str) initial_nonterminals_data (substring 0 (length str) str) (to_production p)). Proof. destruct parse_production_substring_minus as [p'|p']; [ left | right ]; - rewrite <- minus_n_O in p'; + rewrite Nat.sub_0_r in p'; exact p'. Defined. @@ -2070,7 +2068,7 @@ Section recursive_descent_parser. R_etransitivity_eq; [ eapply parse_production_substring_minus_correct | ]. unfold parse_production_substring. destruct parse_production_substring_minus; - destruct (minus_n_O (length str)); reflexivity. + destruct (Nat.sub_0_r (length str)); reflexivity. Qed. Lemma parse_production_correct @@ -2094,17 +2092,17 @@ Section recursive_descent_parser. Definition parse_productions_substring_minus : dec (minimal_parse_of (G := G) (length str) initial_nonterminals_data (substring 0 (length str - 0) str) (List.map to_production ps)). Proof. - eapply parse_productions'; [ | right; apply le_minus | reflexivity.. | assumption | assumption ]. + eapply parse_productions'; [ | right; apply Nat.le_sub_l | reflexivity.. | assumption | assumption ]. intros. eapply (@parse_nonterminal_or_abort (length str, _)); - simpl; try reflexivity; subst; try apply le_minus; assumption. + simpl; try reflexivity; subst; try apply Nat.le_sub_l; assumption. Defined. Definition parse_productions_substring : dec (minimal_parse_of (G := G) (length str) initial_nonterminals_data (substring 0 (length str) str) (List.map to_production ps)). Proof. destruct parse_productions_substring_minus as [p'|p']; [ left | right ]; - rewrite <- minus_n_O in p'; + rewrite Nat.sub_0_r in p'; exact p'. Defined. @@ -2137,7 +2135,7 @@ Section recursive_descent_parser. R_etransitivity_eq; [ eapply parse_productions_substring_minus_correct | ]. unfold parse_productions_substring. destruct parse_productions_substring_minus; - destruct (minus_n_O (length str)); reflexivity. + destruct (Nat.sub_0_r (length str)); reflexivity. Qed. Lemma parse_productions_correct diff --git a/src/Parsers/GenericRecognizerOptimized.v b/src/Parsers/GenericRecognizerOptimized.v index 0727dae96..c483af3c3 100644 --- a/src/Parsers/GenericRecognizerOptimized.v +++ b/src/Parsers/GenericRecognizerOptimized.v @@ -1,7 +1,7 @@ Require Export Fiat.Common.Coq__8_4__8_5__Compat. (** * Definition of an optimized CFG parser-recognizer *) Require Import Coq.Lists.List Coq.Strings.String. -Require Import Coq.Numbers.Natural.Peano.NPeano Coq.Arith.Compare_dec Coq.Arith.Wf_nat. +Require Import Coq.Arith.PeanoNat Coq.Arith.Compare_dec Coq.Arith.Wf_nat. Require Import Fiat.Common.List.Operations. Require Import Fiat.Common.List.ListMorphisms. Require Import Fiat.Parsers.ContextFreeGrammar.Core. @@ -36,7 +36,6 @@ Require Export Fiat.Common.Sigma. Require Import Fiat.Parsers.StringLike.Core. Require Import Fiat.Parsers.StringLike.Properties. Require Import Fiat.Parsers.GenericRecognizerOptimizedTactics. -Import NPeano. Set Implicit Arguments. Local Open Scope string_like_scope. @@ -333,7 +332,7 @@ Section recursive_descent_parser. (fun _ => _) (N0 a' b' c') (list_rect P0 (fun _ _ _ => ret_production_nil_true) C0 ls' a' b' c') - (EqNat.beq_nat (List.length ls') 0)) + (Nat.eqb (List.length ls') 0)) = list_rect P0 N0 C0 ls' a' b' c') _ _ @@ -360,7 +359,7 @@ Section recursive_descent_parser. end. } { simpl. match goal with - | [ |- context[EqNat.beq_nat (List.length ?ls) 0] ] + | [ |- context[Nat.eqb (List.length ?ls) 0] ] => is_var ls; destruct ls; simpl; try reflexivity end; []. 
repeat match goal with @@ -416,13 +415,13 @@ Section recursive_descent_parser. repeat optsplit_t'. { apply (f_equal2 ret_production_cons); [ | reflexivity ]. repeat misc_opt'. - rewrite Min.min_idempotent. + rewrite Nat.min_idempotent. reflexivity. } { apply (f_equal2 ret_production_cons). { apply (f_equal3 (fun (b : bool) x y => if b then x else y)); [ | reflexivity | reflexivity ]. apply (f_equal2 andb); [ | reflexivity ]. match goal with - | [ |- _ = EqNat.beq_nat (min ?v ?x) ?v ] + | [ |- _ = Nat.eqb (min ?v ?x) ?v ] => refine (_ : Compare_dec.leb v x = _) end. match goal with @@ -592,8 +591,8 @@ Section recursive_descent_parser. => transitivity (if (orb (negb b') b) then t else f); [ | destruct b, b'; reflexivity ] end; fin_step_opt. match goal with - | [ |- context[negb (EqNat.beq_nat ?x 0)] ] - => replace (negb (EqNat.beq_nat x 0)) with (Compare_dec.leb 1 x) + | [ |- context[negb (Nat.eqb ?x 0)] ] + => replace (negb (Nat.eqb x 0)) with (Compare_dec.leb 1 x) by (destruct x as [|[]]; reflexivity) end. reflexivity. } @@ -615,8 +614,8 @@ Section recursive_descent_parser. => transitivity (if (orb (negb b') b) then t else f); [ | destruct b, b'; reflexivity ] end; fin_step_opt. match goal with - | [ |- context[negb (EqNat.beq_nat ?x 0)] ] - => replace (negb (EqNat.beq_nat x 0)) with (Compare_dec.leb 1 x) + | [ |- context[negb (Nat.eqb ?x 0)] ] + => replace (negb (Nat.eqb x 0)) with (Compare_dec.leb 1 x) by (destruct x as [|[]]; reflexivity) end. reflexivity. } diff --git a/src/Parsers/GenericRecognizerOptimizedTactics.v b/src/Parsers/GenericRecognizerOptimizedTactics.v index 111104d2e..475c51791 100644 --- a/src/Parsers/GenericRecognizerOptimizedTactics.v +++ b/src/Parsers/GenericRecognizerOptimizedTactics.v @@ -1,6 +1,7 @@ Require Export Fiat.Common.Coq__8_4__8_5__Compat. Require Import Coq.Lists.List. Require Import Coq.Arith.Compare_dec. +Require Import Coq.Arith.PeanoNat. Require Import Fiat.Parsers.BaseTypes. Require Import Fiat.Parsers.BaseTypesLemmas. Require Import Fiat.Parsers.GenericBaseTypes. @@ -14,7 +15,6 @@ Require Import Fiat.Common.BoolFacts. Require Import Fiat.Common.NatFacts. Require Import Fiat.Common.List.ListFacts. Require Import Fiat.Common Fiat.Common.Wf Fiat.Common.Wf2 Fiat.Common.Equality. -Import NPeano. Ltac t_reduce_fix := repeat match goal with @@ -66,9 +66,9 @@ Ltac t_reduce_fix := | [ H : (_ && _)%bool = true |- _ ] => apply Bool.andb_true_iff in H | [ H : _ = in_left |- _ ] => clear H | [ H : _ /\ _ |- _ ] => destruct H - | [ H : context[negb (EqNat.beq_nat ?x ?y)] |- _ ] => destruct (EqNat.beq_nat x y) eqn:? - | [ H : EqNat.beq_nat _ _ = false |- _ ] => apply EqNat.beq_nat_false in H - | [ H : EqNat.beq_nat _ _ = true |- _ ] => apply EqNat.beq_nat_true in H + | [ H : context[negb (Nat.eqb ?x ?y)] |- _ ] => destruct (Nat.eqb x y) eqn:? 
+ | [ H : Nat.eqb _ _ = false |- _ ] => apply (fun n m => proj1 (Nat.eqb_neq n m)) in H + | [ H : Nat.eqb _ _ = true |- _ ] => apply (fun n m => proj1 (Nat.eqb_eq n m)) in H | [ H : snd ?x = _ |- _ ] => is_var x; destruct x | _ => progress simpl negb in * | [ H : false = true |- _ ] => inversion H @@ -200,7 +200,7 @@ Ltac fin_step_opt := | [ |- _ = 0 ] => reflexivity | [ |- _ = 1 ] => reflexivity | [ |- _ = None ] => reflexivity - | [ |- _ = EqNat.beq_nat _ _ ] => apply f_equal2 + | [ |- _ = Nat.eqb _ _ ] => apply f_equal2 | [ |- _ = Compare_dec.leb _ _ ] => apply f_equal2 | [ |- _ = S _ ] => apply f_equal | [ |- _ = string_beq _ _ ] => apply f_equal2 @@ -226,9 +226,9 @@ Ltac fin_step_opt := Ltac misc_opt' := idtac; match goal with - | _ => progress rewrite ?max_min_n, ?Minus.minus_diag, ?Nat.sub_0_r, ?uneta_bool, ?beq_nat_min_0(*, ?bool_rect_flatten*) - | _ => rewrite Min.min_l by assumption - | _ => rewrite Min.min_r by assumption + | _ => progress rewrite ?max_min_n, ?Nat.sub_diag, ?Nat.sub_0_r, ?uneta_bool, ?beq_nat_min_0(*, ?bool_rect_flatten*) + | _ => rewrite Nat.min_l by assumption + | _ => rewrite Nat.min_r by assumption | [ |- context[if ?ltb ?x ?y then _ else _] ] => rewrite if_to_min | [ |- context[min ?x ?y - ?x] ] => rewrite min_sub_same | [ |- context[(min ?x ?y - ?x)%natr] ] => rewrite min_subr_same diff --git a/src/Parsers/MinimalParseOfParse.v b/src/Parsers/MinimalParseOfParse.v index e95246021..f98ece70b 100644 --- a/src/Parsers/MinimalParseOfParse.v +++ b/src/Parsers/MinimalParseOfParse.v @@ -259,7 +259,7 @@ Section cfg. Proof. setoid_subst str'. rewrite take_length. - apply Min.min_case_strong; omega. + apply Nat.min_case_strong; omega. Qed. Lemma Hlen0_drop {len0 len0' str str' n} @@ -526,8 +526,8 @@ Section cfg. intro; apply H | _ => eapply H'; eassumption | _ => assumption - | [ |- _ < _ ] => eapply Lt.lt_trans; eassumption - | [ |- _ < _ ] => eapply Lt.lt_le_trans; eassumption + | [ |- _ < _ ] => eapply Nat.lt_trans; eassumption + | [ |- _ < _ ] => eapply Nat.lt_le_trans; eassumption end. Defined. @@ -536,7 +536,7 @@ Section cfg. : alt_option h valid str -> alt_option h' valid' str'. Proof. apply expand_alt_option'; try assumption. - apply Lt.lt_le_weak; assumption. + apply Nat.lt_le_incl; assumption. Defined. Section wf_parts. @@ -606,7 +606,7 @@ Section cfg. eexists (MinimalParse.MinParseTerminal _ _ _ _ _ pf0 pf0'); simpl; constructor. } { edestruct (fun pf => @minimal_parse_of_nonterminal__of__parse_of_nonterminal (S h') pf len0 _ valid' _ p') as [ [p'' H''] | p'' ]; - try solve [ repeat (apply Lt.lt_n_Sn || apply Lt.lt_S) + try solve [ repeat (apply Nat.lt_succ_diag_r || apply Nat.lt_lt_succ_r) | exact Hinit' | exact H_h | exact H_forall @@ -693,8 +693,8 @@ Section cfg. | [ |- (_ * _)%type ] => split | [ H : _ <= _ |- _ <= _ ] => apply H - | _ => apply Le.le_n_S - | _ => apply Plus.plus_le_compat + | _ => apply le_n_S + | _ => apply Nat.add_le_mono | [ H0 : Forall_parse_of_item _ _, H1 : Forall_parse_of_production _ _ |- Forall_parse_of_production _ _ ] @@ -721,7 +721,7 @@ Section cfg. 
| [ H : context[min _ _] |- _ ] => rewrite min_r in H by omega | _ => rewrite drop_length | _ - => solve [ eauto using le_S, Le.le_trans, Plus.le_plus_l, Plus.le_plus_r, drop_0, take_long, NPeano.Nat.eq_le_incl, bool_eq_empty, drop_length, (fun x y => proj2 (NPeano.Nat.sub_0_le x y)) with nocore ] + => solve [ eauto using le_S, Nat.le_trans, Nat.le_add_l, Nat.le_add_r, drop_0, take_long, Nat.eq_le_incl, bool_eq_empty, drop_length, (fun x y => proj2 (Nat.sub_0_le x y)) with nocore ] end. Local Ltac min_parse_prod_t := repeat min_parse_prod_t'. @@ -765,24 +765,24 @@ Section cfg. repeat (reflexivity || esplit). } { specialize (fun h' pf => @minimal_parse_of_nonterminal__of__parse_of_nonterminal - h' (transitivity pf (Lt.lt_n_Sn _))). + h' (transitivity pf (Nat.lt_succ_diag_r _))). change (S ((size_of_parse_item p0') + (size_of_parse_production p1')) < S h') in H_h. - apply Lt.lt_S_n in H_h. - pose proof (Lt.le_lt_trans _ _ _ (Plus.le_plus_l _ _) H_h) as H_h0. - pose proof (Lt.le_lt_trans _ _ _ (Plus.le_plus_r _ _) H_h) as H_h1. + apply Nat.succ_lt_mono in H_h. + pose proof (Nat.le_lt_trans _ _ _ (Nat.le_add_l _ _) H_h) as H_h0. + pose proof (Nat.le_lt_trans _ _ _ (Nat.le_add_r _ _) H_h) as H_h1. clear H_h. assert (pf0' : length (take n str') <= length str') - by (rewrite take_length; apply Min.min_case_strong; omega). + by (rewrite take_length; apply Nat.min_case_strong; omega). assert (pf1' : length (drop n str') <= length str') by (rewrite drop_length; omega). - pose proof (fun valid Hinit => @minimal_parse_of_item__of__parse_of_item' _ h' minimal_parse_of_nonterminal__of__parse_of_nonterminal _ (transitivity pf0' pf) valid _ p0' H_h0 Hinit) as p_it. - pose proof (fun valid Hinit => @minimal_parse_of_production__of__parse_of_production' h' minimal_parse_of_nonterminal__of__parse_of_nonterminal _ (transitivity pf1' pf) valid _ p1' H_h1 Hinit) as p_prod. + pose proof (fun valid Hinit => @minimal_parse_of_item__of__parse_of_item' _ h' minimal_parse_of_nonterminal__of__parse_of_nonterminal _ (transitivity pf0' pf) valid _ p0' H_h1 Hinit) as p_it. + pose proof (fun valid Hinit => @minimal_parse_of_production__of__parse_of_production' h' minimal_parse_of_nonterminal__of__parse_of_nonterminal _ (transitivity pf1' pf) valid _ p1' H_h0 Hinit) as p_prod. clear pf0' pf1'. destruct (le_lt_dec (length str') n) as [ Hle | Hle ], (zerop (min n (length str'))) as [Hstr' | Hstr' ]. { (* empty, empty *) - rewrite Min.min_r in Hstr' by assumption. + rewrite Nat.min_r in Hstr' by assumption. specialize (p_it valid' Hinit'); specialize (p_prod valid' Hinit'). destruct p_it as [ [ p0'' H0''] |], p_prod as [ [ p1'' H1'' ] |]; [ | | | ]; @@ -822,40 +822,40 @@ Section cfg. destruct p as [pat pats p' | pat pats p']. { clear minimal_parse_of_productions__of__parse_of_productions'. edestruct (@minimal_parse_of_production__of__parse_of_production' _ h' minimal_parse_of_nonterminal__of__parse_of_nonterminal _ pf valid' _ p') as [ [p'' p''H] | [nonterminal' H'] ]; - try solve [ exact (Lt.lt_S_n _ _ H_h) + try solve [ exact (proj2 (Nat.succ_lt_mono _ _) H_h) | exact H_forall | exact Hinit' ]; [|]. { left. exists (MinParseHead pats p''). simpl. - exact (Le.le_n_S _ _ p''H). } + exact (le_n_S _ _ p''H). } { right. exists nonterminal'. split; try solve [ exact (fst H') ]; []. exists (proj1_sig (snd H')). - exact (Lt.lt_S _ _ (proj2_sig (snd H'))). } } + exact (Nat.lt_lt_succ_r _ _ (proj2_sig (snd H'))). 
} } { specialize (fun h' pf => @minimal_parse_of_nonterminal__of__parse_of_nonterminal - h' (transitivity pf (Lt.lt_n_Sn _))). + h' (transitivity pf (Nat.lt_succ_diag_r _))). edestruct (minimal_parse_of_productions__of__parse_of_productions' h' minimal_parse_of_nonterminal__of__parse_of_nonterminal _ pf valid' _ p') as [ [p'' p''H] | [nonterminal' H'] ]; - try solve [ exact (Lt.lt_S_n _ _ H_h) + try solve [ exact (proj2 (Nat.succ_lt_mono _ _) H_h) | exact Hinit' | exact H_forall ]; [|]. { left. exists (MinParseTail pat p''). simpl. - exact (Le.le_n_S _ _ p''H). } + exact (le_n_S _ _ p''H). } { right. exists nonterminal'. split; try solve [ exact (fst H') ]; []. exists (proj1_sig (snd H')). - exact (Lt.lt_S _ _ (proj2_sig (snd H'))). } } + exact (Nat.lt_lt_succ_r _ _ (proj2_sig (snd H'))). } } Defined. End productions. @@ -877,29 +877,29 @@ Section cfg. destruct (le_lt_eq_dec _ _ H) as [pf_lt|pf_eq]. { (** [str] got smaller, so we reset the valid nonterminals list *) - destruct (@minimal_parse_of_productions__of__parse_of_productions' (length str) h minimal_parse_of_nonterminal__of__parse_of_nonterminal str (reflexivity _) initial_nonterminals_data (Lookup G nonterminal) p (Lt.lt_S_n _ _ pf) (reflexivity _)) as [p'|p']. + destruct (@minimal_parse_of_productions__of__parse_of_productions' (length str) h minimal_parse_of_nonterminal__of__parse_of_nonterminal str (reflexivity _) initial_nonterminals_data (Lookup G nonterminal) p (proj2 (Nat.succ_lt_mono _ _) pf) (reflexivity _)) as [p'|p']. { left. exists (MinParseNonTerminalStrLt valid _ pf_lt (proj2 (initial_nonterminals_correct _) Hvalid') (proj1_sig p')); simpl. simpl in *. - exact (Le.le_n_S _ _ (proj2_sig p')). } + exact (le_n_S _ _ (proj2_sig p')). } { simpl. right; eapply expand_alt_option; [..| exact p' ]; - solve [ apply Lt.lt_n_Sn + solve [ apply Nat.lt_succ_diag_r | assumption | reflexivity ]. } } { (** [str] didn't get smaller, so we cache the fact that we've hit this nonterminal already *) destruct (Sumbool.sumbool_of_bool (is_valid_nonterminal valid (of_nonterminal nonterminal))) as [ Hvalid | Hinvalid ]. - { destruct (@minimal_parse_of_productions__of__parse_of_productions' len0 h minimal_parse_of_nonterminal__of__parse_of_nonterminal str Hstr (remove_nonterminal valid (of_nonterminal nonterminal)) (Lookup G nonterminal) p (Lt.lt_S_n _ _ pf) (transitivity (R := sub_nonterminals_listT) (@sub_nonterminals_listT_remove _ _ _ _ _ _) Hinit')) as [p'|p']. + { destruct (@minimal_parse_of_productions__of__parse_of_productions' len0 h minimal_parse_of_nonterminal__of__parse_of_nonterminal str Hstr (remove_nonterminal valid (of_nonterminal nonterminal)) (Lookup G nonterminal) p (proj2 (Nat.succ_lt_mono _ _) pf) (transitivity (R := sub_nonterminals_listT) (@sub_nonterminals_listT_remove _ _ _ _ _ _) Hinit')) as [p'|p']. { left. exists (@MinimalParse.MinParseNonTerminalStrEq _ _ _ _ _ _ _ _ _ pf_eq (proj2 (initial_nonterminals_correct _) Hvalid') Hvalid (proj1_sig p')). simpl in *. - exact (Le.le_n_S _ _ (proj2_sig p')). } + exact (le_n_S _ _ (proj2_sig p')). } { destruct p' as [nonterminal' p']. destruct (string_dec nonterminal nonterminal') as [|n]. { subst nonterminal; simpl in *. 
edestruct (@minimal_parse_of_nonterminal__of__parse_of_nonterminal (S (size_of_parse p)) pf len0 _ valid nonterminal' (proj1_sig (snd p'))) as [p''|p'']; - try solve [ apply Lt.lt_n_S, (proj2_sig (snd p')) + try solve [ apply (proj1 (Nat.succ_lt_mono _ _)), (proj2_sig (snd p')) | subst; reflexivity | assumption | split; [ exact (proj2 (fst p')) @@ -909,7 +909,7 @@ Section cfg. exists (proj1_sig p''). etransitivity; [ exact (proj2_sig p'') - | exact (Lt.lt_le_weak _ _ (Lt.lt_n_S _ _ (proj2_sig (snd p')))) ]. } + | exact (Nat.lt_le_incl _ _ ((proj1 (Nat.succ_lt_mono _ _)) (proj2_sig (snd p')))) ]. } { right. exists (projT1 p''). split; @@ -918,7 +918,7 @@ Section cfg. exists (proj1_sig (snd (projT2 p''))). etransitivity; [ exact (proj2_sig (snd (projT2 p''))) - | exact (Lt.lt_n_S _ _ (proj2_sig (snd p'))) ]. } } + | exact ((proj1 (Nat.succ_lt_mono _ _)) (proj2_sig (snd p'))) ]. } } { right. exists nonterminal'. destruct p' as [p'H p'p]. @@ -945,7 +945,7 @@ Section cfg. end. exact p'H. } { exists (proj1_sig p'p). - exact (Lt.lt_S _ _ (proj2_sig p'p)). } } } } + exact (Nat.lt_lt_succ_r _ _ (proj2_sig p'p)). } } } } { (** oops, we already saw this nonterminal in the past. ABORT! *) right. exists nonterminal. @@ -953,7 +953,7 @@ Section cfg. split; [ split; assumption | ]. exists p. - apply Lt.lt_n_Sn. } } + apply Nat.lt_succ_diag_r. } } Defined. End step. diff --git a/src/Parsers/ParserImplementation.v b/src/Parsers/ParserImplementation.v index da2a00c7d..da6259600 100644 --- a/src/Parsers/ParserImplementation.v +++ b/src/Parsers/ParserImplementation.v @@ -87,11 +87,11 @@ Section implementation. (parse_of_item__of__minimal_parse_of_item pit) (parse_of_production__of__minimal_parse_of_production pits)). specialize_by assumption. - rewrite Min.min_r by assumption. + rewrite Nat.min_r by assumption. apply H'; eauto. } { exists (length (substring offset len str)). specialize (H' _ (reflexivity _)). - rewrite Min.min_idempotent. + rewrite Nat.min_idempotent. rewrite !substring_length_no_min in * by assumption. repeat match goal with | [ H : context[length (substring _ _ _)] |- _ ] => rewrite !substring_length_no_min in H by assumption diff --git a/src/Parsers/Reachable/All/MinimalReachableOfReachable.v b/src/Parsers/Reachable/All/MinimalReachableOfReachable.v index a70d1ccf3..0aa10217c 100644 --- a/src/Parsers/Reachable/All/MinimalReachableOfReachable.v +++ b/src/Parsers/Reachable/All/MinimalReachableOfReachable.v @@ -160,8 +160,8 @@ Section cfg. intro; apply H | _ => eapply H'; eassumption | _ => assumption - | [ |- _ < _ ] => eapply Lt.lt_trans; eassumption - | [ |- _ < _ ] => eapply Lt.lt_le_trans; eassumption + | [ |- _ < _ ] => eapply Nat.lt_trans; eassumption + | [ |- _ < _ ] => eapply Nat.lt_le_trans; eassumption end. Defined. @@ -170,7 +170,7 @@ Section cfg. : alt_option h valid -> alt_option h' valid'. Proof. apply expand_alt_option'; try assumption. - apply Lt.lt_le_weak; assumption. + apply Nat.lt_le_incl; assumption. Defined. Section wf_parts. @@ -221,7 +221,7 @@ Section cfg. intros valid' pats p H_h Hinit'. destruct h as [|h']; [ exfalso; omega | ]. specialize (minimal_reachable_from_production__of__reachable_from_production' h' (fun h'' pf => minimal_reachable_from_item__of__reachable_from_item _ (le_S _ _ pf))). - specialize (minimal_reachable_from_item__of__reachable_from_item h' (Le.le_n_Sn _)). + specialize (minimal_reachable_from_item__of__reachable_from_item h' (Nat.le_succ_diag_r _)). destruct p as [ ? ? p' | ? ? p' ]. 
{ destruct (fun k => minimal_reachable_from_item__of__reachable_from_item valid' _ p' k Hinit') as [ [p'' H''] | p'' ]; @@ -229,18 +229,18 @@ Section cfg. | left | right ]. { eexists (MinReachableProductionHead _ p''). simpl in *. - apply Le.le_n_S; exact H''. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); exact H''. } { eapply expand_alt_option; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | reflexivity ]. } } - { destruct (minimal_reachable_from_production__of__reachable_from_production' valid' _ p' (Lt.lt_S_n _ _ H_h) Hinit') + { destruct (minimal_reachable_from_production__of__reachable_from_production' valid' _ p' ((fun n m => proj2 (Nat.succ_lt_mono n m)) _ _ H_h) Hinit') as [ [p'' H''] | p'' ]; [ left | right ]. { eexists (MinReachableProductionTail _ p''). simpl in *. - apply Le.le_n_S; exact H''. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); exact H''. } { eapply expand_alt_option; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | reflexivity ]. } } Defined. End production. @@ -257,7 +257,7 @@ Section cfg. destruct h as [|h']; [ exfalso; omega | ]. specialize (minimal_reachable_from_productions__of__reachable_from_productions' h' (fun h'' pf => minimal_reachable_from_item__of__reachable_from_item _ (le_S _ _ pf))). pose proof (minimal_reachable_from_production__of__reachable_from_production' (fun h'' pf => minimal_reachable_from_item__of__reachable_from_item _ (le_S _ _ pf))) as minimal_reachable_from_production__of__reachable_from_production''. - specialize (minimal_reachable_from_item__of__reachable_from_item h' (Le.le_n_Sn _)). + specialize (minimal_reachable_from_item__of__reachable_from_item h' (Nat.le_succ_diag_r _)). destruct p as [ ? ? p' | ? ? p' ]. { destruct (fun k => minimal_reachable_from_production__of__reachable_from_production'' valid' _ p' k Hinit') as [ [p'' H''] | p'' ]; @@ -265,18 +265,18 @@ Section cfg. | left | right ]. { eexists (MinReachableHead _ p''). simpl in *. - apply Le.le_n_S; exact H''. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); exact H''. } { eapply expand_alt_option; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | reflexivity ]. } } - { destruct (minimal_reachable_from_productions__of__reachable_from_productions' valid' _ p' (Lt.lt_S_n _ _ H_h) Hinit') + { destruct (minimal_reachable_from_productions__of__reachable_from_productions' valid' _ p' ((fun n m => proj2 (Nat.succ_lt_mono n m)) _ _ H_h) Hinit') as [ [p'' H''] | p'' ]; [ left | right ]. { eexists (MinReachableTail _ p''). simpl in *. - apply Le.le_n_S; exact H''. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); exact H''. } { eapply expand_alt_option; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | reflexivity ]. } } Defined. End productions. @@ -294,12 +294,12 @@ Section cfg. { left. eexists (MinReachableTerminal _ _ _ Pch); simpl; constructor. } { case_eq (is_valid_nonterminal valid' (of_nonterminal nonterminal')); intro H'''. 
- { edestruct (fun k => @minimal_reachable_from_productions__of__reachable_from_productions' _ (fun h'' pf => minimal_reachable_from_item__of__reachable_from_item _ (Le.le_n_S _ _ pf)) (remove_nonterminal valid' (of_nonterminal nonterminal')) _ p' k) + { edestruct (fun k => @minimal_reachable_from_productions__of__reachable_from_productions' _ (fun h'' pf => minimal_reachable_from_item__of__reachable_from_item _ ((fun n m => proj1 (Nat.succ_le_mono n m)) _ _ pf)) (remove_nonterminal valid' (of_nonterminal nonterminal')) _ p' k) as [ [ p'' H'' ] | [ nt'' H'' ] ]; [ solve [ auto with arith ] | left | ]. { eexists (MinReachableNonTerminal _ _ H''' p''). - apply Le.le_n_S; eassumption. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); eassumption. } { destruct (string_dec nonterminal' nt''); subst. { destruct H'' as [ H'' [ p'' H'''' ] ]. simpl in *. @@ -341,7 +341,7 @@ Section cfg. eassumption. } { destruct_head sigT. eexists. - apply Lt.lt_S; eassumption. } } } } + apply Nat.lt_lt_succ_r; eassumption. } } } } { right. exists nonterminal'; repeat split; trivial; []. exists p'. diff --git a/src/Parsers/Reachable/MaybeEmpty/MinimalOfCore.v b/src/Parsers/Reachable/MaybeEmpty/MinimalOfCore.v index c28f26f0a..bb40bee5c 100644 --- a/src/Parsers/Reachable/MaybeEmpty/MinimalOfCore.v +++ b/src/Parsers/Reachable/MaybeEmpty/MinimalOfCore.v @@ -154,8 +154,8 @@ Section cfg. intro; apply H | _ => eapply H'; eassumption | _ => assumption - | [ |- _ < _ ] => eapply Lt.lt_trans; eassumption - | [ |- _ < _ ] => eapply Lt.lt_le_trans; eassumption + | [ |- _ < _ ] => eapply Nat.lt_trans; eassumption + | [ |- _ < _ ] => eapply Nat.lt_le_trans; eassumption end. Defined. @@ -164,7 +164,7 @@ Section cfg. : alt_option h valid -> alt_option h' valid'. Proof. apply expand_alt_option'; try assumption. - apply Lt.lt_le_weak; assumption. + apply Nat.lt_le_incl; assumption. Defined. Section wf_parts. @@ -227,7 +227,7 @@ Section cfg. intros valid' pats p H_h Hinit'. destruct h as [|h']; [ exfalso; omega | ]. specialize (minimal_maybe_empty_production__of__maybe_empty_production' h' (fun h'' pf => minimal_maybe_empty_item__of__maybe_empty_item _ (le_S _ _ pf))). - specialize (minimal_maybe_empty_item__of__maybe_empty_item h' (Le.le_n_Sn _)). + specialize (minimal_maybe_empty_item__of__maybe_empty_item h' (Nat.le_succ_diag_r _)). destruct p as [ | ?? p0' p1' ]. { left; eexists (MinMaybeEmptyProductionNil _); simpl; reflexivity. } { assert (size_of_maybe_empty_item p0' < h') by exact (lt_helper_1 H_h). @@ -242,14 +242,14 @@ Section cfg. | right ]. { eexists (MinMaybeEmptyProductionCons p0'' p1''). simpl in *. - apply Le.le_n_S, Plus.plus_le_compat; assumption. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)), Nat.add_le_mono; assumption. } { eapply expand_alt_option; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | apply lt_helper_1' | apply lt_helper_2' | reflexivity ]. } { eapply expand_alt_option; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | apply lt_helper_1' | apply lt_helper_2' | reflexivity ]. } } @@ -268,7 +268,7 @@ Section cfg. destruct h as [|h']; [ exfalso; omega | ]. specialize (minimal_maybe_empty_productions__of__maybe_empty_productions' h' (fun h'' pf => minimal_maybe_empty_item__of__maybe_empty_item _ (le_S _ _ pf))). 
pose proof (minimal_maybe_empty_production__of__maybe_empty_production' (fun h'' pf => minimal_maybe_empty_item__of__maybe_empty_item _ (le_S _ _ pf))) as minimal_maybe_empty_production__of__maybe_empty_production''. - specialize (minimal_maybe_empty_item__of__maybe_empty_item h' (Le.le_n_Sn _)). + specialize (minimal_maybe_empty_item__of__maybe_empty_item h' (Nat.le_succ_diag_r _)). destruct p as [ ? ? p' | ? ? p' ]. { destruct (fun k => minimal_maybe_empty_production__of__maybe_empty_production'' valid' _ p' k Hinit') as [ [p'' H''] | p'' ]; @@ -276,18 +276,18 @@ Section cfg. | left | right ]. { eexists (MinMaybeEmptyHead _ p''). simpl in *. - apply Le.le_n_S; exact H''. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); exact H''. } { eapply expand_alt_option; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | reflexivity ]. } } - { destruct (minimal_maybe_empty_productions__of__maybe_empty_productions' valid' _ p' (Lt.lt_S_n _ _ H_h) Hinit') + { destruct (minimal_maybe_empty_productions__of__maybe_empty_productions' valid' _ p' ((fun n m => proj2 (Nat.succ_lt_mono n m)) _ _ H_h) Hinit') as [ [p'' H''] | p'' ]; [ left | right ]. { eexists (MinMaybeEmptyTail _ p''). simpl in *. - apply Le.le_n_S; exact H''. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); exact H''. } { eapply expand_alt_option; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | reflexivity ]. } } Defined. End productions. @@ -303,12 +303,12 @@ Section cfg. destruct h as [|h']; [ exfalso; omega | ]. destruct p as [nonterminal' H' p']. { case_eq (is_valid_nonterminal valid' (of_nonterminal nonterminal')); intro H'''. - { edestruct (fun k => @minimal_maybe_empty_productions__of__maybe_empty_productions' _ (fun h'' pf => minimal_maybe_empty_item__of__maybe_empty_item _ (Le.le_n_S _ _ pf)) (remove_nonterminal valid' (of_nonterminal nonterminal')) _ p' k) + { edestruct (fun k => @minimal_maybe_empty_productions__of__maybe_empty_productions' _ (fun h'' pf => minimal_maybe_empty_item__of__maybe_empty_item _ ((fun n m => proj1 (Nat.succ_le_mono n m)) _ _ pf)) (remove_nonterminal valid' (of_nonterminal nonterminal')) _ p' k) as [ [ p'' H'' ] | [ nt'' H'' ] ]; [ solve [ auto with arith ] | left | ]. { eexists (MinMaybeEmptyNonTerminal _ _ H''' p''). - apply Le.le_n_S; eassumption. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); eassumption. } { destruct (string_dec nonterminal' nt''); subst. { destruct H'' as [ H'' [ p'' H'''' ] ]. simpl in *. @@ -350,7 +350,7 @@ Section cfg. eassumption. } { destruct_head sigT. eexists. - apply Lt.lt_S; eassumption. } } } } + apply Nat.lt_lt_succ_r; eassumption. } } } } { right. exists nonterminal'; repeat split; trivial; []. exists p'. diff --git a/src/Parsers/Reachable/MaybeEmpty/OfParse.v b/src/Parsers/Reachable/MaybeEmpty/OfParse.v index bf1304f67..9255b36bc 100644 --- a/src/Parsers/Reachable/MaybeEmpty/OfParse.v +++ b/src/Parsers/Reachable/MaybeEmpty/OfParse.v @@ -63,7 +63,7 @@ Section cfg. { constructor. } { constructor. { apply (fun k => @parse_empty_maybe_empty_parse_of_item' parse_empty_maybe_empty_parse_of_productions valid0 _ Hsub _ k _ (fst Hforall)). - rewrite take_length, Hlen, Min.min_0_r; reflexivity. } + rewrite take_length, Hlen, Nat.min_0_r; reflexivity. } { apply (fun k => @parse_empty_maybe_empty_parse_of_production valid0 _ Hsub _ k _ (snd Hforall)). rewrite drop_length, Hlen; reflexivity. } } } Defined. @@ -163,7 +163,7 @@ Section cfg. 
{ eexists (ParseProductionCons _ 0 (projT1 p0) (projT1 p1)). exact (projT2 p0, projT2 p1). } { rewrite ?take_length, ?drop_length, Hlen; reflexivity. } - { rewrite ?take_length, ?drop_length, Hlen, Min.min_0_r; reflexivity. } } } + { rewrite ?take_length, ?drop_length, Hlen, Nat.min_0_r; reflexivity. } } } Defined. Definition parse_empty_from_maybe_empty_parse_of_item diff --git a/src/Parsers/Reachable/OnlyFirst/MinimalReachableOfReachable.v b/src/Parsers/Reachable/OnlyFirst/MinimalReachableOfReachable.v index f463a7a1c..e51400d72 100644 --- a/src/Parsers/Reachable/OnlyFirst/MinimalReachableOfReachable.v +++ b/src/Parsers/Reachable/OnlyFirst/MinimalReachableOfReachable.v @@ -162,8 +162,8 @@ Section cfg. intro; apply H | _ => eapply H'; eassumption | _ => assumption - | [ |- _ < _ ] => eapply Lt.lt_trans; eassumption - | [ |- _ < _ ] => eapply Lt.lt_le_trans; eassumption + | [ |- _ < _ ] => eapply Nat.lt_trans; eassumption + | [ |- _ < _ ] => eapply Nat.lt_le_trans; eassumption end. Defined. @@ -172,7 +172,7 @@ Section cfg. : alt_option h valid -> alt_option h' valid'. Proof. apply expand_alt_option'; try assumption. - apply Lt.lt_le_weak; assumption. + apply Nat.lt_le_incl; assumption. Defined. Section wf_parts. @@ -223,7 +223,7 @@ Section cfg. intros valid' pats p H_h Hinit'. destruct h as [|h']; [ exfalso; omega | ]. specialize (minimal_reachable_from_production__of__reachable_from_production' h' (fun h'' pf => minimal_reachable_from_item__of__reachable_from_item _ (le_S _ _ pf))). - specialize (minimal_reachable_from_item__of__reachable_from_item h' (Le.le_n_Sn _)). + specialize (minimal_reachable_from_item__of__reachable_from_item h' (Nat.le_succ_diag_r _)). destruct p as [ ?? p' | ?? p_empty p' ]. { destruct (fun k => minimal_reachable_from_item__of__reachable_from_item valid' _ p' k Hinit') as [ [p'' H''] | p'' ]; @@ -231,18 +231,18 @@ Section cfg. | left | right ]. { eexists (MinReachableProductionHead _ p''). simpl in *. - apply Le.le_n_S; exact H''. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); exact H''. } { eapply expand_alt_option; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | reflexivity ]. } } - { destruct (minimal_reachable_from_production__of__reachable_from_production' valid' _ p' (Lt.lt_S_n _ _ H_h) Hinit') + { destruct (minimal_reachable_from_production__of__reachable_from_production' valid' _ p' ((fun n m => proj2 (Nat.succ_lt_mono n m)) _ _ H_h) Hinit') as [ [p'' H''] | p'' ]; [ left | right ]. { exists (MinReachableProductionTail (minimal_maybe_empty_item__of__maybe_empty_item Hsub0 p_empty) p''). simpl in *. - apply Le.le_n_S; exact H''. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); exact H''. } { eapply expand_alt_option; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | reflexivity ]. } } Defined. End production. @@ -259,7 +259,7 @@ Section cfg. destruct h as [|h']; [ exfalso; omega | ]. specialize (minimal_reachable_from_productions__of__reachable_from_productions' h' (fun h'' pf => minimal_reachable_from_item__of__reachable_from_item _ (le_S _ _ pf))). pose proof (minimal_reachable_from_production__of__reachable_from_production' (fun h'' pf => minimal_reachable_from_item__of__reachable_from_item _ (le_S _ _ pf))) as minimal_reachable_from_production__of__reachable_from_production''. - specialize (minimal_reachable_from_item__of__reachable_from_item h' (Le.le_n_Sn _)). 
+ specialize (minimal_reachable_from_item__of__reachable_from_item h' (Nat.le_succ_diag_r _)). destruct p as [ ? ? p' | ? ? p' ]. { destruct (fun k => minimal_reachable_from_production__of__reachable_from_production'' valid' _ p' k Hinit') as [ [p'' H''] | p'' ]; @@ -267,18 +267,18 @@ Section cfg. | left | right ]. { eexists (MinReachableHead _ p''). simpl in *. - apply Le.le_n_S; exact H''. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); exact H''. } { eapply expand_alt_option; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | reflexivity ]. } } - { destruct (minimal_reachable_from_productions__of__reachable_from_productions' valid' _ p' (Lt.lt_S_n _ _ H_h) Hinit') + { destruct (minimal_reachable_from_productions__of__reachable_from_productions' valid' _ p' ((fun n m => proj2 (Nat.succ_lt_mono n m)) _ _ H_h) Hinit') as [ [p'' H''] | p'' ]; [ left | right ]. { eexists (MinReachableTail _ p''). simpl in *. - apply Le.le_n_S; exact H''. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); exact H''. } { eapply expand_alt_option; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | reflexivity ]. } } Defined. End productions. @@ -296,12 +296,12 @@ Section cfg. { left. eexists (MinReachableTerminal _ _ _ _ Pch); simpl; constructor. } { case_eq (is_valid_nonterminal valid' (of_nonterminal nonterminal')); intro H'''. - { edestruct (fun k => @minimal_reachable_from_productions__of__reachable_from_productions' _ (fun h'' pf => minimal_reachable_from_item__of__reachable_from_item _ (Le.le_n_S _ _ pf)) (remove_nonterminal valid' (of_nonterminal nonterminal')) _ p' k) + { edestruct (fun k => @minimal_reachable_from_productions__of__reachable_from_productions' _ (fun h'' pf => minimal_reachable_from_item__of__reachable_from_item _ ((fun n m => proj1 (Nat.succ_le_mono n m)) _ _ pf)) (remove_nonterminal valid' (of_nonterminal nonterminal')) _ p' k) as [ [ p'' H'' ] | [ nt'' H'' ] ]; [ solve [ auto with arith ] | left | ]. { eexists (MinReachableNonTerminal _ _ H''' p''). - apply Le.le_n_S; eassumption. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); eassumption. } { destruct (string_dec nonterminal' nt''); subst. { destruct H'' as [ H'' [ p'' H'''' ] ]. simpl in *. @@ -343,7 +343,7 @@ Section cfg. eassumption. } { destruct_head sigT. eexists. - apply Lt.lt_S; eassumption. } } } } + apply Nat.lt_lt_succ_r; eassumption. } } } } { right. exists nonterminal'; repeat split; trivial; []. exists p'. diff --git a/src/Parsers/Reachable/OnlyLast/MinimalReachableOfReachable.v b/src/Parsers/Reachable/OnlyLast/MinimalReachableOfReachable.v index 1e89d6988..f61c2c19f 100644 --- a/src/Parsers/Reachable/OnlyLast/MinimalReachableOfReachable.v +++ b/src/Parsers/Reachable/OnlyLast/MinimalReachableOfReachable.v @@ -162,8 +162,8 @@ Section cfg. intro; apply H | _ => eapply H'; eassumption | _ => assumption - | [ |- _ < _ ] => eapply Lt.lt_trans; eassumption - | [ |- _ < _ ] => eapply Lt.lt_le_trans; eassumption + | [ |- _ < _ ] => eapply Nat.lt_trans; eassumption + | [ |- _ < _ ] => eapply Nat.lt_le_trans; eassumption end. Defined. @@ -172,7 +172,7 @@ Section cfg. : alt_option h valid -> alt_option h' valid'. Proof. apply expand_alt_option'; try assumption. - apply Lt.lt_le_weak; assumption. + apply Nat.lt_le_incl; assumption. Defined. Section wf_parts. @@ -223,7 +223,7 @@ Section cfg. intros valid' pats p H_h Hinit'. destruct h as [|h']; [ exfalso; omega | ]. 
specialize (minimal_reachable_from_production__of__reachable_from_production' h' (fun h'' pf => minimal_reachable_from_item__of__reachable_from_item _ (le_S _ _ pf))). - specialize (minimal_reachable_from_item__of__reachable_from_item h' (Le.le_n_Sn _)). + specialize (minimal_reachable_from_item__of__reachable_from_item h' (Nat.le_succ_diag_r _)). destruct p as [ ?? p_empty p' | ?? p' ]. { destruct (fun k => minimal_reachable_from_item__of__reachable_from_item valid' _ p' k Hinit') as [ [p'' H''] | p'' ]; @@ -231,18 +231,18 @@ Section cfg. | left | right ]. { exists (MinReachableProductionHead (minimal_maybe_empty_production__of__maybe_empty_production Hsub0 p_empty) p''). simpl in *. - apply Le.le_n_S; exact H''. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); exact H''. } { eapply expand_alt_option; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | reflexivity ]. } } - { destruct (minimal_reachable_from_production__of__reachable_from_production' valid' _ p' (Lt.lt_S_n _ _ H_h) Hinit') + { destruct (minimal_reachable_from_production__of__reachable_from_production' valid' _ p' ((fun n m => proj2 (Nat.succ_lt_mono n m)) _ _ H_h) Hinit') as [ [p'' H''] | p'' ]; [ left | right ]. { eexists (MinReachableProductionTail _ p''). simpl in *. - apply Le.le_n_S; exact H''. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); exact H''. } { eapply expand_alt_option; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | reflexivity ]. } } Defined. End production. @@ -259,7 +259,7 @@ Section cfg. destruct h as [|h']; [ exfalso; omega | ]. specialize (minimal_reachable_from_productions__of__reachable_from_productions' h' (fun h'' pf => minimal_reachable_from_item__of__reachable_from_item _ (le_S _ _ pf))). pose proof (minimal_reachable_from_production__of__reachable_from_production' (fun h'' pf => minimal_reachable_from_item__of__reachable_from_item _ (le_S _ _ pf))) as minimal_reachable_from_production__of__reachable_from_production''. - specialize (minimal_reachable_from_item__of__reachable_from_item h' (Le.le_n_Sn _)). + specialize (minimal_reachable_from_item__of__reachable_from_item h' (Nat.le_succ_diag_r _)). destruct p as [ ? ? p' | ? ? p' ]. { destruct (fun k => minimal_reachable_from_production__of__reachable_from_production'' valid' _ p' k Hinit') as [ [p'' H''] | p'' ]; @@ -267,18 +267,18 @@ Section cfg. | left | right ]. { eexists (MinReachableHead _ p''). simpl in *. - apply Le.le_n_S; exact H''. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); exact H''. } { eapply expand_alt_option; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | reflexivity ]. } } - { destruct (minimal_reachable_from_productions__of__reachable_from_productions' valid' _ p' (Lt.lt_S_n _ _ H_h) Hinit') + { destruct (minimal_reachable_from_productions__of__reachable_from_productions' valid' _ p' ((fun n m => proj2 (Nat.succ_lt_mono n m)) _ _ H_h) Hinit') as [ [p'' H''] | p'' ]; [ left | right ]. { eexists (MinReachableTail _ p''). simpl in *. - apply Le.le_n_S; exact H''. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); exact H''. } { eapply expand_alt_option; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | reflexivity ]. } } Defined. End productions. @@ -296,12 +296,12 @@ Section cfg. { left. eexists (MinReachableTerminal _ _ _ _ Pch); simpl; constructor. 
} { case_eq (is_valid_nonterminal valid' (of_nonterminal nonterminal')); intro H'''. - { edestruct (fun k => @minimal_reachable_from_productions__of__reachable_from_productions' _ (fun h'' pf => minimal_reachable_from_item__of__reachable_from_item _ (Le.le_n_S _ _ pf)) (remove_nonterminal valid' (of_nonterminal nonterminal')) _ p' k) + { edestruct (fun k => @minimal_reachable_from_productions__of__reachable_from_productions' _ (fun h'' pf => minimal_reachable_from_item__of__reachable_from_item _ ((fun n m => proj1 (Nat.succ_le_mono n m)) _ _ pf)) (remove_nonterminal valid' (of_nonterminal nonterminal')) _ p' k) as [ [ p'' H'' ] | [ nt'' H'' ] ]; [ solve [ auto with arith ] | left | ]. { eexists (MinReachableNonTerminal _ _ H''' p''). - apply Le.le_n_S; eassumption. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); eassumption. } { destruct (string_dec nonterminal' nt''); subst. { destruct H'' as [ H'' [ p'' H'''' ] ]. simpl in *. @@ -343,7 +343,7 @@ Section cfg. eassumption. } { destruct_head sigT. eexists. - apply Lt.lt_S; eassumption. } } } } + apply Nat.lt_lt_succ_r; eassumption. } } } } { right. exists nonterminal'; repeat split; trivial; []. exists p'. diff --git a/src/Parsers/Reachable/ParenBalanced/Core.v b/src/Parsers/Reachable/ParenBalanced/Core.v index f9d9c6858..3b03ff8a5 100644 --- a/src/Parsers/Reachable/ParenBalanced/Core.v +++ b/src/Parsers/Reachable/ParenBalanced/Core.v @@ -84,7 +84,7 @@ pb = pb' '+' 0 Definition pb_new_level_fun (P : Char -> bool) (start_level : nat) : list nat := List.uniquize - EqNat.beq_nat + Nat.eqb (List.map (fun ch => pb_new_level ch start_level) (List.filter @@ -96,7 +96,7 @@ pb = pb' '+' 0 <-> ((exists ch, P ch) /\ forall ch, P ch -> new_level = pb_new_level ch start_level). Proof. unfold pb_new_level_fun. - rewrite (uniquize_singleton (beq := EqNat.beq_nat) bl lb). + rewrite (uniquize_singleton (beq := Nat.eqb) bl lb). setoid_rewrite in_map_iff. setoid_rewrite filter_In. setoid_rewrite (fun x => @and_TrueP_L (In x (enumerate Char)) (P x = true) (enumerate_correct _)). diff --git a/src/Parsers/Reachable/ParenBalanced/MinimalOfCore.v b/src/Parsers/Reachable/ParenBalanced/MinimalOfCore.v index 851db61fa..8a1a42d5d 100644 --- a/src/Parsers/Reachable/ParenBalanced/MinimalOfCore.v +++ b/src/Parsers/Reachable/ParenBalanced/MinimalOfCore.v @@ -142,8 +142,8 @@ Section cfg. | _ => eapply H'; eassumption | _ => assumption | _ => eassumption - | [ |- _ < _ ] => eapply Lt.lt_trans; eassumption - | [ |- _ < _ ] => eapply Lt.lt_le_trans; eassumption + | [ |- _ < _ ] => eapply Nat.lt_trans; eassumption + | [ |- _ < _ ] => eapply Nat.lt_le_trans; eassumption end. Unshelve. intros ??????; subst; assumption. @@ -155,7 +155,7 @@ Section cfg. : alt_option h valid -> alt_option h' valid'. Proof. apply expand_alt_option'; try assumption. - apply Lt.lt_le_weak; assumption. + apply Nat.lt_le_incl; assumption. Defined. End alt_option. @@ -222,7 +222,7 @@ Section cfg. intros valid' n pat p H_h Hinit'. destruct h as [|h']; [ exfalso; omega | ]. specialize (minimal_pb'_production__of__pb'_production' h' (fun h'' pf => minimal_pb'_item__of__pb'_item' _ (le_S _ _ pf))). - specialize (minimal_pb'_item__of__pb'_item' h' (Lt.lt_n_Sn _)). + specialize (minimal_pb'_item__of__pb'_item' h' (Nat.lt_succ_diag_r _)). subst_body. destruct p as [ | ?? nt' ?? p0' p1' | valid ?????? p' ]; simpl_size_of. { left; eexists (PBProductionNil _ _ _); reflexivity. } @@ -244,15 +244,15 @@ Section cfg. { eexists (PBProductionConsNonTerminal _ Hnt' p0'' p1''). 
rewrite ?expand_size_of_pb'_production, ?expand_size_of_pb'_productions in *. simpl_size_of. - apply Le.le_n_S, Plus.plus_le_compat; assumption. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)), Nat.add_le_mono; assumption. } { eapply expand_alt_option'; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | apply lt_helper_1' | apply lt_helper_2' | reflexivity | omega ]. } { eapply expand_alt_option'; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | apply lt_helper_1' | apply lt_helper_2' | reflexivity @@ -274,10 +274,10 @@ Section cfg. { eexists (PBProductionConsTerminal _ _ _ _ p0''). rewrite ?expand_size_of_pb'_production, ?expand_size_of_pb'_productions in *. simpl_size_of. - apply Le.le_n_S; assumption. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); assumption. } { unfold pb_new_level in *. eapply expand_alt_option'; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | apply lt_helper_1' | apply lt_helper_2' | reflexivity @@ -300,7 +300,7 @@ Section cfg. destruct h as [|h']; [ exfalso; omega | ]. specialize (minimal_pb'_productions__of__pb'_productions' h' (fun h'' pf => minimal_pb'_item__of__pb'_item _ (le_S _ _ pf))). pose proof (minimal_pb'_production__of__pb'_production' (fun h'' pf => minimal_pb'_item__of__pb'_item _ (le_S _ _ pf))) as minimal_pb'_production__of__pb'_production''. - specialize (minimal_pb'_item__of__pb'_item h' (Lt.lt_n_Sn _)). + specialize (minimal_pb'_item__of__pb'_item h' (Nat.lt_succ_diag_r _)). destruct p as [ | ??? p0' p1' ]. { left. eexists (PBNil _ _ _). @@ -327,11 +327,11 @@ Section cfg. simpl_size_of. omega. } { eapply expand_alt_option'; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | simpl_size_of; omega | reflexivity ]. } { eapply expand_alt_option'; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | simpl_size_of; omega | reflexivity ]. } } Defined. diff --git a/src/Parsers/Reachable/ParenBalanced/OfParse.v b/src/Parsers/Reachable/ParenBalanced/OfParse.v index 77fd9e96e..272f0661d 100644 --- a/src/Parsers/Reachable/ParenBalanced/OfParse.v +++ b/src/Parsers/Reachable/ParenBalanced/OfParse.v @@ -74,7 +74,7 @@ Section cfg. rewrite take_length. unfold paren_balanced'_step, pb_check_level, pb_new_level in *. split_iff. - apply Min.min_case_strong; + apply Nat.min_case_strong; repeat match goal with | _ => progress subst | _ => progress intros diff --git a/src/Parsers/Reachable/ParenBalancedHiding/MinimalOfCore.v b/src/Parsers/Reachable/ParenBalancedHiding/MinimalOfCore.v index 346215696..5adecc422 100644 --- a/src/Parsers/Reachable/ParenBalancedHiding/MinimalOfCore.v +++ b/src/Parsers/Reachable/ParenBalancedHiding/MinimalOfCore.v @@ -156,8 +156,8 @@ Section cfg. | _ => eapply H'; eassumption | _ => assumption | _ => eassumption - | [ |- _ < _ ] => eapply Lt.lt_trans; eassumption - | [ |- _ < _ ] => eapply Lt.lt_le_trans; eassumption + | [ |- _ < _ ] => eapply Nat.lt_trans; eassumption + | [ |- _ < _ ] => eapply Nat.lt_le_trans; eassumption end. Unshelve. unfold respectful; intros; subst; assumption. @@ -170,7 +170,7 @@ Section cfg. : alt_option h valid -> alt_option h' valid'. Proof. apply expand_alt_option'; try assumption. - apply Lt.lt_le_weak; assumption. + apply Nat.lt_le_incl; assumption. Defined. End alt_option. @@ -237,7 +237,7 @@ Section cfg. intros valid' n pat p H_h Hinit'. destruct h as [|h']; [ exfalso; omega | ]. 
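(* Plus.plus_le_compat becomes Nat.add_le_mono in the surrounding hunks;
   both combine two <= facts componentwise over addition. A standalone
   restatement under an illustrative name: *)
Require Import Coq.Arith.PeanoNat.

Lemma plus_le_compat_compat (n m p q : nat)
  : n <= m -> p <= q -> n + p <= m + q.
Proof. exact (Nat.add_le_mono n m p q). Qed.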
specialize (minimal_pbh'_production__of__pbh'_production' h' (fun h'' pf => minimal_pbh'_item__of__pbh'_item' _ (le_S _ _ pf))). - specialize (minimal_pbh'_item__of__pbh'_item' h' (Lt.lt_n_Sn _)). + specialize (minimal_pbh'_item__of__pbh'_item' h' (Nat.lt_succ_diag_r _)). destruct p as [ | ? nt' ?? p0' p1' | ?? nt' ? p0' p' | ??????? p' ]; simpl_size_of. { left; eexists (PBHProductionNil _ _ _ _); reflexivity. } { assert (size_of_pbh'_productions p0' < h') by exact (lt_helper_1 H_h). @@ -260,15 +260,15 @@ Section cfg. { eexists (PBHProductionConsNonTerminal0 _ Hnt' p0'' p1''). rewrite ?expand_size_of_pbh'_production, ?expand_size_of_pbh'_productions in *. simpl_size_of. - apply Le.le_n_S, Plus.plus_le_compat; assumption. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)), Nat.add_le_mono; assumption. } { eapply expand_alt_option'; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | apply lt_helper_1' | apply lt_helper_2' | reflexivity | omega ]. } { eapply expand_alt_option'; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | apply lt_helper_1' | apply lt_helper_2' | reflexivity @@ -293,9 +293,9 @@ Section cfg. { eexists (PBHProductionConsNonTerminalS p0' p1''). rewrite ?expand_size_of_pbh'_production, ?expand_size_of_pbh'_productions in *. simpl_size_of. - apply Le.le_n_S; assumption. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); assumption. } { eapply expand_alt_option'; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | apply lt_helper_1' | apply lt_helper_2' | reflexivity @@ -311,10 +311,10 @@ Section cfg. { eexists (PBHProductionConsTerminal _ _ _ _ p0''). rewrite ?expand_size_of_pbh'_production, ?expand_size_of_pbh'_productions in *. simpl_size_of. - apply Le.le_n_S; assumption. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)); assumption. } { unfold pb_new_level in *. eapply expand_alt_option'; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | apply lt_helper_1' | apply lt_helper_2' | reflexivity @@ -337,7 +337,7 @@ Section cfg. destruct h as [|h']; [ exfalso; omega | ]. specialize (minimal_pbh'_productions__of__pbh'_productions' h' (fun h'' pf => minimal_pbh'_item__of__pbh'_item _ (le_S _ _ pf))). pose proof (minimal_pbh'_production__of__pbh'_production' (fun h'' pf => minimal_pbh'_item__of__pbh'_item _ (le_S _ _ pf))) as minimal_pbh'_production__of__pbh'_production''. - specialize (minimal_pbh'_item__of__pbh'_item h' (Lt.lt_n_Sn _)). + specialize (minimal_pbh'_item__of__pbh'_item h' (Nat.lt_succ_diag_r _)). destruct p as [ | ??? p0' p1' ]. { left. eexists (PBHNil _ _ _ _). @@ -364,11 +364,11 @@ Section cfg. simpl_size_of. omega. } { eapply expand_alt_option'; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | simpl_size_of; omega | reflexivity ]. } { eapply expand_alt_option'; [ .. | eassumption ]; - try solve [ apply Lt.lt_n_Sn + try solve [ apply Nat.lt_succ_diag_r | simpl_size_of; omega | reflexivity ]. } } Defined. diff --git a/src/Parsers/RecognizerPreOptimized.v b/src/Parsers/RecognizerPreOptimized.v index 247f724a5..43fc5334f 100644 --- a/src/Parsers/RecognizerPreOptimized.v +++ b/src/Parsers/RecognizerPreOptimized.v @@ -1,7 +1,6 @@ Require Export Fiat.Common.Coq__8_4__8_5__Compat. Require Import Coq.Lists.List. Require Import Coq.ZArith.ZArith. -Require Import Coq.Numbers.Natural.Peano.NPeano. Require Import Fiat.Parsers.ContextFreeGrammar.Core. 
Require Import Fiat.Parsers.ContextFreeGrammar.PreNotations. Require Import Fiat.Parsers.BaseTypes. @@ -49,6 +48,8 @@ Section recursive_descent_parser. Local Arguments minus !_ !_. Local Arguments min !_ !_. + Local Notation minus_plus := (fun n m : nat => eq_ind_r (fun n0 : nat => n0 - n = m) (Nat.add_sub m n) (Nat.add_comm n m)). + Local Program Instance optsplitdata_correct : @boolean_parser_completeness_dataT' _ _ _ G optdata := { split_string_for_production_complete := _ }. Next Obligation. @@ -77,7 +78,7 @@ Section recursive_descent_parser. | [ |- context[match ?e with nil => _ | _ => _ end] ] => destruct e eqn:? | _ => progress simpl - | _ => rewrite Min.min_0_r + | _ => rewrite Nat.min_0_r | _ => intro | [ |- context[0 = min _ _] ] => exists 0 | [ |- ?x = ?x ] => reflexivity @@ -89,7 +90,7 @@ Section recursive_descent_parser. | [ H : is_true (take _ (substring _ 0 _) ~= [_]) |- _ ] => apply length_singleton in H | [ H : length (take _ (substring _ 0 _)) = S _ |- _ ] - => rewrite take_length, substring_length, <- Nat.sub_min_distr_r, Nat.add_sub, !Min.min_0_r in H + => rewrite take_length, substring_length, <- Nat.sub_min_distr_r, Nat.add_sub, !Nat.min_0_r in H | [ H : MinimalParse.minimal_parse_of_production _ _ _ nil |- _ ] => inversion H; clear H | [ |- MinimalParse.minimal_parse_of_production _ _ _ nil ] => constructor | [ H : MinimalParse.minimal_parse_of_item _ _ _ (Terminal _) |- _ ] @@ -106,22 +107,22 @@ Section recursive_descent_parser. | [ |- (_ * _)%type ] => split | [ |- { _ : nat & _ } ] => eexists; repeat split; [ left; reflexivity | .. ] | [ H : ?x + ?y <= _ |- context[(?y + ?x)%nat] ] - => not constr_eq x y; rewrite (Plus.plus_comm y x) + => not constr_eq x y; rewrite (Nat.add_comm y x) | [ H : ?x + ?y <= _, H' : context[(?y + ?x)%nat] |- _ ] - => not constr_eq x y; rewrite (Plus.plus_comm y x) in H' - | [ H : context[(?x + 1)%nat] |- _ ] => rewrite (Plus.plus_comm x 1) in H; simpl plus in H - | [ H : context[min ?x ?y], H' : ?y <= ?x |- _ ] => rewrite (Min.min_r x y) in H by assumption - | [ H' : ?y <= ?x |- context[min ?x ?y] ] => rewrite (Min.min_r x y) by assumption + => not constr_eq x y; rewrite (Nat.add_comm y x) in H' + | [ H : context[(?x + 1)%nat] |- _ ] => rewrite (Nat.add_comm x 1) in H; simpl plus in H + | [ H : context[min ?x ?y], H' : ?y <= ?x |- _ ] => rewrite (Nat.min_r x y) in H by assumption + | [ H' : ?y <= ?x |- context[min ?x ?y] ] => rewrite (Nat.min_r x y) by assumption | [ H : _ - _ = 0 |- _ ] => apply Nat.sub_0_le in H | [ |- _ - _ = 0 ] => apply Nat.sub_0_le - | [ H : _ |- _ ] => progress rewrite ?Nat.add_sub, ?Minus.minus_plus in H - | _ => progress rewrite ?Nat.add_sub, ?Minus.minus_plus, ?Minus.minus_diag, ?Min.min_idempotent + | [ H : _ |- _ ] => progress rewrite ?Nat.add_sub, ?minus_plus in H + | _ => progress rewrite ?Nat.add_sub, ?minus_plus, ?Nat.sub_diag, ?Nat.min_idempotent | [ |- is_true (is_char (take ?x (take ?x _)) _) ] => rewrite take_take | [ H : is_true (is_char (take ?n ?str) ?ch) |- is_true (is_char ?str ?ch) ] => rewrite (take_long str) in H - by (rewrite substring_length, Plus.plus_comm, Min.min_r by assumption; omega) + by (rewrite substring_length, Nat.add_comm, Nat.min_r by assumption; omega) | [ H : is_true (is_char (take ?n ?str) ?ch) |- is_true (is_char (take 1 ?str) ?ch) ] => apply take_n_1_singleton in H | [ |- MinimalParse.minimal_parse_of_production _ _ _ (_::_) ] @@ -137,21 +138,21 @@ Section recursive_descent_parser. 
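(* The Local Notation minus_plus introduced above packages a proof of
   n + m - n = m (the statement of the removed Arith lemma of the same
   name) out of Nat.add_comm and Nat.add_sub. An equivalent, more readable
   sketch of that statement, assuming only the equation matters to the
   rewrites that use it (illustrative name): *)
Require Import Coq.Arith.PeanoNat.

Lemma minus_plus_compat (n m : nat) : n + m - n = m.
Proof. rewrite Nat.add_comm. apply Nat.add_sub. Qed.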
=> apply length_singleton in H; rewrite take_length, substring_length in H | [ H : min ?x ?y = 1 |- ?R (drop ?x (substring _ ?y _)) (drop 1 (substring _ ?y _)) ] - => revert H; apply Min.min_case_strong; intros; subst; + => revert H; apply Nat.min_case_strong; intros; subst; try reflexivity; apply bool_eq_empty | [ |- context[S ?x - ?x] ] - => rewrite <- Nat.add_1_r, Minus.minus_plus + => rewrite <- Nat.add_1_r, minus_plus | [ |- context[take ?x (take 0 _)] ] => rewrite take_take | [ |- context[min _ ?x - ?x] ] => rewrite <- Nat.sub_min_distr_r | [ H : ?y <= ?x |- context[take ?x (substring ?z ?y ?str)] ] => rewrite (take_long (substring z y str)) - by (rewrite substring_length, Plus.plus_comm, Min.min_r by assumption; omega) + by (rewrite substring_length, Nat.add_comm, Nat.min_r by assumption; omega) | [ |- context[take ?x (substring ?z ?x ?str)] ] => rewrite (take_long (substring z x str)) - by (rewrite substring_length, Plus.plus_comm, Min.min_r by assumption; omega) + by (rewrite substring_length, Nat.add_comm, Nat.min_r by assumption; omega) end. Qed. End recursive_descent_parser. diff --git a/src/Parsers/Refinement/BinOpBrackets/BinOpRules.v b/src/Parsers/Refinement/BinOpBrackets/BinOpRules.v index 4eda0fca7..75130af5e 100644 --- a/src/Parsers/Refinement/BinOpBrackets/BinOpRules.v +++ b/src/Parsers/Refinement/BinOpBrackets/BinOpRules.v @@ -70,7 +70,7 @@ Section helper_lemmas. => apply FirstChar.for_first_char_exists in H; [ | rewrite !drop_length; clear -H' H''; omega ] - | [ H : _ < min _ _ |- _ ] => apply NPeano.Nat.min_glb_lt_iff in H + | [ H : _ < min _ _ |- _ ] => apply Nat.min_glb_lt_iff in H end. eapply (paren_balanced_hiding'_prefix); [ exact H_hiding @@ -122,7 +122,7 @@ Section refine_rules. destruct (List.nth n' table None); try reflexivity. rewrite substring_length. destruct Hlen; subst; simpl; - apply Min.min_case_strong; omega. + apply Nat.min_case_strong; omega. Qed. Section general_table. @@ -157,7 +157,7 @@ Section refine_rules. destruct (nth n table None) eqn:nth_equals. { destruct (Compare_dec.le_lt_dec (StringLike.length s_str) n0) as [|g]. - { rewrite Min.min_l by assumption. + { rewrite Nat.min_l by assumption. left. reflexivity. } { exfalso. @@ -168,21 +168,21 @@ Section refine_rules. subst s_str. rewrite take_take in H_nt_hiding. refine (paren_balanced_hiding'_prefix__index_points_to_binop _ H_nt_hiding Hn0_binop Hn0_balanced _). - rewrite <- Min.min_assoc. + rewrite <- Nat.min_assoc. rewrite <- take_length. - rewrite Min.min_idempotent. + rewrite Nat.min_idempotent. assumption. } } { - rewrite Min.min_idempotent. + rewrite Nat.min_idempotent. tauto. } } destruct Hlen as [Hlen|Hlen]. { subst s_str m; exfalso; revert isnonzero. - rewrite drop_length, substring_length; apply Min.min_case_strong; simpl; intros; omega. } + rewrite drop_length, substring_length; apply Nat.min_case_strong; simpl; intros; omega. } subst s_str. rewrite take_take in H_nt_hiding. @@ -238,7 +238,7 @@ Section refine_rules. | [ H : _ <= StringLike.length _ |- _ ] => rewrite take_length in H | [ H : context[min ?x ?y], H' : ?x <= min ?y _ |- _ ] => replace (min x y) with x in H - by (revert H'; clear; abstract (repeat apply Min.min_case_strong; intros; omega)) + by (revert H'; clear; abstract (repeat apply Nat.min_case_strong; intros; omega)) | _ => progress subst end. *) destruct (List.nth n table None) as [idx|]. @@ -246,7 +246,7 @@ Section refine_rules. left. 
destruct (Compare_dec.lt_eq_lt_dec idx idx') as [ [?|?]|?]; [ - | subst; apply Min.min_r; omega + | subst; apply Nat.min_r; omega | ]; exfalso. { (** idx < idx'; this contradicts the paren-balanced-hiding @@ -257,7 +257,7 @@ Section refine_rules. apply paren_balanced_hiding_impl_paren_balanced' in Htable1; [ | exact _ .. ]. eapply paren_balanced_hiding'_prefix__index_points_to_binop; try eassumption. rewrite <- take_length, <- take_take, take_length. - apply Min.min_case_strong; intros; try assumption; omega. + apply Nat.min_case_strong; intros; try assumption; omega. } { (** idx' < idx; this contradicts the paren-balanced-hiding @@ -272,9 +272,9 @@ Section refine_rules. | | | eassumption ]. - { apply Min.min_case_strong; omega. } - { rewrite Min.min_l - by (rewrite substring_length, Min.min_r in Hsmall by omega; + { apply Nat.min_case_strong; omega. } + { rewrite Nat.min_l + by (rewrite substring_length, Nat.min_r in Hsmall by omega; omega). rewrite drop_take, take_take in H. apply take_n_1_singleton in H. @@ -289,23 +289,23 @@ Section refine_rules. [ | solve [ repeat match goal with - | _ => rewrite Min.min_l in Hsmall by omega - | _ => rewrite Min.min_l in isnonzero by omega - | _ => rewrite Min.min_l by assumption + | _ => rewrite Nat.min_l in Hsmall by omega + | _ => rewrite Nat.min_l in isnonzero by omega + | _ => rewrite Nat.min_l by assumption | [ H : is_true (_ ~= [ _ ]) |- _ ] => apply length_singleton in H | [ H : _ |- _ ] => progress rewrite ?drop_length, ?take_length in H | _ => progress rewrite ?drop_length, ?take_length - | [ H : min _ _ = _ |- _ ] => revert H; apply Min.min_case_strong; clear; intros; omega + | [ H : min _ _ = _ |- _ ] => revert H; apply Nat.min_case_strong; clear; intros; omega | _ => omega end ] ]. destruct Htable' as [ch' [Ht0 Ht1] ]. - rewrite substring_length, Min.min_r, NPeano.Nat.add_sub in Hsmall by omega. + rewrite substring_length, Nat.min_r, Nat.add_sub in Hsmall by omega. repeat match goal with - | _ => rewrite Min.min_idempotent + | _ => rewrite Nat.min_idempotent | [ |- _ \/ False ] => left | [ H : 0 < ?x - ?y |- ?x = ?y ] => exfalso - | [ H : context[min] |- _ ] => rewrite Min.min_r in H by omega - | [ H : context[min] |- _ ] => rewrite Min.min_l in H by omega + | [ H : context[min] |- _ ] => rewrite Nat.min_r in H by omega + | [ H : context[min] |- _ ] => rewrite Nat.min_l in H by omega | [ H : is_true (is_char (substring _ _ (substring _ _ _)) _) |- _ ] => rewrite substring_substring in H; apply take_n_1_singleton in H diff --git a/src/Parsers/Refinement/BinOpBrackets/MakeBinOpTable.v b/src/Parsers/Refinement/BinOpBrackets/MakeBinOpTable.v index c40dabd70..d9fda10ea 100644 --- a/src/Parsers/Refinement/BinOpBrackets/MakeBinOpTable.v +++ b/src/Parsers/Refinement/BinOpBrackets/MakeBinOpTable.v @@ -171,7 +171,7 @@ pb = pb' '+' 0 { specialize (IHlen (drop 1 str)). rewrite drop_length, H' in IHlen. simpl in IHlen. - specialize (IHlen (NPeano.Nat.sub_0_r _)). + specialize (IHlen (Nat.sub_0_r _)). rewrite list_of_next_bin_ops'_recr. destruct (singleton_exists (take 1 str)) as [ch H'']. { rewrite take_length, H'; reflexivity. } @@ -203,7 +203,7 @@ pb = pb' '+' 0 { specialize (IHlen (drop 1 str)). rewrite drop_length, H' in IHlen. simpl in IHlen. - rewrite NPeano.Nat.sub_0_r in IHlen. + rewrite Nat.sub_0_r in IHlen. specialize (IHlen (pred n) eq_refl). rewrite list_of_next_bin_ops'_recr. destruct (singleton_exists (take 1 str)) as [ch H'']. 
@@ -228,7 +228,7 @@ pb = pb' '+' 0 : index_points_to_binop (S offset) index str <-> index_points_to_binop offset index (drop 1 str). Proof. unfold index_points_to_binop. - rewrite ?drop_drop, !NPeano.Nat.add_1_r; simpl. + rewrite ?drop_drop, !Nat.add_1_r; simpl. reflexivity. Qed. @@ -266,7 +266,7 @@ pb = pb' '+' 0 : index_not_points_to_binop (S offset) index str <-> index_not_points_to_binop offset index (drop 1 str). Proof. unfold index_not_points_to_binop. - rewrite ?drop_drop, !NPeano.Nat.add_1_r; simpl. + rewrite ?drop_drop, !Nat.add_1_r; simpl. reflexivity. Qed. @@ -339,7 +339,7 @@ pb = pb' '+' 0 unfold cell_of_next_bin_ops_spec'' in *; simpl in *. rewrite !index_points_to_binop_S1. rewrite !index_not_points_to_binop_S1. - rewrite drop_drop, NPeano.Nat.add_1_r in H. + rewrite drop_drop, Nat.add_1_r in H. assumption. Qed. @@ -386,9 +386,9 @@ pb = pb' '+' 0 | [ H : option_map _ ?x = None |- _ ] => destruct x eqn:?; simpl in H | [ H : forall x, _ = _ -> @?T x |- _ ] => specialize (H _ eq_refl) | [ H : _ = _ -> ?T |- _ ] => specialize (H eq_refl) - | [ H : context[_ - 0] |- _ ] => rewrite NPeano.Nat.sub_0_r in H - | [ |- context[_ - 0] ] => rewrite NPeano.Nat.sub_0_r - | [ H : context[(_ + 1)%nat] |- _ ] => rewrite NPeano.Nat.add_1_r in H || setoid_rewrite NPeano.Nat.add_1_r in H + | [ H : context[_ - 0] |- _ ] => rewrite Nat.sub_0_r in H + | [ |- context[_ - 0] ] => rewrite Nat.sub_0_r + | [ H : context[(_ + 1)%nat] |- _ ] => rewrite Nat.add_1_r in H || setoid_rewrite Nat.add_1_r in H | [ H : ?x > 0 |- _ ] => is_var x; destruct x; [ exfalso; clear -H; omega | clear dependent H ] | [ H : ~ ?x > 0 |- _ ] => is_var x; destruct x; [ clear dependent H | exfalso; clear -H; omega ] | [ H : 0 > 0 |- _ ] => exfalso; clear -H; omega diff --git a/src/Parsers/Refinement/BinOpBrackets/ParenBalanced.v b/src/Parsers/Refinement/BinOpBrackets/ParenBalanced.v index 302f4f566..337adc78b 100644 --- a/src/Parsers/Refinement/BinOpBrackets/ParenBalanced.v +++ b/src/Parsers/Refinement/BinOpBrackets/ParenBalanced.v @@ -173,7 +173,7 @@ Section specific. { simpl in *. setoid_rewrite drop_drop in IHlen. setoid_rewrite take_drop in IHlen. - rewrite !NPeano.Nat.add_1_r in IHlen. + rewrite !Nat.add_1_r in IHlen. specialize (fun H' level1 H => IHlen level1 level2 H H'). intros H1 H2. specialize_by assumption. @@ -362,7 +362,7 @@ Section specific. destruct level1, level2; simpl; intros; first [ assumption | congruence ]. } { simpl in *. setoid_rewrite drop_drop in IHlen. - rewrite !NPeano.Nat.add_1_r in IHlen. + rewrite !Nat.add_1_r in IHlen. specialize (fun H' level1 H => IHlen level1 level2 H H'). intros H1 H2. specialize_by assumption. @@ -385,7 +385,7 @@ Section specific. | _ => congruence | _ => omega | [ H : context[take _ (drop _ _)] |- _ ] => setoid_rewrite take_drop in H - | [ H : context[(_ + 1)%nat] |- _ ] => rewrite !NPeano.Nat.add_1_r in H + | [ H : context[(_ + 1)%nat] |- _ ] => rewrite !Nat.add_1_r in H | [ H : context[bool_of_sumbool ?e] |- _ ] => destruct e; simpl in H | [ H : context[match ?e with left _ => _ | right _ => _ end] |- _ ] => destruct e; simpl in H | [ |- context[bool_of_sumbool ?e] ] => destruct e; simpl @@ -417,7 +417,7 @@ Section specific. induction_str_len str. { rewrite paren_balanced_hiding'_nil, paren_balanced'_nil by assumption; trivial. } { specialize (IHlen (drop 1 str)). - rewrite drop_length, <- Minus.pred_of_minus in IHlen. + rewrite drop_length, Nat.sub_1_r in IHlen. specialize (IHlen (f_equal pred H)). rewrite paren_balanced_hiding'_recr, paren_balanced'_recr. 
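(* The <- Minus.pred_of_minus rewrites become forward Nat.sub_1_r rewrites
   because the two lemmas state the same equation with the sides flipped:
   Nat.sub_1_r : n - 1 = pred n. A minimal standalone check of the
   direction (illustrative name): *)
Require Import Coq.Arith.PeanoNat.

Lemma sub_1_r_direction_example (len : nat) : S (pred len) = S (len - 1).
Proof. rewrite Nat.sub_1_r. reflexivity. Qed.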
edestruct get as [ch|]; trivial; []. @@ -442,7 +442,7 @@ Section specific. induction_str_len str. { rewrite paren_balanced_hiding'_nil, paren_balanced'_nil by assumption; trivial. } { specialize (IHlen (drop 1 str)). - rewrite drop_length, <- Minus.pred_of_minus in IHlen. + rewrite drop_length, Nat.sub_1_r in IHlen. specialize (IHlen (f_equal pred H)). rewrite paren_balanced_hiding'_recr, paren_balanced'_recr. edestruct get as [ch|]; trivial; []. @@ -468,7 +468,7 @@ Section specific. { simpl in *. intros n H'. apply paren_balanced_hiding'_S. - rewrite NPeano.Nat.add_succ_r; eauto with nocore. } + rewrite Nat.add_succ_r; eauto with nocore. } Qed.*) End paren_balanced_to_hiding. End specific. diff --git a/src/Parsers/Refinement/BinOpBrackets/ParenBalancedGrammar.v b/src/Parsers/Refinement/BinOpBrackets/ParenBalancedGrammar.v index 022d719b5..6b5c963c1 100644 --- a/src/Parsers/Refinement/BinOpBrackets/ParenBalancedGrammar.v +++ b/src/Parsers/Refinement/BinOpBrackets/ParenBalancedGrammar.v @@ -186,7 +186,7 @@ Section specific. | [ H : forall x, _ = x -> _ |- _ ] => specialize (H _ eq_refl) | [ H : forall x, x = _ -> _ |- _ ] => specialize (H _ eq_refl) | [ H : context[In _ (uniquize _ _)] |- _ ] - => setoid_rewrite <- (ListFacts.uniquize_In_refl_iff _ EqNat.beq_nat _ (lb eq_refl) bl) in H + => setoid_rewrite <- (ListFacts.uniquize_In_refl_iff _ Nat.eqb _ (lb eq_refl) bl) in H | [ H : context[In _ (map _ _)] |- _ ] => setoid_rewrite in_map_iff in H | _ => progress destruct_head ex @@ -284,7 +284,7 @@ Section specific. | _ => progress subst | _ => congruence | [ H : string_beq _ _ = true |- _ ] => apply string_bl in H - | [ H : EqNat.beq_nat _ _ = true |- _ ] => apply EqNat.beq_nat_true in H + | [ H : Nat.eqb _ _ = true |- _ ] => apply (fun n m => proj1 (Nat.eqb_eq n m)) in H | [ H : is_true (_ || _) |- _ ] => apply Bool.orb_true_iff in H | [ H : is_true (_ && _) |- _ ] => apply Bool.andb_true_iff in H | [ H : _ /\ _ |- _ ] => let H1 := fresh in diff --git a/src/Parsers/Refinement/BinOpBrackets/ParenBalancedLemmas.v b/src/Parsers/Refinement/BinOpBrackets/ParenBalancedLemmas.v index f5a3ed9e0..adba62860 100644 --- a/src/Parsers/Refinement/BinOpBrackets/ParenBalancedLemmas.v +++ b/src/Parsers/Refinement/BinOpBrackets/ParenBalancedLemmas.v @@ -25,7 +25,7 @@ Section helpers. | [ H : is_true false |- _ ] => solve [ inversion H ] | [ H : false = true |- _ ] => solve [ inversion H ] | _ => progress rewrite ?take_length, ?drop_length - | [ H : _ |- _ ] => progress rewrite ?take_length, ?drop_length, ?drop_take, ?drop_0, ?NPeano.Nat.sub_0_r, ?NPeano.Nat.sub_1_r in H + | [ H : _ |- _ ] => progress rewrite ?take_length, ?drop_length, ?drop_take, ?drop_0, ?Nat.sub_0_r, ?Nat.sub_1_r in H | [ H : ?x = 0, H' : context[?x] |- _ ] => rewrite H in H' | [ H : length ?str = 0, H' : is_true (take _ (drop _ ?str) ~= [ _ ])%string_like |- _ ] => apply length_singleton in H' @@ -42,7 +42,7 @@ Section helpers. 
| _ => solve [ eauto with nocore ] | [ H : context[drop _ (drop _ _)] |- _ ] => setoid_rewrite drop_drop in H | [ |- context[match get 0 (take _ _) with _ => _ end] ] => rewrite !get_take_lt by omega - | [ H : context[(_ + 1)%nat] |- _ ] => setoid_rewrite NPeano.Nat.add_1_r in H + | [ H : context[(_ + 1)%nat] |- _ ] => setoid_rewrite Nat.add_1_r in H | [ |- context[get 0 ?str] ] => erewrite (proj1 (get_0 str _)) by eassumption | [ |- context[get 0 (take 0 ?str)] ] => rewrite (has_first_char_nonempty (take 0 str)) by (rewrite take_length; reflexivity) diff --git a/src/Parsers/Refinement/DisjointLemmas.v b/src/Parsers/Refinement/DisjointLemmas.v index a3e89efd7..c5877326a 100644 --- a/src/Parsers/Refinement/DisjointLemmas.v +++ b/src/Parsers/Refinement/DisjointLemmas.v @@ -3,7 +3,6 @@ Require Export Fiat.Common.Coq__8_4__8_5__Compat. Require Import Coq.Init.Wf Coq.Arith.Wf_nat. Require Import Coq.ZArith.ZArith. Require Import Coq.Lists.List Coq.Strings.String. -Require Import Coq.Numbers.Natural.Peano.NPeano. Require Import Fiat.Parsers.ContextFreeGrammar.Core. Require Import Fiat.Parsers.ContextFreeGrammar.PreNotations. Require Import Fiat.Parsers.ContextFreeGrammar.Equality. @@ -290,7 +289,7 @@ Section search_backward. try solve [ left; eauto | right; eauto | assumption - | apply for_last_char_nil; rewrite ?take_length; apply Min.min_case_strong; omega ] ]. + | apply for_last_char_nil; rewrite ?take_length; apply Nat.min_case_strong; omega ] ]. revert H0. apply forall_chars__char_in__impl__forall_chars. intros ch H' H''. @@ -357,7 +356,7 @@ Section search_backward. try solve [ right; eauto | left; split; eauto | assumption - | apply for_last_char_nil; rewrite ?take_length; apply Min.min_case_strong; omega ] ]. + | apply for_last_char_nil; rewrite ?take_length; apply Nat.min_case_strong; omega ] ]. { revert H0. apply forall_chars_Proper; [ reflexivity | ]. intros ch H' H''. diff --git a/src/Parsers/Refinement/DisjointRules.v b/src/Parsers/Refinement/DisjointRules.v index ff7ad8bd8..96e33892f 100644 --- a/src/Parsers/Refinement/DisjointRules.v +++ b/src/Parsers/Refinement/DisjointRules.v @@ -26,8 +26,8 @@ Proof. revert str; induction len; simpl; intros; [ reflexivity | ]. destruct (get (length str - S len) str) eqn:H. { edestruct P; try omega. - apply Le.le_n_S, IHlen. } - { apply Le.le_n_S, IHlen. } + apply (fun n m => proj1 (Nat.succ_le_mono n m)), IHlen. } + { apply (fun n m => proj1 (Nat.succ_le_mono n m)), IHlen. } Qed. Lemma find_first_char_such_that_short {Char HSLM HSL} @@ -46,30 +46,30 @@ Proof. destruct H as [n H]. set (len := length str). setoid_replace str with (drop (length str - len) str) at 1 - by (subst; rewrite Minus.minus_diag, drop_0; reflexivity). + by (subst; rewrite Nat.sub_diag, drop_0; reflexivity). setoid_replace str with (drop (length str - len) str) in H - by (subst; rewrite Minus.minus_diag, drop_0; reflexivity). + by (subst; rewrite Nat.sub_diag, drop_0; reflexivity). assert (len <= length str) by reflexivity. clearbody len. generalize dependent str; revert n. induction len; simpl; intros n str IH Hlen. { apply first_char_such_that_0. rewrite drop_length. - rewrite NPeano.Nat.sub_0_r in IH |- *. - rewrite Minus.minus_diag. + rewrite Nat.sub_0_r in IH |- *. + rewrite Nat.sub_diag. split. { apply for_first_char_nil. rewrite drop_length; omega. } { generalize dependent str. induction n; intros str H Hlen. { apply first_char_such_that_0 in H. - rewrite drop_length, Minus.minus_diag in H. + rewrite drop_length, Nat.sub_diag in H. destruct_head and; trivial. 
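(* Min.min_case_strong is renamed to Nat.min_case_strong here; the usual
   pattern in these files is to case-split on min and discharge both
   branches arithmetically. A minimal sketch of that pattern, using lia
   where these files rely on their omega wrapper (illustrative name): *)
Require Import Coq.Arith.PeanoNat Coq.micromega.Lia.

Lemma min_case_strong_example (x y : nat) : Nat.min x y <= x.
Proof. apply Nat.min_case_strong; intros; lia. Qed.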
} { apply first_char_such_that_past_end in H; [ | rewrite drop_length; omega ]. left; assumption. } } } { pose proof (singleton_exists (take 1 (drop (length str - S len) str))) as H'. rewrite take_length, drop_length in H'. - destruct H' as [ch H']; [ apply Min.min_case_strong; intros; omega | ]. + destruct H' as [ch H']; [ apply Nat.min_case_strong; intros; omega | ]. rewrite get_drop. rewrite (proj1 (get_0 _ _) H'). destruct (P ch) eqn:H''. @@ -171,7 +171,7 @@ Section with_grammar. specialize (H1 (ex_intro _ n H')). pose proof (is_first_char_such_that_eq_nat_iff H1 H') as H''. destruct_head or; destruct_head and; subst; - rewrite ?Min.min_r, ?Min.min_l by assumption; + rewrite ?Nat.min_r, ?Nat.min_l by assumption; omega. Qed. @@ -210,7 +210,7 @@ Section with_grammar. specialize (H1 (ex_intro _ n H')). pose proof (is_first_char_such_that_eq_nat_iff H1 H') as H''. destruct_head or; destruct_head and; subst; - rewrite ?Min.min_r by assumption; + rewrite ?Nat.min_r by assumption; omega. Qed. diff --git a/src/Parsers/Refinement/DisjointRulesRev.v b/src/Parsers/Refinement/DisjointRulesRev.v index 7028ec03e..bbe07e686 100644 --- a/src/Parsers/Refinement/DisjointRulesRev.v +++ b/src/Parsers/Refinement/DisjointRulesRev.v @@ -104,7 +104,7 @@ Section with_grammar. unfold rev_search_for_condition in H1. pose proof (is_after_last_char_such_that_eq_nat_iff H1 H') as H''. destruct_head or; destruct_head and; subst; - rewrite ?Min.min_r, ?Min.min_l by assumption; + rewrite ?Nat.min_r, ?Nat.min_l by assumption; omega. Qed. @@ -142,7 +142,7 @@ Section with_grammar. specialize (H1 (ex_intro _ n H')). pose proof (is_after_last_char_such_that_eq_nat_iff H1 H') as H''. destruct_head or; destruct_head and; subst; - rewrite ?Min.min_r by assumption; + rewrite ?Nat.min_r by assumption; omega. Qed. diff --git a/src/Parsers/Refinement/EmptyLemmas.v b/src/Parsers/Refinement/EmptyLemmas.v index 465813cc4..edf254431 100644 --- a/src/Parsers/Refinement/EmptyLemmas.v +++ b/src/Parsers/Refinement/EmptyLemmas.v @@ -118,8 +118,8 @@ Section correctness. | [ x : nonemptyT |- _ ] => destruct x | [ x : bool |- _ ] => destruct x | [ |- and _ _ ] => split - | _ => progress rewrite ?take_length, ?drop_length, ?Min.min_0_r, ?Min.min_0_l - | [ H : _ |- _ ] => progress rewrite ?take_length, ?drop_length, ?Min.min_0_r, ?Min.min_0_l in H + | _ => progress rewrite ?take_length, ?drop_length, ?Nat.min_0_r, ?Nat.min_0_l + | [ H : _ |- _ ] => progress rewrite ?take_length, ?drop_length, ?Nat.min_0_r, ?Nat.min_0_l in H | _ => omega | [ H : ?x = ?x -> _ |- _ ] => specialize (H eq_refl) | [ H : ?P ?v, H' : forall str, ?P str -> length str <> 0 |- _ ] @@ -127,7 +127,7 @@ Section correctness. | [ H : ?P ?v, H' : forall str, ?P str -> length str = 0 |- _ ] => apply H' in H | [ H : ?x = 0, H' : context[?x] |- _ ] => rewrite H in H' - | [ H : min _ _ = 0 |- _ ] => revert H; apply Min.min_case_strong + | [ H : min _ _ = 0 |- _ ] => revert H; apply Nat.min_case_strong end. Local Ltac t_Proper := repeat t_Proper_step. diff --git a/src/Parsers/Refinement/FixedLengthLemmas.v b/src/Parsers/Refinement/FixedLengthLemmas.v index c4b7f8d23..d7b60c313 100644 --- a/src/Parsers/Refinement/FixedLengthLemmas.v +++ b/src/Parsers/Refinement/FixedLengthLemmas.v @@ -19,12 +19,12 @@ Set Implicit Arguments. Local Open Scope grammar_fixedpoint_scope. Definition length_result_lub (l1 l2 : nat) : lattice_for nat - := if beq_nat l1 l2 then constant l1 else ⊤. + := if Nat.eqb l1 l2 then constant l1 else ⊤. 
Global Instance length_result_lattice : grammar_fixedpoint_lattice_data nat. Proof. refine {| prestate_lt x y := false; - prestate_beq := beq_nat; + prestate_beq := Nat.eqb; preleast_upper_bound := length_result_lub |}; try abstract (repeat match goal with | [ |- is_true true ] => reflexivity @@ -36,8 +36,8 @@ Proof. | _ => progress unfold length_result_lub, is_true in * | _ => progress subst | [ H : constant _ = constant _ |- _ ] => inversion H; clear H - | [ H : beq_nat _ _ = true |- _ ] => apply beq_nat_true_iff in H - | _ => rewrite <- beq_nat_refl + | [ H : Nat.eqb _ _ = true |- _ ] => apply Nat.eqb_eq in H + | _ => rewrite Nat.eqb_refl | [ H : context[match ?e with _ => _ end] |- _ ] => destruct e eqn:? | [ |- context[match ?e with _ => _ end] ] => destruct e eqn:? end). @@ -57,8 +57,8 @@ Proof. | _ => progress simpl in * | _ => progress unfold is_true in * | _ => progress subst - | [ H : beq_nat _ _ = true |- _ ] => apply beq_nat_true_iff in H - | [ |- beq_nat _ _ = true ] => apply beq_nat_true_iff + | [ H : Nat.eqb _ _ = true |- _ ] => apply Nat.eqb_eq in H + | [ |- Nat.eqb _ _ = true ] => apply Nat.eqb_eq | _ => reflexivity end ). } @@ -82,7 +82,7 @@ Section correctness. | _ => progress unfold respectful, pointwise_relation, length_result_accurate, length_result_accurate, ensemble_bottom, ensemble_top, ensemble_least_upper_bound, ensemble_on_terminal, ensemble_combine_production, lattice_for_related, not, length_result_lub, prestate_le in * | _ => intro | _ => progress subst - | [ H : is_true (beq_nat _ _) |- _ ] => apply beq_nat_true_iff in H + | [ H : is_true (Nat.eqb _ _) |- _ ] => apply Nat.eqb_eq in H | _ => progress destruct_head lattice_for | [ |- iff _ _ ] => split | [ H : forall x y, _ -> (_ <-> _) |- _ ] => specialize (fun x => H x x (reflexivity _)) @@ -106,8 +106,8 @@ Section correctness. => specialize (fun str => H (fun _ => True) str I) | [ H : _ |- _ ] => rewrite Bool.orb_false_r in H | _ => rewrite Bool.orb_false_r - | [ |- is_true (beq_nat _ _) ] => apply beq_nat_true_iff - | [ H : beq_nat _ _ = true |- _ ] => apply beq_nat_true_iff in H + | [ |- is_true (Nat.eqb _ _) ] => apply Nat.eqb_eq + | [ H : Nat.eqb _ _ = true |- _ ] => apply Nat.eqb_eq in H | [ H : forall P, (forall str, P str -> @?P' str) -> forall str', P str' -> @?Q' str' |- _ ] => specialize (H P' (fun str pf => pf)) | [ H : forall str, length str = ?n -> length str = ?n' |- _ ] diff --git a/src/Parsers/Refinement/IndexedAndAtMostOneNonTerminalReflective.v b/src/Parsers/Refinement/IndexedAndAtMostOneNonTerminalReflective.v index b07518e6b..466090eef 100644 --- a/src/Parsers/Refinement/IndexedAndAtMostOneNonTerminalReflective.v +++ b/src/Parsers/Refinement/IndexedAndAtMostOneNonTerminalReflective.v @@ -1,7 +1,6 @@ Require Export Fiat.Common.Coq__8_4__8_5__Compat. (** First step of a splitter refinement; indexed representation, and handle all rules with at most one nonterminal; leave a reflective goal *) -Require Import Coq.Strings.String Coq.Arith.Lt Coq.Lists.List. -Require Import Coq.Numbers.Natural.Peano.NPeano. +Require Import Coq.Strings.String Coq.Arith.PeanoNat Coq.Lists.List. Require Import Fiat.Parsers.BaseTypes. Require Import Fiat.Parsers.Splitters.RDPList. Require Import Fiat.Parsers.ParserInterface. @@ -214,14 +213,14 @@ Section helpers. 
| [ H : context[length (drop _ _)] |- _ ] => rewrite drop_length in H | [ H : min ?x ?y = 1 |- _ ] => is_var x; destruct x | [ H : min (S ?x) ?y = 1 |- _ ] => is_var x; destruct x - | [ H : min (S (S ?x)) ?y = 1 |- _ ] => revert H; apply (Min.min_case_strong (S (S x)) y) - | [ H : context[min _ 0] |- _ ] => rewrite Min.min_0_r in H - | [ H : context[min 0 _] |- _ ] => rewrite Min.min_0_l in H + | [ H : min (S (S ?x)) ?y = 1 |- _ ] => revert H; apply (Nat.min_case_strong (S (S x)) y) + | [ H : context[min _ 0] |- _ ] => rewrite Nat.min_0_r in H + | [ H : context[min 0 _] |- _ ] => rewrite Nat.min_0_l in H | [ H : 0 = 1 |- _ ] => exfalso; clear -H; discriminate | [ H : S (S _) = 1 |- _ ] => exfalso; clear -H; discriminate | [ H : ?x = 1, H' : context[?x] |- _ ] => rewrite H in H' | [ H : ?x = 1 |- context[?x] ] => rewrite H - | [ H : min ?x ?y = 1 |- _ ] => revert H; apply (Min.min_case_strong x y) + | [ H : min ?x ?y = 1 |- _ ] => revert H; apply (Nat.min_case_strong x y) | _ => omega end. } { exfalso. @@ -237,10 +236,10 @@ Module Export PrettyNotations. Infix "=p" := default_production_carrierT_beq (at level 70, no associativity). Infix "=ℕ" := opt.beq_nat (at level 70, no associativity). Infix "=ℕ" := opt2.beq_nat (at level 70, no associativity). - Infix "=ℕ" := EqNat.beq_nat (at level 70, no associativity). + Infix "=ℕ" := Nat.eqb (at level 70, no associativity). End PrettyNotations. -Definition beq_nat_opt := Eval compute in EqNat.beq_nat. +Definition beq_nat_opt := Eval compute in Nat.eqb. Definition andb_opt := Eval compute in andb. Inductive ret_cases : Set := @@ -257,12 +256,12 @@ Local Ltac t_ret_cases := | _ => intro | _ => progress subst | _ => congruence - | [ H : ?f ?x ?y = ?b |- _ ] => progress change (EqNat.beq_nat x y = b) in H - | [ |- ?f ?x ?y = ?b ] => progress change (EqNat.beq_nat x y = b) + | [ H : ?f ?x ?y = ?b |- _ ] => progress change (Nat.eqb x y = b) in H + | [ |- ?f ?x ?y = ?b ] => progress change (Nat.eqb x y = b) | [ H : ?f ?x ?y = ?b |- _ ] => progress change (andb x y = b) in H | [ |- ?f ?x ?y = ?b ] => progress change (andb x y = b) - | [ H : EqNat.beq_nat _ _ = true |- _ ] => apply EqNat.beq_nat_true in H - | [ |- EqNat.beq_nat _ _ = true ] => apply EqNat.beq_nat_true_iff + | [ H : Nat.eqb _ _ = true |- _ ] => apply (fun n m => proj1 (Nat.eqb_eq n m)) in H + | [ |- Nat.eqb _ _ = true ] => apply Nat.eqb_eq | [ H : andb _ _ = true |- _ ] => apply Bool.andb_true_iff in H | [ |- andb _ _ = true ] => apply Bool.andb_true_iff | [ H : and _ _ |- _ ] => destruct H @@ -557,9 +556,9 @@ Section IndexedImpl. | [ |- context[_ - 0] ] => rewrite Nat.sub_0_r | [ |- context[substring _ (min _ (length ?str)) ?str] ] => rewrite substring_min_length - | [ |- context[_ + 0] ] => rewrite Plus.plus_0_r + | [ |- context[_ + 0] ] => rewrite Nat.add_0_r | [ |- context[min ?x ?x] ] - => rewrite (Min.min_idempotent x) + => rewrite (Nat.min_idempotent x) | _ => reflexivity | _ => assumption end; @@ -567,7 +566,7 @@ Section IndexedImpl. | repeat match goal with | _ => intro | [ |- context[min ?x ?x] ] - => rewrite (Min.min_idempotent x) + => rewrite (Nat.min_idempotent x) | _ => reflexivity | _ => rewrite substring_substring | _ => rewrite Nat.sub_0_r @@ -578,15 +577,15 @@ Section IndexedImpl. 
| [ |- context[min ?x ?y] ] => match goal with | [ |- context[min y x] ] - => rewrite (Min.min_comm x y) + => rewrite (Nat.min_comm x y) end | [ |- context[min (min _ ?x) (?x - ?y)] ] - => rewrite <- (Min.min_assoc _ x (x - y)), (Min.min_r x (x - y)) by omega + => rewrite <- (Nat.min_assoc _ x (x - y)), (Nat.min_r x (x - y)) by omega | [ |- substring (?x + ?y) _ _ = substring (?y + ?x) _ _ ] - => rewrite (Plus.plus_comm x y) + => rewrite (Nat.add_comm x y) | [ |- substring ?x ?y ?z = substring ?x (min ?w ?y) ?z ] - => apply (@Min.min_case_strong w y) - | [ H : _ |- _ ] => rewrite Min.min_assoc in H + => apply (@Nat.min_case_strong w y) + | [ H : _ |- _ ] => rewrite Nat.min_assoc in H | _ => apply substring_correct4; omega end | repeat match goal with @@ -595,7 +594,7 @@ Section IndexedImpl. | [ |- List.In ?x [?y] ] => left | [ |- context[List.map ?f [?x] ] ] => change (List.map f [x]) with [f x] | [ |- context[min ?x ?x] ] - => rewrite (Min.min_idempotent x) + => rewrite (Nat.min_idempotent x) | _ => reflexivity | [ H : parse_of_production _ _ nil |- _ ] => let H' := fresh in rename H into H'; dependent destruction H' | [ H : parse_of_production _ _ (_::_) |- _ ] => let H' := fresh in rename H into H'; dependent destruction H' @@ -604,8 +603,8 @@ Section IndexedImpl. | [ H : length (substring ?n ?m ?s) = _, H' : context[length (substring ?n ?m ?s)] |- _ ] => rewrite H in H' | [ H : context[length (take _ _)] |- _ ] => rewrite take_length in H | _ => erewrite <- has_only_terminals_length by eassumption - | [ H : _ |- _ ] => progress rewrite ?(@drop_length _ HSL HSLP), ?(@take_length _ HSL HSLP), ?substring_length, ?Nat.add_sub, <- ?plus_n_O, ?Minus.minus_diag, ?Nat.sub_0_r, ?sub_plus in H by omega - | _ => progress rewrite ?drop_length, ?take_length, ?substring_length, ?Nat.add_sub, ?Minus.minus_diag, ?Nat.sub_0_r, <- ?plus_n_O, ?sub_plus by omega + | [ H : _ |- _ ] => progress rewrite ?(@drop_length _ HSL HSLP), ?(@take_length _ HSL HSLP), ?substring_length, ?Nat.add_sub, <- ?plus_n_O, ?Nat.sub_diag, ?Nat.sub_0_r, ?sub_plus in H by omega + | _ => progress rewrite ?drop_length, ?take_length, ?substring_length, ?Nat.add_sub, ?Nat.sub_diag, ?Nat.sub_0_r, <- ?plus_n_O, ?sub_plus by omega | [ H : is_true (string_beq _ _) |- _ ] => apply string_bl in H | [ |- _ \/ False ] => left | [ H : String.substring _ _ _ = String.String _ _ |- _ = _ :> nat ] => apply (f_equal String.length) in H; simpl in H @@ -615,37 +614,37 @@ Section IndexedImpl. 
=> rewrite <- (Nat.sub_min_distr_r x (y + z) z) | [ H : context[min ?x (?y + ?z) - ?z] |- _ ] => rewrite <- (Nat.sub_min_distr_r x (y + z) z) in H - | [ |- context[min ?x (?x - 1)] ] => rewrite (Min.min_r x (x - 1)) by (clear; omega) - | [ H : min ?x ?y = 1 |- _ ] => is_var x; revert H; apply (Min.min_case_strong x y) - | [ H : min ?x ?y = 1 |- _ ] => is_var y; revert H; apply (Min.min_case_strong x y) - | [ H : min ?x ?y = 0 |- _ ] => is_var x; revert H; apply (Min.min_case_strong x y) - | [ H : min ?x ?y = 0 |- _ ] => is_var y; revert H; apply (Min.min_case_strong x y) - | [ H : min ?x 1 = 0 |- _ ] => revert H; apply (Min.min_case_strong x 1) + | [ |- context[min ?x (?x - 1)] ] => rewrite (Nat.min_r x (x - 1)) by (clear; omega) + | [ H : min ?x ?y = 1 |- _ ] => is_var x; revert H; apply (Nat.min_case_strong x y) + | [ H : min ?x ?y = 1 |- _ ] => is_var y; revert H; apply (Nat.min_case_strong x y) + | [ H : min ?x ?y = 0 |- _ ] => is_var x; revert H; apply (Nat.min_case_strong x y) + | [ H : min ?x ?y = 0 |- _ ] => is_var y; revert H; apply (Nat.min_case_strong x y) + | [ H : min ?x 1 = 0 |- _ ] => revert H; apply (Nat.min_case_strong x 1) | [ |- context[0 + ?x] ] => change (0 + x) with x | [ |- context[?x - S ?y] ] - => not constr_eq y 0; rewrite !(Nat.sub_succ_r x y), !Minus.pred_of_minus + => not constr_eq y 0; rewrite !(Nat.sub_succ_r x y), !(fun n => eq_sym (Nat.sub_1_r n)) | [ H : ?x = 1 |- context[?x] ] => rewrite H | [ H : ?x = 1, H' : context[?x] |- _ ] => rewrite H in H' | [ H : ?x <= ?y |- context[min ?x ?y] ] - => rewrite (Min.min_l x y H) + => rewrite (Nat.min_l x y H) | [ H : ?y <= ?x |- context[min ?x ?y] ] - => rewrite (Min.min_r x y H) + => rewrite (Nat.min_r x y H) | [ H : ?x <= ?y |- context[?x - ?y] ] => replace (x - y) with 0 by (clear -H; omega) | [ H : context[?x - ?y], H' : ?x <= ?y |- _ ] => rewrite (proj2 (@Nat.sub_0_le x y)) in H by exact H' | [ H : context[min 0 ?x] |- _ ] => change (min 0 x) with 0 in H | [ |- context[min (min _ ?x) (?x - ?y)] ] - => rewrite <- (Min.min_assoc _ x (x - y)), (Min.min_r x (x - y)) by omega + => rewrite <- (Nat.min_assoc _ x (x - y)), (Nat.min_r x (x - y)) by omega | [ |- 1 = ?x ] => is_var x; destruct x | [ |- 1 = S ?x ] => is_var x; destruct x - | [ H : _ <= 0 |- _ ] => apply Le.le_n_0_eq in H; symmetry in H + | [ H : _ <= 0 |- _ ] => apply (fun n Hle => eq_sym (proj1 (Nat.le_0_r n) Hle)) in H; symmetry in H | [ H : context[min 1 ?x] |- _ ] => is_var x; destruct x | [ H : context[min 1 (S ?x)] |- _ ] => is_var x; destruct x | [ H : context[min 1 (S ?x)] |- _ ] => change (min 1 (S x)) with 1 in H - | [ H : context[min ?x ?y], H' : ?x <= ?y |- _ ] => rewrite Min.min_l in H by assumption - | [ H : context[min ?x ?y], H' : ?y <= ?x |- _ ] => rewrite Min.min_r in H by assumption - | [ H : context[min (?x - ?y) ?x] |- _ ] => rewrite Min.min_l in H by omega - | [ H : context[min ?x (?x - ?y)] |- _ ] => rewrite Min.min_r in H by omega + | [ H : context[min ?x ?y], H' : ?x <= ?y |- _ ] => rewrite Nat.min_l in H by assumption + | [ H : context[min ?x ?y], H' : ?y <= ?x |- _ ] => rewrite Nat.min_r in H by assumption + | [ H : context[min (?x - ?y) ?x] |- _ ] => rewrite Nat.min_l in H by omega + | [ H : context[min ?x (?x - ?y)] |- _ ] => rewrite Nat.min_r in H by omega | _ => omega end ]. @@ -741,7 +740,7 @@ Section IndexedImpl. end. Local Ltac pre_fin := repeat pre_fin'. - Local Arguments EqNat.beq_nat : simpl never. + Local Arguments Nat.eqb : simpl never. 
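(* Le.le_n_0_eq concluded 0 = n, whereas Nat.le_0_r is an iff concluding
   n = 0; hence the eq_sym wrapper in the hunk above. A standalone sketch
   of the recovered statement (illustrative name): *)
Require Import Coq.Arith.PeanoNat.

Lemma le_n_0_eq_compat (n : nat) : n <= 0 -> 0 = n.
Proof. intro H. exact (eq_sym (proj1 (Nat.le_0_r n) H)). Qed.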
Local Ltac do_Iterate_Ensemble_BoundedIndex_equiv := eapply Iterate_Ensemble_BoundedIndex_equiv; @@ -764,7 +763,7 @@ Section IndexedImpl. | _ => progress subst | [ H : Some _ = Some _ |- _ ] => inversion H; clear H | [ |- context[min ?x ?x] ] - => rewrite (Min.min_idempotent x) + => rewrite (Nat.min_idempotent x) | [ |- List.In ?x [?y] ] => left | [ |- context[List.map ?f [?x] ] ] => change (List.map f [x]) with [f x] | [ |- _ \/ False ] => left @@ -776,19 +775,19 @@ Section IndexedImpl. | [ H : context[unsafe_get] |- _ ] => erewrite unsafe_get_correct in H by eassumption | [ H : andb _ _ = true |- _ ] => apply Bool.andb_true_iff in H | [ H : andb _ _ = false |- _ ] => apply Bool.andb_false_iff in H - | [ H : EqNat.beq_nat _ _ = true |- _ ] => apply EqNat.beq_nat_true in H - | [ H : EqNat.beq_nat _ _ = false |- _ ] => apply EqNat.beq_nat_false in H + | [ H : Nat.eqb _ _ = true |- _ ] => apply (fun n m => proj1 (Nat.eqb_eq n m)) in H + | [ H : Nat.eqb _ _ = false |- _ ] => apply (fun n m => proj1 (Nat.eqb_neq n m)) in H | [ H : Compare_dec.leb _ _ = true |- _ ] => apply Compare_dec.leb_complete in H | [ H : Compare_dec.leb _ _ = false |- _ ] => apply Compare_dec.leb_complete_conv in H - | [ H : context[?x - ?x] |- _ ] => rewrite Minus.minus_diag in H - | [ H : context[min _ 0] |- _ ] => rewrite Min.min_0_r in H - | [ H : context[min 0 _] |- _ ] => rewrite Min.min_0_l in H + | [ H : context[?x - ?x] |- _ ] => rewrite Nat.sub_diag in H + | [ H : context[min _ 0] |- _ ] => rewrite Nat.min_0_r in H + | [ H : context[min 0 _] |- _ ] => rewrite Nat.min_0_l in H | [ H : option_beq ascii_beq _ _ = true |- _ ] => apply (option_bl (@ascii_bl)) in H | [ H : context[min ?x ?y], H' : ?x <= ?y |- _ ] - => rewrite (Min.min_l x y) in H by assumption + => rewrite (Nat.min_l x y) in H by assumption | [ H : context[min ?x ?y], H' : ?y <= ?x |- _ ] - => rewrite (Min.min_r x y) in H by assumption + => rewrite (Nat.min_r x y) in H by assumption | [ H : context[?x + ?y - ?y] |- _ ] => rewrite Nat.add_sub in H | [ H : context[(0 + ?x)%nat] |- _ ] => change (0 + x) with x in H | [ |- context[min ?x (?y + ?z) - ?z] ] @@ -814,19 +813,19 @@ Section IndexedImpl. | [ H : ?x = 0, H' : context[?x] |- _ ] => rewrite H in H' | [ H : 1 = snd (snd ?x), H' : context[snd (snd ?x)] |- _ ] => is_var x; rewrite <- H in H' - | [ |- context[min 0 _] ] => rewrite Min.min_0_l - | [ |- context[min _ 0] ] => rewrite Min.min_0_r + | [ |- context[min 0 _] ] => rewrite Nat.min_0_l + | [ |- context[min _ 0] ] => rewrite Nat.min_0_r | [ |- context[(_ - _)%natr] ] => rewrite minusr_minus | [ |- context[?x - ?y + min ?y ?x] ] => rewrite minus_plus_min - | [ |- context[?x - ?y + (min ?y ?x + _)] ] => rewrite !Plus.plus_assoc, minus_plus_min + | [ |- context[?x - ?y + (min ?y ?x + _)] ] => rewrite !Nat.add_assoc, minus_plus_min | [ H : ?x < S ?y |- context[?x - ?y] ] => rewrite (proj2 (Nat.sub_0_le x y)) by omega | [ H : ?x + ?y <= ?z, H' : context[min ?x (?z - ?y)] |- _ ] - => rewrite (Min.min_l x (z - y)) in H' by (clear -H; omega) + => rewrite (Nat.min_l x (z - y)) in H' by (clear -H; omega) | [ H : ?x <= ?y |- context[min ?x ?y] ] - => rewrite (Min.min_l x y H) + => rewrite (Nat.min_l x y H) | [ H : ?y <= ?x |- context[min ?x ?y] ] - => rewrite (Min.min_r x y H) + => rewrite (Nat.min_r x y H) | [ |- context[min ?x (?x - _)] ] => rewrite min_minus_r | _ => progress destruct_head' and | _ => progress destruct_head' or @@ -838,9 +837,9 @@ Section IndexedImpl. 
| [ |- context[_ - 0] ] => rewrite Nat.sub_0_r | [ H : forall n, n <= 0 -> _ |- _ ] => specialize (H 0 (reflexivity _)) | [ H : min _ ?x = ?v |- min ?v ?x = ?v ] - => revert H; clear; repeat apply Min.min_case_strong; intros; omega + => revert H; clear; repeat apply Nat.min_case_strong; intros; omega | [ |- context[min (min ?x ?y) ?y] ] - => rewrite <- (Min.min_assoc x y y), Min.min_idempotent + => rewrite <- (Nat.min_assoc x y y), Nat.min_idempotent | [ H : ret_length_less _ = ret_length_less _ |- _ ] => inversion H; clear H | [ H : ret_nat _ = ret_nat _ |- _ ] => inversion H; clear H | [ H : ret_pick _ = ret_pick _ |- _ ] => inversion H; clear H @@ -874,26 +873,26 @@ Section IndexedImpl. => rewrite substring_length, H in H' | [ H : length ?x = _ |- context[length (drop _ (drop _ ?x))] ] => (do 2 rewrite drop_length; rewrite H) - | _ => rewrite Minus.pred_of_minus + | _ => rewrite (fun n => eq_sym (Nat.sub_1_r n)) | [ H : ?x = 1 |- context[?x] ] => rewrite H | [ H : ?x = 1, H' : context[?x] |- _ ] => rewrite H in H' | [ H : min ?x ?y = 1 |- context[?x] ] - => (revert H; apply (Min.min_case_strong x y)) + => (revert H; apply (Nat.min_case_strong x y)) | _ => intro | [ |- context[?x - ?y] ] => rewrite (proj2 (Nat.sub_0_le x y)) by assumption | [ |- context[min ?x (pred ?x)] ] - => rewrite (Min.min_r x (pred x)) by omega + => rewrite (Nat.min_r x (pred x)) by omega | [ |- context[?x - ?y - (?x - ?z - ?y)] ] - => rewrite <- (Nat.sub_add_distr x z y), (Plus.plus_comm z y), (Nat.sub_add_distr x y z) + => rewrite <- (Nat.sub_add_distr x z y), (Nat.add_comm z y), (Nat.sub_add_distr x y z) | [ |- context[?x - (?x - _)] ] => rewrite sub_twice | [ |- context[min ?x (min (?x - ?z) ?y)] ] - => rewrite (Min.min_assoc x (x - z)), (Min.min_r x (x - z)) by omega - | [ |- min ?x ?y = ?y ] => rewrite Min.min_r by omega + => rewrite (Nat.min_assoc x (x - z)), (Nat.min_r x (x - z)) by omega + | [ |- min ?x ?y = ?y ] => rewrite Nat.min_r by omega | [ H : ?x - ?y = 0 |- _ ] => apply Nat.sub_0_le in H | [ H : ?x <= ?y, H' : ?y <= ?x |- _ ] - => assert (y = x) by (apply Le.le_antisym; assumption); + => assert (y = x) by (apply Nat.le_antisymm; assumption); clear H H' | [ H : length (substring _ ?x _) = ?v |- min ?v ?x = ?v ] => rewrite substring_length in H; clear -H @@ -903,7 +902,7 @@ Section IndexedImpl. | [ H : context[length (substring _ _ _)] |- _ ] => rewrite substring_length_no_min in H by first [ assumption | left; reflexivity | right; omega ] | [ H : context[min (?x - ?y) ?z], H' : ?y + ?z <= ?x |- _ ] - => rewrite (Min.min_r (x - y) z) in H by (clear -H'; omega) + => rewrite (Nat.min_r (x - y) z) in H by (clear -H'; omega) end. Local Ltac fin2 := repeat first [ progress fin_common | progress string_from_parse @@ -1017,7 +1016,7 @@ Section IndexedImpl. | [ H : context[length (substring ?n ?m ?s)] |- _ ] => let H' := fresh in assert (H' : length (substring n m s) = m) - by (rewrite substring_length; fin_common; rewrite Min.min_r by omega; omega); + by (rewrite substring_length; fin_common; rewrite Nat.min_r by omega; omega); rewrite !H' in H |- * end. @@ -1054,7 +1053,7 @@ Section IndexedImpl. | _ => progress destruct_head or; try solve [ fin2 ]; [] | [ H : ?x = ?y |- _ ] => is_var y; subst y | [ H : ?x = ?y |- _ ] => is_var x; subst x - | _ => rewrite Min.min_r by omega + | _ => rewrite Nat.min_r by omega | _ => progress fin2 | [ H : ?x = ?x |- _ ] => clear H end; @@ -1063,9 +1062,9 @@ Section IndexedImpl. 
| [ H : _ <= _ |- _ ] => revert H end; clear -HSLP; intros; - rewrite !drop_length, !substring_length, Min.min_r, Nat.add_sub by omega; + rewrite !drop_length, !substring_length, Nat.min_r, Nat.add_sub by omega; repeat match goal with - | [ H : context[min] |- _ ] => revert H; apply Min.min_case_strong + | [ H : context[min] |- _ ] => revert H; apply Nat.min_case_strong | [ H : ?x = 1, H' : context[?x] |- _ ] => rewrite H in H' | [ H : ?x = 1 |- context[?x] ] => rewrite H | [ H : ?x <= ?y |- context[?x - ?y] ] => replace (x - y) with 0 by omega @@ -1078,8 +1077,8 @@ Section IndexedImpl. => rewrite (Nat.sub_succ_r x y) | _ => progress rewrite ?sub_twice, ?Nat.sub_0_r | _ => erewrite <- S_pred by eassumption - | _ => rewrite Min.min_r by omega - | _ => rewrite Min.min_l by omega + | _ => rewrite Nat.min_r by omega + | _ => rewrite Nat.min_l by omega | _ => reflexivity end. } { repeat match goal with diff --git a/src/Parsers/Refinement/IndexedAndAtMostOneNonTerminalReflectiveOpt.v b/src/Parsers/Refinement/IndexedAndAtMostOneNonTerminalReflectiveOpt.v index 446833ae9..ea14bfa14 100644 --- a/src/Parsers/Refinement/IndexedAndAtMostOneNonTerminalReflectiveOpt.v +++ b/src/Parsers/Refinement/IndexedAndAtMostOneNonTerminalReflectiveOpt.v @@ -519,7 +519,7 @@ Section IndexedImpl_opt. | [ |- opt.minus _ _ = _ ] => apply f_equal2 | [ |- opt.combine _ _ = _ ] => apply f_equal2 | [ |- opt2.ret_cases_BoolDecR _ _ = _ ] => apply f_equal2 - | [ |- EqNat.beq_nat _ _ = _ ] => apply f_equal2 + | [ |- Nat.eqb _ _ = _ ] => apply f_equal2 | [ |- opt.nth _ _ _ = _ ] => apply f_equal3 | [ |- 0 = _ ] => reflexivity | [ |- opt.length (pregrammar_productions G) = _ ] => reflexivity @@ -572,7 +572,7 @@ Section IndexedImpl_opt. change (snd d) with (Common.opt2.snd d); change (fst (Common.opt2.snd d)) with (Common.opt2.fst (Common.opt2.snd d)); change (snd (Common.opt2.snd d)) with (Common.opt2.snd (Common.opt2.snd d)). - change EqNat.beq_nat with Common.opt2.beq_nat. + change Nat.eqb with Common.opt2.beq_nat. change andb with Common.opt2.andb. reflexivity. } diff --git a/src/Parsers/Refinement/PreTactics.v b/src/Parsers/Refinement/PreTactics.v index 6d49acc33..0f0f590bb 100644 --- a/src/Parsers/Refinement/PreTactics.v +++ b/src/Parsers/Refinement/PreTactics.v @@ -2,6 +2,7 @@ Require Export Fiat.Parsers.ContextFreeGrammar.Core. Require Export Fiat.Parsers.ContextFreeGrammar.PreNotations. Require Export Fiat.Parsers.StringLike.FirstCharSuchThat. Require Export Coq.Strings.String. +Require Export Coq.Arith.PeanoNat. Require Export Fiat.Computation.Core. Require Export Coq.Program.Program. Require Export Fiat.Computation.ApplyMonad. @@ -123,14 +124,14 @@ Ltac solve_prod_beq := repeat match goal with | [ |- context[true = false] ] => congruence | [ |- context[false = true] ] => congruence - | [ |- context[EqNat.beq_nat ?x ?y] ] + | [ |- context[Nat.eqb ?x ?y] ] => is_var x; let H := fresh in - destruct (EqNat.beq_nat x y) eqn:H; - [ apply EqNat.beq_nat_true in H; subst x + destruct (Nat.eqb x y) eqn:H; + [ apply (fun n m => proj1 (Nat.eqb_eq n m)) in H; subst x | ]; simpl - | [ |- context[EqNat.beq_nat ?x ?y] ] + | [ |- context[Nat.eqb ?x ?y] ] => first [ is_var x; fail 1 | generalize x; intro ] end. diff --git a/src/Parsers/Reflective/LogicalRelations.v b/src/Parsers/Reflective/LogicalRelations.v index cce387d0e..bdd1605b3 100644 --- a/src/Parsers/Reflective/LogicalRelations.v +++ b/src/Parsers/Reflective/LogicalRelations.v @@ -307,7 +307,7 @@ Proof. apply list_rect_nodep_meaning_correct; simpl; eauto with nocore. 
simpler_meaning; meaning_tac. } { simpler_meaning; meaning_tac. - rewrite Plus.plus_comm; meaning_tac. } + rewrite Nat.add_comm; meaning_tac. } Qed. Local Hint Resolve push_var. diff --git a/src/Parsers/Reflective/ParserSemantics.v b/src/Parsers/Reflective/ParserSemantics.v index 1dac18bc9..24b8764e3 100644 --- a/src/Parsers/Reflective/ParserSemantics.v +++ b/src/Parsers/Reflective/ParserSemantics.v @@ -3,6 +3,7 @@ Require Import Fiat.Parsers.Reflective.Semantics. Require Import Fiat.Parsers.Splitters.RDPList. Require Import Fiat.Parsers.GenericRecognizer. Require Import Fiat.Common.Wf Fiat.Common.Wf2. +Require Import Coq.Arith.PeanoNat. Set Implicit Arguments. Definition step_option_rec @@ -26,7 +27,7 @@ Proof. (or_introl pf) up_to_G_length offset' - (len - len0_minus_len') (Minus.le_minus len len0_minus_len'))) + (len - len0_minus_len') (Nat.le_sub_l len len0_minus_len'))) (fun _ => _) (Compare_dec.lt_dec len len0)). @@ -35,13 +36,13 @@ Proof. (fun is_valid => _) (fun _ => None) - (Sumbool.sumbool_of_bool (negb (EqNat.beq_nat valid_len0 0) && is_valid_nonterminal valids nt))%bool). + (Sumbool.sumbool_of_bool (negb (Nat.eqb valid_len0 0) && is_valid_nonterminal valids nt))%bool). refine (Some (fun offset' len0_minus_len' => parse_nt len0 (pred valid_len0) (or_intror (conj eq_refl (GenericRecognizer.pred_lt_beq _))) (RDPList.filter_out_eq nt valids) offset' - (len0 - len0_minus_len') (Minus.le_minus len0 len0_minus_len'))). + (len0 - len0_minus_len') (Nat.le_sub_l len0 len0_minus_len'))). apply (proj1 (proj1 (Bool.andb_true_iff _ _) is_valid)). Defined. diff --git a/src/Parsers/Reflective/PartialUnfold.v b/src/Parsers/Reflective/PartialUnfold.v index a6262beff..d4b142fe8 100644 --- a/src/Parsers/Reflective/PartialUnfold.v +++ b/src/Parsers/Reflective/PartialUnfold.v @@ -135,7 +135,7 @@ Section normalization_by_evaluation. | Rbeq_nat => specific_meaning_apply2 cnat cnat cbool (syntactify_bool _) - EqNat.beq_nat + Nat.eqb | Rmap A B => fun af => option_map (@syntactify_list _ _) diff --git a/src/Parsers/Reflective/Reify.v b/src/Parsers/Reflective/Reify.v index e25d1030b..07dd10765 100644 --- a/src/Parsers/Reflective/Reify.v +++ b/src/Parsers/Reflective/Reify.v @@ -185,7 +185,7 @@ Ltac reify_Term var term | andbr => constr:(@Randbr) | minusr => constr:(@Rminusr) | Compare_dec.leb => constr:(@Rleb) - | EqNat.beq_nat => constr:(@Rbeq_nat) + | Nat.eqb => constr:(@Rbeq_nat) | Equality.string_beq => constr:(@Rstring_beq) | @Reflective.interp_RCharExpr _ _ => constr:(@Rinterp_RCharExpr_ascii) end in diff --git a/src/Parsers/Reflective/Semantics.v b/src/Parsers/Reflective/Semantics.v index 197478e48..6ba082192 100644 --- a/src/Parsers/Reflective/Semantics.v +++ b/src/Parsers/Reflective/Semantics.v @@ -90,7 +90,7 @@ Definition interp_RLiteralTerm {T} (t : RLiteralTerm T) : interp_TypeCode T | Rpred => pred | Rplus => plus | Rleb => Compare_dec.leb - | Rbeq_nat => EqNat.beq_nat + | Rbeq_nat => Nat.eqb | Rstring_beq => Equality.string_beq | Rlength _ => @List.length _ | Rbool_rect_nodep _ => @bool_rect_nodep _ diff --git a/src/Parsers/Reflective/SyntaxEquivalenceReflective.v b/src/Parsers/Reflective/SyntaxEquivalenceReflective.v index 5738b7b4f..693c35103 100644 --- a/src/Parsers/Reflective/SyntaxEquivalenceReflective.v +++ b/src/Parsers/Reflective/SyntaxEquivalenceReflective.v @@ -103,7 +103,7 @@ Section Term_equiv. 
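(* Minus.le_minus is renamed to Nat.le_sub_l in the step_option_rec hunks
   above; the statement is unchanged. A standalone restatement under an
   illustrative name: *)
Require Import Coq.Arith.PeanoNat.

Lemma le_minus_compat (n m : nat) : n - m <= n.
Proof. exact (Nat.le_sub_l n m). Qed.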
: option pointed_Prop := match e1, e2 return option _ with | RVar t0 x, RVar t1 y - => match beq_nat (fst x) (fst y), List.nth_error G (List.length G - S (fst x)) with + => match Nat.eqb (fst x) (fst y), List.nth_error G (List.length G - S (fst x)) with | true, Some v => eq_type_and_var v (existT _ (t0, t1) (snd x, snd y)) | _, _ => None end diff --git a/src/Parsers/SimpleRecognizerCorrect.v b/src/Parsers/SimpleRecognizerCorrect.v index 029bae677..1c042a53d 100644 --- a/src/Parsers/SimpleRecognizerCorrect.v +++ b/src/Parsers/SimpleRecognizerCorrect.v @@ -1,5 +1,5 @@ (** * Proof that SimpleRecognizer outputs correct parse trees *) -Require Import Coq.Classes.Morphisms. +Require Import Coq.Classes.Morphisms Coq.Arith.PeanoNat. Require Import Fiat.Parsers.StringLike.Core. Require Import Fiat.Parsers.StringLike.Properties. Require Import Fiat.Parsers.ContextFreeGrammar.Core. @@ -56,7 +56,7 @@ Section convenience. | _ => progress simpl in * | _ => progress simpl_simple_parse_of_correct | [ |- exists p, Some ?x = Some p /\ _ ] => exists x - | [ H : ?x = 0 \/ _, H' : andb (EqNat.beq_nat ?x (S _)) _ = true |- _ ] + | [ H : ?x = 0 \/ _, H' : andb (Nat.eqb ?x (S _)) _ = true |- _ ] => destruct H | [ H : andb _ (char_at_matches _ _ _) = true |- _ ] => apply char_at_matches_is_char_no_ex in H; [ | assumption ] @@ -104,7 +104,7 @@ Section convenience. | [ |- context[take _ (take _ _)] ] => setoid_rewrite take_take | [ H : context[substring (?offset + ?loc) _ _] |- context[substring (_ + ?offset) _ _] ] - => rewrite (Plus.plus_comm offset loc) in H + => rewrite (Nat.add_comm offset loc) in H | [ H : context[to_production (production_tl _)] |- _ ] => rewrite production_tl_correct in H | [ H : ?x = _, H' : context[List.tl ?x] |- _ ] diff --git a/src/Parsers/SplitterFromParserADT.v b/src/Parsers/SplitterFromParserADT.v index 0589a0273..81b2bde66 100644 --- a/src/Parsers/SplitterFromParserADT.v +++ b/src/Parsers/SplitterFromParserADT.v @@ -184,7 +184,7 @@ Section parser. prove_string_eq. Qed. Definition mis_char (ch : Ascii.ascii) (str : cRep (projT1 splitter_impl)) : bool - := (EqNat.beq_nat (mlength str) 1 && mchar_at_matches 0 (ascii_beq ch) str)%bool. + := (Nat.eqb (mlength str) 1 && mchar_at_matches 0 (ascii_beq ch) str)%bool. Global Arguments mis_char : simpl never. Definition mis_char_eq {arg st str} (H : AbsR (projT2 splitter_impl) str st) : mis_char arg st = is_char str arg. @@ -197,8 +197,8 @@ Section parser. destruct H' as [H'0 H'1]. rewrite H'0, H'1; simpl. rewrite ascii_lb; reflexivity. } - { destruct (EqNat.beq_nat (length str) 1) eqn:H'0; simpl; [ | reflexivity ]. - apply EqNat.beq_nat_true in H'0. + { destruct (Nat.eqb (length str) 1) eqn:H'0; simpl; [ | reflexivity ]. + apply (fun n m => proj1 (Nat.eqb_eq n m)) in H'0. destruct (get 0 str) as [ch|] eqn:H'1; simpl; [ | exfalso ]. { destruct (ascii_beq arg ch) eqn:H''; [ | reflexivity ]. apply ascii_bl in H''; subst. diff --git a/src/Parsers/Splitters/BruteForce.v b/src/Parsers/Splitters/BruteForce.v index b19120ff3..1f3afa1c5 100644 --- a/src/Parsers/Splitters/BruteForce.v +++ b/src/Parsers/Splitters/BruteForce.v @@ -44,15 +44,15 @@ Section brute_force_splitter. 
try solve [ reflexivity | left; reflexivity | rewrite !take_long; trivial; reflexivity - | rewrite take_long at 1 by (rewrite Properties.substring_length; apply Min.min_case_strong; omega); + | rewrite take_long at 1 by (rewrite Properties.substring_length; apply Nat.min_case_strong; omega); symmetry; - rewrite take_long at 1 by (rewrite Properties.substring_length; apply Min.min_case_strong; omega); + rewrite take_long at 1 by (rewrite Properties.substring_length; apply Nat.min_case_strong; omega); reflexivity ]. } { eapply expand_minimal_parse_of_production; [ .. | eassumption ]; try solve [ reflexivity | left; reflexivity | apply bool_eq_empty; rewrite drop_length; omega - | apply bool_eq_empty; rewrite drop_length, Properties.substring_length; apply Min.min_case_strong; omega ]. } } + | apply bool_eq_empty; rewrite drop_length, Properties.substring_length; apply Nat.min_case_strong; omega ]. } } { exists n; simpl; repeat split; try assumption. right; apply List.in_map, in_up_to; assumption. } Qed. diff --git a/src/Parsers/Splitters/RDPList.v b/src/Parsers/Splitters/RDPList.v index b3b03c8ac..163604cd8 100644 --- a/src/Parsers/Splitters/RDPList.v +++ b/src/Parsers/Splitters/RDPList.v @@ -26,14 +26,14 @@ Section recursive_descent_parser_list. Notation rdp_list_production_carrierT := default_production_carrierT. Definition list_bin_eq - := Eval unfold list_bin in list_bin EqNat.beq_nat. + := Eval unfold list_bin in list_bin Nat.eqb. Definition rdp_list_is_valid_nonterminal : rdp_list_nonterminals_listT -> rdp_list_nonterminal_carrierT -> bool := fun ls nt => list_bin_eq nt ls. Local Ltac fix_list_bin_eq := unfold rdp_list_is_valid_nonterminal; - change list_bin_eq with (list_bin EqNat.beq_nat) in *. + change list_bin_eq with (list_bin Nat.eqb) in *. Local Notation valid_nonterminals := (map fst (pregrammar_productions G)). @@ -77,8 +77,8 @@ Section recursive_descent_parser_list. | _ => progress subst | [ |- nth _ _ _ = _ ] => apply nth_error_Some_nth | [ H : ?T |- _ <-> ?T ] => split; [ intro; assumption | intro ] - | [ H : is_true (list_bin _ _ _) |- _ ] => apply (list_in_bl (@beq_nat_true)) in H - | [ |- is_true (list_bin _ _ _) ] => apply list_in_lb; [ intros ???; subst; symmetry; apply beq_nat_refl | ] + | [ H : is_true (list_bin _ _ _) |- _ ] => apply (list_in_bl (fun n m => proj1 (Nat.eqb_eq n m))) in H + | [ |- is_true (list_bin _ _ _) ] => apply list_in_lb; [ intros ???; subst; symmetry; apply eq_sym, Nat.eqb_refl | ] | [ H : In _ (up_to _) |- _ ] => apply in_up_to_iff in H | [ |- In _ (up_to _) ] => apply in_up_to_iff | [ H : In ?nt _ |- _ ] => let H' := fresh in assert (H' : string_beq nt nt = false) by eauto; exfalso; clear -H' @@ -165,7 +165,7 @@ Section recursive_descent_parser_list. simpl in *. unfold rdp_list_to_nonterminal, rdp_list_of_nonterminal, rdp_list_is_valid_nonterminal, rdp_list_initial_nonterminals_data, default_to_nonterminal, default_of_nonterminal. intros nt H. - apply (list_in_bl (@beq_nat_true)), in_up_to_iff in H. + apply (list_in_bl (fun n m => proj1 (Nat.eqb_eq n m))), in_up_to_iff in H. revert nt H. replace valid_nonterminals with (uniquize string_beq valid_nonterminals) by (rewrite HNoDup; reflexivity). induction valid_nonterminals as [|x xs IHxs]. @@ -186,7 +186,7 @@ Section recursive_descent_parser_list. rewrite (string_lb eq_refl); trivial. } { simpl Datatypes.length. intro H. - apply lt_S_n in H. + apply Nat.succ_lt_mono in H. etransitivity; [ | eapply (f_equal S), (IHxs _ H); clear IHxs ]. rewrite first_index_default_S_cons; simpl. 
match goal with @@ -252,12 +252,12 @@ Section recursive_descent_parser_list. induction (Lookup_string G nt) as [|p ps IHps]; simpl. { reflexivity. } { simpl. - rewrite minus_diag; simpl. + rewrite Nat.sub_diag; simpl. apply f_equal. etransitivity; [ | apply IHps ]; clear IHps. apply map_ext_in. intros a H; apply in_up_to_iff in H. - rewrite NPeano.Nat.sub_succ_r. + rewrite Nat.sub_succ_r. destruct (Datatypes.length ps - a) eqn:H'; try omega; []. reflexivity. } Qed. @@ -292,7 +292,7 @@ Section recursive_descent_parser_list. intros nt Hnt. apply Forall_map, Forall_forall; unfold compose; simpl; intros ? H. fix_list_bin_eq. - apply list_in_bl in Hnt; [ | exact (EqNat.beq_nat_true) ]. + apply list_in_bl in Hnt; [ | exact (fun n m => proj1 (Nat.eqb_eq n m)) ]. apply in_up_to_iff in Hnt. unfold default_production_carrier_valid; simpl. repeat (apply Bool.andb_true_iff; split); apply leb_iff. @@ -324,7 +324,7 @@ Section recursive_descent_parser_list. { apply rdp_list_initial_nonterminals_correct'. unfold rdp_list_is_valid_nonterminal, rdp_list_initial_nonterminals_data. fix_list_bin_eq. - apply list_in_lb; [ apply beq_nat_true_iff | ]. + apply list_in_lb; [ apply Nat.eqb_eq | ]. rewrite map_length. apply in_up_to; assumption. } { unfold rdp_list_to_nonterminal. @@ -337,7 +337,7 @@ Section recursive_descent_parser_list. Qed. Definition filter_out_eq nt ls - := Eval unfold filter_out in filter_out (EqNat.beq_nat nt) ls. + := Eval unfold filter_out in filter_out (Nat.eqb nt) ls. Definition rdp_list_remove_nonterminal : rdp_list_nonterminals_listT -> rdp_list_nonterminal_carrierT -> rdp_list_nonterminals_listT := fun ls nt => @@ -345,7 +345,7 @@ Section recursive_descent_parser_list. Local Ltac fix_filter_out_eq := unfold rdp_list_remove_nonterminal; - change filter_out_eq with (fun nt ls => filter_out (EqNat.beq_nat nt) ls); cbv beta; + change filter_out_eq with (fun nt ls => filter_out (Nat.eqb nt) ls); cbv beta; setoid_rewrite filter_out_filter. Local Ltac fix_eqs := try fix_filter_out_eq; try fix_list_bin_eq. @@ -357,7 +357,7 @@ Section recursive_descent_parser_list. induction ls; trivial; simpl in *. repeat match goal with | [ |- context[if ?a then _ else _] ] => destruct a; simpl in * - | [ |- S _ <= S _ ] => solve [ apply Le.le_n_S; auto ] + | [ |- S _ <= S _ ] => solve [ apply (fun n m => proj1 (Nat.succ_le_mono n m)); auto ] | [ |- _ <= S _ ] => solve [ apply le_S; auto ] end. Qed. @@ -368,7 +368,7 @@ Section recursive_descent_parser_list. intros ls prods H. unfold rdp_list_is_valid_nonterminal, rdp_list_nonterminals_listT_R, rdp_list_remove_nonterminal, ltof in *. fix_eqs. - apply list_in_bl in H; [ | solve [ intros; apply beq_nat_true; trivial ] ]. + apply list_in_bl in H; [ | solve [ intros; apply Nat.eqb_eq; trivial ] ]. match goal with | [ H : In ?prods ?ls |- context[filter ?f ?ls] ] => assert (~In prods (filter f ls)) @@ -376,7 +376,7 @@ Section recursive_descent_parser_list. { intro H'. apply filter_In in H'. destruct H' as [? H']. - rewrite <- beq_nat_refl in H'. + rewrite Nat.eqb_refl in H'. simpl in *; congruence. } { match goal with | [ |- context[filter ?f ?ls] ] => generalize dependent f; intros @@ -389,8 +389,8 @@ Section recursive_descent_parser_list. 
| [ H : ~(_ \/ _) |- _ ] => apply Decidable.not_or in H | [ H : _ /\ _ |- _ ] => destruct H | [ H : ?x <> ?x |- _ ] => exfalso; apply (H eq_refl) - | _ => apply Lt.lt_n_S - | _ => apply Le.le_n_S + | _ => apply (fun n m => (proj1 (Nat.succ_lt_mono n m))) + | _ => apply (fun n m => (proj1 (Nat.succ_le_mono n m))) | _ => apply filter_list_dec | [ H : _ -> _ -> ?G |- ?G ] => apply H; auto end. } @@ -418,10 +418,10 @@ Section recursive_descent_parser_list. | [ H : In _ (filter _ _) |- _ ] => apply filter_In in H | [ H : _ /\ _ |- _ ] => destruct H | [ H : ?T, H' : ~?T |- _ ] => destruct (H' H) - | [ H : list_bin _ _ _ = true |- _ ] => apply list_in_bl in H; [ | solve [ intros; apply beq_nat_true; trivial ] ] + | [ H : list_bin _ _ _ = true |- _ ] => apply list_in_bl in H; [ | solve [ intros; apply (fun n m => (proj1 (Nat.eqb_eq n m))); trivial ] ] | [ |- list_bin _ _ _ = true ] => apply list_in_lb | _ => progress subst - | [ |- context[beq_nat ?x ?x] ] => rewrite <- beq_nat_refl + | [ |- context[Nat.eqb ?x ?x] ] => rewrite Nat.eqb_refl end. Qed. @@ -454,17 +454,17 @@ Section recursive_descent_parser_list. | [ H : string_beq _ _ = true |- _ ] => apply string_bl in H | [ H : context[string_beq ?x ?x] |- _ ] => rewrite (string_lb (eq_refl x)) in H | _ => progress simpl in * - | [ H : list_bin _ _ _ = true |- _ ] => apply list_in_bl in H; [ | solve [ intros; apply beq_nat_true; trivial ] ] + | [ H : list_bin _ _ _ = true |- _ ] => apply list_in_bl in H; [ | solve [ intros; apply (fun n m => (proj1 (Nat.eqb_eq n m))); trivial ] ] | [ |- list_bin _ _ _ = true ] => apply list_in_lb | [ |- context[list_bin ?x ?y ?z = false] ] => case_eq (list_bin x y z) | [ H : list_bin _ _ _ = false |- _ ] => apply Bool.not_true_iff_false in H | [ H : list_bin _ _ _ <> true |- _ ] => specialize (fun pf H' => H (list_in_lb pf H')) | [ H : ?A -> ?B |- _ ] => let H' := fresh in - assert (H' : A) by (intros; subst; rewrite <- ?beq_nat_refl; reflexivity); + assert (H' : A) by (intros; subst; rewrite ?Nat.eqb_refl; reflexivity); specialize (H H'); clear H' - | [ H : beq_nat _ _ = true |- _ ] => apply beq_nat_true in H - | [ H : context[beq_nat ?x ?x] |- _ ] => rewrite <- beq_nat_refl in H + | [ H : Nat.eqb _ _ = true |- _ ] => apply (fun n m => (proj1 (Nat.eqb_eq n m))) in H + | [ H : context[Nat.eqb ?x ?x] |- _ ] => rewrite Nat.eqb_refl in H end. Qed. @@ -475,7 +475,7 @@ Section recursive_descent_parser_list. fix_eqs. intro H. cut (Datatypes.length ls < Datatypes.length ls); try omega. - eapply Lt.lt_le_trans; [ eassumption | ]. + eapply Nat.lt_le_trans; [ eassumption | ]. apply filter_list_dec. Qed. diff --git a/src/Parsers/StringLike/FirstChar.v b/src/Parsers/StringLike/FirstChar.v index 670465cda..7e53e9123 100644 --- a/src/Parsers/StringLike/FirstChar.v +++ b/src/Parsers/StringLike/FirstChar.v @@ -2,7 +2,6 @@ Require Export Fiat.Common.Coq__8_4__8_5__Compat. (** * Mapping predicates over [StringLike] things *) Require Import Coq.ZArith.ZArith. -Require Import Coq.Numbers.Natural.Peano.NPeano. Require Import Fiat.Parsers.StringLike.Core. Require Import Fiat.Parsers.StringLike.Properties. Require Import Fiat.Parsers.StringLike.ForallChars. @@ -111,7 +110,7 @@ Section for_first_char. | [ H : _ |- _ ] => rewrite drop_length in H | [ H : ?x = 1, H' : context[?x] |- _ ] => rewrite H in H' | _ => erewrite singleton_unique; eassumption - | [ H : context[min] |- _ ] => revert H; apply Min.min_case_strong + | [ H : context[min] |- _ ] => revert H; apply Nat.min_case_strong end. 
} { match goal with | [ H : _ |- _ ] => apply H @@ -138,7 +137,7 @@ Section for_first_char. Proof. rewrite (for_first_char__take 0). assert (H' : length (take 1 str) = 1) - by (rewrite take_length; apply Min.min_case_strong; omega). + by (rewrite take_length; apply Nat.min_case_strong; omega). destruct (singleton_exists _ H') as [ch H'']. rewrite for_first_char_singleton_length by exact H'. split; intros. @@ -361,7 +360,7 @@ Section for_first_char. destruct (get 0 (take 1 str)) as [ch|] eqn:Heq. { exists ch; split; auto. rewrite is_char_parts. - rewrite take_length; split; [ apply Min.min_case_strong; omega | assumption ]. } + rewrite take_length; split; [ apply Nat.min_case_strong; omega | assumption ]. } { apply no_first_char_empty in Heq. rewrite take_length, H in Heq. simpl in *; omega. } } diff --git a/src/Parsers/StringLike/FirstCharSuchThat.v b/src/Parsers/StringLike/FirstCharSuchThat.v index 29bfb322e..c21133931 100644 --- a/src/Parsers/StringLike/FirstCharSuchThat.v +++ b/src/Parsers/StringLike/FirstCharSuchThat.v @@ -1,7 +1,6 @@ Require Export Fiat.Common.Coq__8_4__8_5__Compat. (** * Mapping predicates over [StringLike] things *) - -Require Import Coq.Numbers.Natural.Peano.NPeano. +Require Import Coq.Arith.PeanoNat. Require Import Fiat.Parsers.StringLike.Core. Require Import Fiat.Parsers.StringLike.Properties. Require Import Fiat.Parsers.StringLike.ForallChars. @@ -55,7 +54,7 @@ Section is_first_char_such_that. try solve [ left; assumption | right; omega | apply for_first_char_nil; omega - | apply forall_chars_nil; rewrite ?take_length; try apply Min.min_case_strong; intros; omega + | apply forall_chars_nil; rewrite ?take_length; try apply Nat.min_case_strong; intros; omega | destruct (length str); try solve [ left; repeat split; eauto; omega | right; repeat split; eauto; omega ] ]. @@ -90,8 +89,8 @@ Section is_first_char_such_that. | _ => omega | [ H : ?A /\ ?B -> ?C |- _ ] => specialize (fun x y => H (conj x y)) | [ H : _ -> ?A /\ ?B -> ?C |- _ ] => specialize (fun a x y => H a (conj x y)) - | [ H : _ |- _ ] => progress rewrite ?drop_0, ?drop_drop, ?take_take, ?drop_length, ?take_length, ?drop_take, ?Nat.add_1_r, ?Nat.le_sub_le_add_r, <- ?(Plus.plus_comm 2), <- ?Nat.lt_add_lt_sub_r in H - | _ => progress rewrite ?drop_0, ?drop_drop, ?take_take, ?drop_length, ?take_length, ?drop_take, ?Nat.add_1_r, ?Nat.le_sub_le_add_r, <- ?(Plus.plus_comm 2), <- ?Nat.lt_add_lt_sub_r + | [ H : _ |- _ ] => progress rewrite ?drop_0, ?drop_drop, ?take_take, ?drop_length, ?take_length, ?drop_take, ?Nat.add_1_r, ?Nat.le_sub_le_add_r, <- ?(Nat.add_comm 2), <- ?Nat.lt_add_lt_sub_r in H + | _ => progress rewrite ?drop_0, ?drop_drop, ?take_take, ?drop_length, ?take_length, ?drop_take, ?Nat.add_1_r, ?Nat.le_sub_le_add_r, <- ?(Nat.add_comm 2), <- ?Nat.lt_add_lt_sub_r | _ => progress destruct_head or | [ |- _ /\ _ ] => split | [ |- _ <-> _ ] => split diff --git a/src/Parsers/StringLike/ForallChars.v b/src/Parsers/StringLike/ForallChars.v index cdb8395d3..eb2e7023c 100644 --- a/src/Parsers/StringLike/ForallChars.v +++ b/src/Parsers/StringLike/ForallChars.v @@ -1,7 +1,6 @@ Require Export Fiat.Common.Coq__8_4__8_5__Compat. (** * Mapping predicates over [StringLike] things *) -Require Import Coq.Numbers.Natural.Peano.NPeano. Require Import Coq.ZArith.ZArith. Require Import Fiat.Parsers.StringLike.Core. Require Import Fiat.Parsers.StringLike.Properties. @@ -153,7 +152,7 @@ Section forall_chars. 
| [ H : _ |- _ ] => rewrite drop_length in H | [ H : ?x = 1, H' : context[?x] |- _ ] => rewrite H in H' | _ => erewrite singleton_unique; eassumption - | [ H : context[min] |- _ ] => revert H; apply Min.min_case_strong + | [ H : context[min] |- _ ] => revert H; apply Nat.min_case_strong end. } { match goal with | [ H : _ |- _ ] => apply (H 0) @@ -185,7 +184,7 @@ Section forall_chars. intros n H'''. rewrite H''' in *. rewrite sub_plus in H'' by omega. - rewrite Minus.minus_diag in *. + rewrite Nat.sub_diag in *. specialize (H'' eq_refl). destruct H'' as [ch H'']. exfalso; eapply H', H. diff --git a/src/Parsers/StringLike/LastChar.v b/src/Parsers/StringLike/LastChar.v index bb5a60467..d9b0a4d3c 100644 --- a/src/Parsers/StringLike/LastChar.v +++ b/src/Parsers/StringLike/LastChar.v @@ -1,7 +1,6 @@ Require Export Fiat.Common.Coq__8_4__8_5__Compat. (** * Mapping predicates over [StringLike] things *) Require Import Coq.ZArith.ZArith. -Require Import Coq.Numbers.Natural.Peano.NPeano. Require Import Fiat.Parsers.StringLike.Core. Require Import Fiat.Parsers.StringLike.Properties. Require Import Fiat.Parsers.StringLike.ForallChars. @@ -133,7 +132,7 @@ Section for_last_char. | [ H : _ |- _ ] => rewrite drop_length in H | [ H : ?x = 1, H' : context[?x] |- _ ] => rewrite H in H' | _ => erewrite singleton_unique; eassumption - | [ H : context[min] |- _ ] => revert H; apply Min.min_case_strong + | [ H : context[min] |- _ ] => revert H; apply Nat.min_case_strong end. } { match goal with | [ H : _ |- _ ] => apply H @@ -390,7 +389,7 @@ Section for_last_char. destruct (for_last_char_combine HR H0 H1) as [H2|]; [ | assumption ]. contradict H2. rewrite drop_length, take_length. - apply Min.min_case_strong; omega. + apply Nat.min_case_strong; omega. Qed. Lemma for_last_char_exists_get (str : String) P (H : length str >= 1) diff --git a/src/Parsers/StringLike/LastCharSuchThat.v b/src/Parsers/StringLike/LastCharSuchThat.v index 33b515e3d..da0e69828 100644 --- a/src/Parsers/StringLike/LastCharSuchThat.v +++ b/src/Parsers/StringLike/LastCharSuchThat.v @@ -1,7 +1,6 @@ Require Export Fiat.Common.Coq__8_4__8_5__Compat. (** * Mapping predicates over [StringLike] things *) Require Import Coq.ZArith.ZArith. -Require Import Coq.Numbers.Natural.Peano.NPeano. Require Import Fiat.Parsers.StringLike.Core. Require Import Fiat.Parsers.StringLike.Properties. Require Import Fiat.Parsers.StringLike.ForallChars. @@ -44,7 +43,7 @@ Section is_after_last_char_such_that. unfold is_after_last_char_such_that. split; [ apply forall_chars_nil | apply for_last_char_nil ]; rewrite ?take_length, ?drop_length; - try apply Min.min_case_strong; omega. + try apply Nat.min_case_strong; omega. Qed. Lemma after_last_char_such_that_0 @@ -79,11 +78,11 @@ Section is_after_last_char_such_that. destruct_head_hnf and; destruct_head_hnf or; destruct_head_hnf and; repeat split; trivial; - try rewrite Min.min_idempotent in *; + try rewrite Nat.min_idempotent in *; try solve [ left; assumption | right; omega | apply for_last_char_nil; omega - | apply forall_chars_nil; rewrite ?drop_length, ?take_length; try apply Min.min_case_strong; intros; omega + | apply forall_chars_nil; rewrite ?drop_length, ?take_length; try apply Nat.min_case_strong; intros; omega | destruct (length str); try solve [ left; repeat split; eauto; omega | right; repeat split; eauto; omega ] ]. @@ -126,8 +125,8 @@ Section is_after_last_char_such_that. 
| _ => omega | [ H : ?A /\ ?B -> ?C |- _ ] => specialize (fun x y => H (conj x y)) | [ H : _ -> ?A /\ ?B -> ?C |- _ ] => specialize (fun a x y => H a (conj x y)) - | [ H : _ |- _ ] => progress rewrite ?drop_0, ?drop_drop, ?take_take, ?drop_length, ?take_length, ?take_drop, ?Nat.add_1_r, ?Nat.le_sub_le_add_r, <- ?(Plus.plus_comm 2), <- ?Nat.lt_add_lt_sub_r in H - | _ => progress rewrite ?drop_0, ?drop_drop, ?take_take, ?drop_length, ?take_length, ?take_drop, ?Nat.add_1_r, ?Nat.le_sub_le_add_r, <- ?(Plus.plus_comm 2), <- ?Nat.lt_add_lt_sub_r + | [ H : _ |- _ ] => progress rewrite ?drop_0, ?drop_drop, ?take_take, ?drop_length, ?take_length, ?take_drop, ?Nat.add_1_r, ?Nat.le_sub_le_add_r, <- ?(Nat.add_comm 2), <- ?Nat.lt_add_lt_sub_r in H + | _ => progress rewrite ?drop_0, ?drop_drop, ?take_take, ?drop_length, ?take_length, ?take_drop, ?Nat.add_1_r, ?Nat.le_sub_le_add_r, <- ?(Nat.add_comm 2), <- ?Nat.lt_add_lt_sub_r | _ => progress destruct_head or | [ |- _ /\ _ ] => split | [ |- _ <-> _ ] => split @@ -138,9 +137,9 @@ Section is_after_last_char_such_that. => eapply for_last_char__add_drop; eexact H | [ H : for_last_char (drop _ _) _ |- _ ] - => rewrite <- for_last_char__drop in H by (rewrite ?take_length; apply Min.min_case_strong; omega) + => rewrite <- for_last_char__drop in H by (rewrite ?take_length; apply Nat.min_case_strong; omega) | [ |- forall_chars (drop _ (take _ _)) _ ] - => rewrite drop_take, <- minus_Sn_m, minus_diag by omega; simpl; + => rewrite drop_take, Nat.sub_succ_l, Nat.sub_diag by omega; simpl; apply (proj1 (forall_chars_take _ _)) | [ H : forall_chars (drop ?n ?str) ?P |- forall_chars (drop (S ?n) ?str) ?P ] => rewrite <- (drop_drop str 1 n); apply (proj1 (forall_chars_drop _ _)); assumption @@ -222,7 +221,7 @@ Section is_after_last_char_such_that. => destruct (fun pf => for_last_char_combine (T := T') pf H H'); [ cbv beta; tauto | | ]; clear H H' | [ H : length (take _ _) = 0 |- _ ] => rewrite take_length in H - | [ H : min ?x ?y = 0 |- _ ] => revert H; apply (Min.min_case_strong x y) + | [ H : min ?x ?y = 0 |- _ ] => revert H; apply (Nat.min_case_strong x y) | _ => intro end.. | let H := fresh in diff --git a/src/Parsers/StringLike/OcamlString.v b/src/Parsers/StringLike/OcamlString.v index 66129921a..83e6a13f3 100644 --- a/src/Parsers/StringLike/OcamlString.v +++ b/src/Parsers/StringLike/OcamlString.v @@ -1,5 +1,4 @@ Require Export Fiat.Common.Coq__8_4__8_5__Compat. -Require Import Coq.Numbers.Natural.Peano.NPeano. Require Import Coq.ZArith.ZArith. Require Import Coq.Strings.Ascii. Require Import Coq.Strings.String. 
@@ -38,19 +37,19 @@ Local Ltac t' := | _ => exfalso; congruence | _ => rewrite substring_length | _ => rewrite <- plus_n_O - | _ => rewrite <- Minus.minus_n_O - | _ => rewrite Min.min_l by omega - | _ => rewrite Min.min_r by omega + | _ => rewrite Nat.sub_0_r + | _ => rewrite Nat.min_l by omega + | _ => rewrite Nat.min_r by omega | _ => rewrite substring_correct3 by assumption | _ => rewrite substring_substring | _ => rewrite substring_correct3' | _ => rewrite substring_correct0 | _ => rewrite Nat.sub_min_distr_l | _ => rewrite Nat.add_sub - | _ => rewrite Min.min_0_r - | _ => rewrite Min.min_0_l + | _ => rewrite Nat.min_0_r + | _ => rewrite Nat.min_0_l | _ => rewrite Nat.add_1_r - | _ => rewrite <- Min.min_assoc + | _ => rewrite <- Nat.min_assoc | [ |- String.get _ _ = _ ] => apply StringProperties.get_correct | _ => progress rewrite ?string_beq_correct, ?ascii_beq_correct, ?string_copy_length | [ H : _ |- _ ] => progress rewrite ?string_beq_correct, ?ascii_beq_correct in H @@ -59,11 +58,11 @@ Local Ltac t' := by (rewrite <- Nat.sub_min_distr_l; apply f_equal2; omega) | [ |- context[min (?m + ?n) ?m] ] => replace (min (m + n) m) with (m + min n 0) - by (rewrite <- Min.plus_min_distr_l; apply f_equal2; omega) - | _ => rewrite Min.min_comm; reflexivity + by (rewrite <- Nat.add_min_distr_l; apply f_equal2; omega) + | _ => rewrite Nat.min_comm; reflexivity | [ |- context[string_eq_dec ?x ?y] ] => destruct (string_eq_dec x y) | [ H : _ <> _ |- False ] => apply H; clear H - | _ => apply Max.max_case_strong; intro; apply substring_correct4; omega + | _ => apply Nat.max_case_strong; intro; apply substring_correct4; omega | [ H : Coq.Strings.String.length ?s = 1 |- _ ] => is_var s; destruct s | [ H : S (Coq.Strings.String.length ?s) = 1 |- _ ] => is_var s; destruct s | _ => eexists; rewrite (ascii_lb eq_refl); reflexivity @@ -77,9 +76,9 @@ Local Ltac t' := | rewrite substring_correct2 by omega ] | _ => rewrite <- substring_correct3'; apply substring_correct2; omega | [ H : forall n, Coq.Strings.String.get n _ = Coq.Strings.String.get n _ |- _ ] => apply get_correct in H - | [ H : EqNat.beq_nat _ _ = true |- _ ] => apply EqNat.beq_nat_true in H - | [ H : EqNat.beq_nat _ _ = false |- _ ] => apply EqNat.beq_nat_false in H - | [ H : context[EqNat.beq_nat ?x ?y] |- _ ] => destruct (EqNat.beq_nat x y) eqn:? + | [ H : Nat.eqb _ _ = true |- _ ] => apply (proj1 (Nat.eqb_eq _ _)) in H + | [ H : Nat.eqb _ _ = false |- _ ] => apply (proj1 (Nat.eqb_neq _ _)) in H + | [ H : context[Nat.eqb ?x ?y] |- _ ] => destruct (Nat.eqb x y) eqn:? | [ H : context[option_beq ascii_beq _ _] |- _ ] => rewrite (option_beq_correct (@ascii_bl) (@ascii_lb)) in H | [ H : context[option_eq_dec ?beq ?bl ?lb ?x ?y] |- _ ] => destruct (option_eq_dec beq bl lb x y) | _ => progress change (andb true) with (fun x : bool => x) in * @@ -107,7 +106,7 @@ Local Ltac t' := | [ |- String.substring ?n ?m ?s = String.substring ?n ?m' ?s ] => not constr_eq m m'; replace m with m' by omega | [ |- String.substring ?n ?m ?s = String.substring ?n ?m' ?s ] - => not constr_eq m m'; replace m with m' by (repeat apply Min.min_case_strong; intros; omega) + => not constr_eq m m'; replace m with m' by (repeat apply Nat.min_case_strong; intros; omega) end. Local Ltac t := repeat t'. @@ -123,7 +122,7 @@ Module Export Ocaml. 
:= { get n s := String.safe_get s n; take n s := String.sub s 0 n; drop n s := String.sub s n (String.length s - n); - is_char s ch := ((EqNat.beq_nat (String.length s) 1) + is_char s ch := ((Nat.eqb (String.length s) 1) && (option_beq ascii_beq (String.safe_get s 0) (Some ch)))%bool; bool_eq s1 s2 := Zbool.Zeq_bool (z_of_int (Ocaml.String.compare s1 s2)) 0%Z }. diff --git a/src/Parsers/StringLike/Properties.v b/src/Parsers/StringLike/Properties.v index ff95f9929..56bd38370 100644 --- a/src/Parsers/StringLike/Properties.v +++ b/src/Parsers/StringLike/Properties.v @@ -1,7 +1,6 @@ Require Export Fiat.Common.Coq__8_4__8_5__Compat. (** * Theorems about string-like types *) -Require Import Coq.Numbers.Natural.Peano.NPeano. Require Import Coq.ZArith.ZArith. Require Import Fiat.Common. Require Import Fiat.Common.List.Operations. @@ -147,10 +146,10 @@ Section String. Proof. destruct (le_ge_dec (length str) n). { rewrite take_long by assumption. - rewrite Min.min_r by assumption. + rewrite Nat.min_r by assumption. reflexivity. } { rewrite take_short_length by assumption. - rewrite Min.min_l by assumption. + rewrite Nat.min_l by assumption. reflexivity. } Qed. @@ -188,14 +187,14 @@ Section String. destruct H0' as [H''|H'']; try clear H0'; [ apply f_equal | exfalso ]. { apply le_proof_irrelevance. } { setoid_subst s1. - eapply lt_irrefl; eassumption. } + eapply Nat.lt_irrefl; eassumption. } Qed. Definition strle_right (H' : s1 =s s2) : H0' = or_intror H'. Proof. destruct H0' as [H''|H'']; try clear H0'; [ exfalso | apply f_equal ]. - { setoid_subst s1; eapply lt_irrefl; eassumption. } + { setoid_subst s1; eapply Nat.lt_irrefl; eassumption. } { apply dec_eq_uip. decide equality. } Qed. @@ -208,7 +207,7 @@ Section String. : False. Proof. rewrite H' in H0'. - eapply lt_irrefl; eassumption. + eapply Nat.lt_irrefl; eassumption. Qed. Lemma singleton_exists_unique : forall s, length s = 1 -> exists !ch, s ~= [ ch ]. @@ -236,7 +235,7 @@ Section String. Lemma take_empty {str n} (H' : length str = 0) : take n str =s str. Proof. apply bool_eq_empty; rewrite ?drop_length, ?take_length; trivial. - apply Min.min_case_strong; omega. + apply Nat.min_case_strong; omega. Qed. Definition get_first_char_nonempty' str (H' : length str <> 0) : Char. @@ -380,7 +379,7 @@ Section String. { intros. rewrite !(@get_drop (length _ - _)). rewrite drop_length, drop_drop. - rewrite <- Nat.sub_add_distr, (plus_comm m), Nat.sub_add_distr, Nat.sub_add by omega. + rewrite <- Nat.sub_add_distr, (Nat.add_comm m), Nat.sub_add_distr, Nat.sub_add by omega. match goal with | [ |- context[match ?e with _ => _ end] ] => destruct e eqn:? end. @@ -413,9 +412,9 @@ Section String. congruence. } { intros ? H''. rewrite fold'_cons. - rewrite !H'', minus_diag, H'. + rewrite !H'', Nat.sub_diag, H'. rewrite drop_length, H''; simpl. - rewrite <- minus_n_O. + rewrite Nat.sub_0_r. apply f_equal. rewrite <- fold'_drop by omega; reflexivity. } } { intro H'. @@ -475,12 +474,12 @@ Section String. { intros. rewrite !(@get_drop (length _ - _)). rewrite drop_length, drop_drop. - rewrite <- Nat.sub_add_distr, (plus_comm m), Nat.sub_add_distr, Nat.sub_add by omega. + rewrite <- Nat.sub_add_distr, (Nat.add_comm m), Nat.sub_add_distr, Nat.sub_add by omega. match goal with | [ |- context[match ?e with _ => _ end] ] => destruct e eqn:? end. 
{ rewrite drop_drop; - rewrite <- Nat.sub_add_distr, (plus_comm m), Nat.sub_add_distr, Nat.sub_add by omega; + rewrite <- Nat.sub_add_distr, (Nat.add_comm m), Nat.sub_add_distr, Nat.sub_add by omega; rewrite <- IHlen by omega; reflexivity. } { reflexivity. } } Qed. @@ -510,10 +509,10 @@ Section String. congruence. } { intros n H''. rewrite fold_lookahead'_cons. - rewrite !H'', minus_diag, H'. + rewrite !H'', Nat.sub_diag, H'. replace (S n - n) with 1 by omega. rewrite drop_length, H''; simpl. - rewrite <- minus_n_O. + rewrite Nat.sub_0_r. apply f_equal. rewrite <- fold_lookahead'_drop by omega; reflexivity. } } { intro H'. @@ -565,14 +564,14 @@ Section String. | [ H : length ?str = 0, H' : is_true (take _ ?str ~= [ _ ]) |- _ ] => apply length_singleton in H' | [ H : ?x = 0, H' : context[?x] |- _ ] => rewrite H in H' - | [ H : context[min _ _] |- _ ] => revert H; apply Min.min_case_strong; intros + | [ H : context[min _ _] |- _ ] => revert H; apply Nat.min_case_strong; intros end. Qed. Lemma take_min_length (str : String) n : take n str =s take (min (length str) n) str. Proof. - apply Min.min_case_strong; [ | reflexivity ]. + apply Nat.min_case_strong; [ | reflexivity ]. intro H. rewrite !take_long by (assumption || reflexivity). reflexivity. @@ -581,7 +580,7 @@ Section String. Lemma drop_min_length (str : String) n : drop n str =s drop (min (length str) n) str. Proof. - apply Min.min_case_strong; [ | reflexivity ]. + apply Nat.min_case_strong; [ | reflexivity ]. intro H. apply bool_eq_empty; rewrite drop_length; omega. Qed. @@ -626,7 +625,7 @@ Section String. | specialize (IHlen (drop 1 str)); rewrite drop_length in IHlen; specialize_by omega; - try rewrite <- !H, !minus_diag; + try rewrite <- !H, !Nat.sub_diag; try rewrite <- fold'_drop in IHlen by omega ]. Lemma to_string_length str @@ -658,7 +657,7 @@ Section String. | [ |- context[length (take _ _)] ] => rewrite take_length | [ |- context[_ - 0] ] => rewrite Nat.sub_0_r | [ H : context[_ - 0] |- _ ] => rewrite Nat.sub_0_r in H - | [ |- context[fold' _ _ (drop _ _) _] ] => rewrite <- fold'_drop by (rewrite ?take_length; try apply Min.min_case_strong; omega) + | [ |- context[fold' _ _ (drop _ _) _] ] => rewrite <- fold'_drop by (rewrite ?take_length; try apply Nat.min_case_strong; omega) | [ H : context[fold' _ _ (drop _ _) _] |- _ ] => rewrite <- fold'_drop in H by omega | [ H : ?x = ?y |- context[?x - ?y] ] => replace (x - y) with 0 by omega | [ H : ?y = ?x |- context[?x - ?y] ] => replace (x - y) with 0 by omega @@ -672,12 +671,12 @@ Section String. 
| [ |- context[?x + 1] ] => rewrite Nat.add_1_r | [ H : S _ = ?x |- context[match ?x with _ => _ end] ] => rewrite <- H | [ H : S _ = ?x, H' : context[match ?x with _ => _ end] |- _ ] => rewrite <- H in H' - | [ H : context[?x - ?x] |- _ ] => rewrite minus_diag in H - | [ |- context[?x - ?x] ] => rewrite minus_diag + | [ H : context[?x - ?x] |- _ ] => rewrite Nat.sub_diag in H + | [ |- context[?x - ?x] ] => rewrite Nat.sub_diag | [ H : context[get _ (take _ _)] |- _ ] => rewrite get_take_lt in H by omega - | [ H : context[min (pred ?x) ?x] |- _ ] => rewrite (Min.min_l (pred x) x) in H by omega + | [ H : context[min (pred ?x) ?x] |- _ ] => rewrite (Nat.min_l (pred x) x) in H by omega | [ |- context[take _ (drop _ _)] ] => rewrite take_drop - | [ |- context[min ?x ?y] ] => rewrite (Min.min_l x y) by omega + | [ |- context[min ?x ?y] ] => rewrite (Nat.min_l x y) by omega | [ |- context[get _ (drop 0 ?s)] ] => rewrite (drop_0 s) | [ |- context[get _ (drop _ (drop _ _))] ] => rewrite drop_drop | [ H : S ?x = ?y, H' : S ?z = ?y |- _ ] @@ -773,9 +772,9 @@ Section String. | [ |- context[min ?x ?y] ] => match goal with | [ |- context[min y x] ] - => rewrite (Min.min_comm x y) + => rewrite (Nat.min_comm x y) end - | _ => repeat apply Min.min_case_strong; omega + | _ => repeat apply Nat.min_case_strong; omega end. Local Ltac substring_t := repeat substring_t'. @@ -824,14 +823,14 @@ Section String. Proof. pose proof (fun y x => @substring_substring str 0 z x y) as H'; simpl in *. setoid_rewrite Nat.sub_0_r in H'. - setoid_rewrite Min.min_comm in H'. + setoid_rewrite Nat.min_comm in H'. rewrite <- !H', H; reflexivity. Qed. Lemma substring_min_length str x y : substring x (min y (length str)) str =s substring x y str. Proof. - apply Min.min_case_strong; try reflexivity. + apply Nat.min_case_strong; try reflexivity. intro H. apply substring_correct4; omega. Qed. @@ -840,16 +839,16 @@ Section String. : length (substring offset len s) = len. Proof. rewrite substring_length; destruct Hshort as [Hshort|Hshort]; - try rewrite Min.min_r by (clear -Hshort; omega); + try rewrite Nat.min_r by (clear -Hshort; omega); subst; simpl; - rewrite <- ?NPeano.Nat.sub_min_distr_r, ?NPeano.Nat.add_sub, ?minus_diag, ?Min.min_0_r; + rewrite <- ?Nat.sub_min_distr_r, ?Nat.add_sub, ?Nat.sub_diag, ?Nat.min_0_r; reflexivity. Qed. End substring. Lemma char_at_matches_is_char_no_ex offset len str P (Hlen : offset + len <= length str) - : (EqNat.beq_nat len 1 && char_at_matches offset str P)%bool = true + : (Nat.eqb len 1 && char_at_matches offset str P)%bool = true <-> match get offset str with | Some ch => (P ch /\ substring offset len str ~= [ch])%string_like | None => False @@ -859,7 +858,7 @@ Section String. { split; intro H. { apply Bool.andb_true_iff in H. destruct H as [H0 H1]. - apply EqNat.beq_nat_true in H0; subst. + apply Nat.eqb_eq in H0; subst. erewrite char_at_matches_correct in H1 by eassumption. split; try assumption. rewrite get_drop in Heq. @@ -871,22 +870,22 @@ Section String. apply Bool.andb_true_iff; split; [ | eassumption ]. apply length_singleton in H1. rewrite substring_length, <- Nat.sub_min_distr_r, Nat.add_sub in H1. - apply EqNat.beq_nat_true_iff. + apply Nat.eqb_eq. revert H1. - apply Min.min_case_strong; intros; omega. } } + apply Nat.min_case_strong; intros; omega. } } { split; [ intro H | intros [] ]. rewrite get_drop in Heq. apply no_first_char_empty in Heq. rewrite drop_length in Heq. apply Bool.andb_true_iff in H. destruct H as [H0 H1]. - apply EqNat.beq_nat_true in H0; subst. 
+ apply Nat.eqb_eq in H0; subst. omega. } Qed. Lemma char_at_matches_is_char offset len str P (Hlen : offset + len <= length str) - : (EqNat.beq_nat len 1 && char_at_matches offset str P)%bool = true + : (Nat.eqb len 1 && char_at_matches offset str P)%bool = true <-> (exists ch, P ch /\ substring offset len str ~= [ch])%string_like. Proof. rewrite char_at_matches_is_char_no_ex by assumption. @@ -907,7 +906,7 @@ Section String. rewrite drop_length in Heq. apply length_singleton in H1. assert (len = 0) by omega; subst. - rewrite substring_length, <- Nat.sub_min_distr_r, Nat.add_sub, Min.min_0_r in H1. + rewrite substring_length, <- Nat.sub_min_distr_r, Nat.add_sub, Nat.min_0_r in H1. omega. } Qed. @@ -970,7 +969,7 @@ Section Iso. end. } { intro n'; rewrite !get_S. rewrite drop_take, !drop_of_string; simpl. - rewrite NPeano.Nat.sub_0_r. + rewrite Nat.sub_0_r. destruct str; simpl; rewrite !IHn; simpl; trivial. destruct n; reflexivity. } } @@ -1098,8 +1097,8 @@ Ltac simpl_string_like_no_setoid_step := | [ H : @length ?Char ?HSLM ?str = @length ?Char ?HSLM ?str' |- _ ] => first [ generalize dependent (@length Char HSLM str'); intros; subst; clear str' | generalize dependent (@length Char HSLM str); intros; subst; clear str ] - | [ H : _ |- _ ] => progress rewrite ?take_length, ?drop_length, ?Min.min_0_r, ?Min.min_0_l, ?Nat.sub_0_l in H - | _ => progress rewrite ?take_length, ?drop_length, ?Min.min_0_r, ?Min.min_0_l, ?Nat.sub_0_l + | [ H : _ |- _ ] => progress rewrite ?take_length, ?drop_length, ?Nat.min_0_r, ?Nat.min_0_l, ?Nat.sub_0_l in H + | _ => progress rewrite ?take_length, ?drop_length, ?Nat.min_0_r, ?Nat.min_0_l, ?Nat.sub_0_l end. Ltac simpl_string_like_step := first [ progress simpl_string_like_no_setoid_step diff --git a/src/Parsers/StringLike/String.v b/src/Parsers/StringLike/String.v index 83cfb16bc..f4cf71e53 100644 --- a/src/Parsers/StringLike/String.v +++ b/src/Parsers/StringLike/String.v @@ -3,7 +3,7 @@ Require Export Fiat.Common.Coq__8_4__8_5__Compat. Require Import Coq.Strings.Ascii. Require Import Coq.Strings.String. Require Import Coq.ZArith.ZArith. -Require Import Coq.Numbers.Natural.Peano.NPeano. +Require Import Coq.Arith.PeanoNat. Require Import Fiat.Common Fiat.Common.Equality. Require Import Fiat.Common.StringOperations Fiat.Common.StringFacts. Require Import Fiat.Parsers.StringLike.Core. 
@@ -61,19 +61,19 @@ Local Ltac t := | _ => exfalso; congruence | _ => rewrite substring_length | _ => rewrite <- plus_n_O - | _ => rewrite <- Minus.minus_n_O - | _ => rewrite Min.min_l by omega - | _ => rewrite Min.min_r by omega + | _ => rewrite Nat.sub_0_r + | _ => rewrite Nat.min_l by omega + | _ => rewrite Nat.min_r by omega | _ => rewrite substring_correct3 by assumption | _ => rewrite substring_substring | _ => rewrite substring_correct3' | _ => rewrite substring_correct0 | _ => rewrite Nat.sub_min_distr_l | _ => rewrite Nat.add_sub - | _ => rewrite Min.min_0_r - | _ => rewrite Min.min_0_l + | _ => rewrite Nat.min_0_r + | _ => rewrite Nat.min_0_l | _ => rewrite Nat.add_1_r - | _ => rewrite <- Min.min_assoc + | _ => rewrite <- Nat.min_assoc | [ H : ?x = Some _ |- context[match ?x with _ => _ end] ] => rewrite H | _ => progress rewrite ?string_beq_correct, ?ascii_beq_correct, ?string_copy_length | [ H : _ |- _ ] => progress rewrite ?string_beq_correct, ?ascii_beq_correct in H @@ -82,11 +82,11 @@ Local Ltac t := by (rewrite <- Nat.sub_min_distr_l; apply f_equal2; omega) | [ |- context[min (?m + ?n) ?m] ] => replace (min (m + n) m) with (m + min n 0) - by (rewrite <- Min.plus_min_distr_l; apply f_equal2; omega) - | _ => rewrite Min.min_comm; reflexivity + by (rewrite <- Nat.add_min_distr_l; apply f_equal2; omega) + | _ => rewrite Nat.min_comm; reflexivity | [ |- context[string_eq_dec ?x ?y] ] => destruct (string_eq_dec x y) | [ H : _ <> _ |- False ] => apply H; clear H - | _ => apply Max.max_case_strong; intro; apply substring_correct4; omega + | _ => apply Nat.max_case_strong; intro; apply substring_correct4; omega | [ H : String.length ?s = 1 |- _ ] => is_var s; destruct s | [ H : S (String.length ?s) = 1 |- _ ] => is_var s; destruct s | _ => eexists; rewrite (ascii_lb eq_refl); reflexivity @@ -124,7 +124,7 @@ Lemma substring_take_drop (str : String) n m Proof. simpl. rewrite substring_substring; simpl. - apply Min.min_case_strong; simpl; trivial; []. + apply Nat.min_case_strong; simpl; trivial; []. intro H. apply substring_correct4; omega. Qed. diff --git a/src/QueryStructure/Implementation/Constraints/ConstraintChecksRefinements.v b/src/QueryStructure/Implementation/Constraints/ConstraintChecksRefinements.v index 94f6f4572..3a41df4ff 100644 --- a/src/QueryStructure/Implementation/Constraints/ConstraintChecksRefinements.v +++ b/src/QueryStructure/Implementation/Constraints/ConstraintChecksRefinements.v @@ -819,7 +819,7 @@ Lemma refine_constraint_check_into_QueryResultComp : (R tup2 /\ P' tup2)))) (Bind (Count (For (QueryResultComp R (fun tup => Where (P tup) Return tup)))) - (fun count => ret (negb (beq_nat count 0)))). + (fun count => ret (negb (Nat.eqb count 0)))). Proof. Local Transparent Count. unfold refine, Count, UnConstrQuery_In; @@ -853,7 +853,7 @@ Lemma refine_constraint_check_into_query' : (GetUnConstrRelation c tbl tup2 /\ P' tup2)))) (Bind (Count (For (UnConstrQuery_In c tbl (fun tup => Where (P tup) Return tup)))) - (fun count => ret (negb (beq_nat count 0)))). + (fun count => ret (negb (Nat.eqb count 0)))). Proof. intros; rewrite refine_constraint_check_into_QueryResultComp; eauto. reflexivity. @@ -869,7 +869,7 @@ Corollary refine_constraint_check_into_query : (GetUnConstrRelation c tbl tup2 /\ P (indexedRawTuple tup2))))) (Bind (Count (For (UnConstrQuery_In c tbl (fun tup => Where (P tup) Return tup)))) - (fun count => ret (negb (beq_nat count 0)))). + (fun count => ret (negb (Nat.eqb count 0)))). Proof. intros. 
setoid_rewrite refine_constraint_check_into_query'; eauto. @@ -888,7 +888,7 @@ Lemma refine_constraint_check_into_query'' : (R tup2 /\ P' tup2)))) (Bind (Count (For (QueryResultComp R (fun tup => Where (P tup) Return tup)))) - (fun count => ret (negb (beq_nat count 0)))). + (fun count => ret (negb (Nat.eqb count 0)))). Proof. Local Transparent Count. unfold refine, Count, UnConstrQuery_In; @@ -942,7 +942,7 @@ Lemma refine_functional_dependency_check_into_query : Where (tupleAgree_computational ref tup args1 /\ ~ tupleAgree_computational ref tup args2) Return tup))) - (fun count => ret (beq_nat count 0))). + (fun count => ret (Nat.eqb count 0))). Proof. intros * is_dec ** . @@ -994,7 +994,7 @@ Lemma refine_functional_dependency_check_into_query' : Where (tupleAgree_computational tup ref args1 /\ ~ tupleAgree_computational tup ref args2) Return tup))) - (fun count => ret (beq_nat count 0))). + (fun count => ret (Nat.eqb count 0))). Proof. intros * is_dec ** . @@ -1122,7 +1122,7 @@ Lemma refine_uniqueness_check_into_query' : Where (GetAttributeRaw tup attr = GetAttributeRaw tup' attr) Return tup'))); - (ret (beq_nat c 0))). + (ret (Nat.eqb c 0))). Proof. intros. setoid_replace (forall tup', GetUnConstrRelation c idx tup' -> diff --git a/src/QueryStructure/Implementation/DataStructures/Bags/BagsProperties.v b/src/QueryStructure/Implementation/DataStructures/Bags/BagsProperties.v index fa07743a0..909a28a83 100644 --- a/src/QueryStructure/Implementation/DataStructures/Bags/BagsProperties.v +++ b/src/QueryStructure/Implementation/DataStructures/Bags/BagsProperties.v @@ -65,7 +65,7 @@ Section BagsProperties. Proof. unfold _BagInsertCount, _bcount; intros; rewrite binsert_enumerate; simpl; destruct (bfind_matcher search_term item); simpl; eauto. - rewrite plus_comm; reflexivity. + rewrite Nat.add_comm; reflexivity. Qed. End BagsProperties. diff --git a/src/QueryStructure/Implementation/DataStructures/Bags/CountingListBags.v b/src/QueryStructure/Implementation/DataStructures/Bags/CountingListBags.v index 0d9bdb835..2f65d8eed 100644 --- a/src/QueryStructure/Implementation/DataStructures/Bags/CountingListBags.v +++ b/src/QueryStructure/Implementation/DataStructures/Bags/CountingListBags.v @@ -213,7 +213,7 @@ Section CountingListBags. + trivial. + simpl; destruct (search_term a); simpl; rewrite <- IHcontainer; simpl; eauto. - rewrite (plus_comm 1); eauto using plus_assoc. + rewrite (Nat.add_comm 1); eauto using Nat.add_assoc. Qed. Lemma CountingList_BagCountCorrect : @@ -222,7 +222,7 @@ Section CountingListBags. unfold BagCountCorrect, CountingListAsBag_bcount, CountingListAsBag_bfind, CountingList_RepInv; intros; pose proof (CountingList_BagCountCorrect_aux (ccontents container) search_term 0) as temp; - rewrite plus_0_r in temp; simpl in temp. + rewrite Nat.add_0_r in temp; simpl in temp. simpl; [ apply temp ]. Qed. diff --git a/src/QueryStructure/Implementation/DataStructures/Bags/ListBags.v b/src/QueryStructure/Implementation/DataStructures/Bags/ListBags.v index 2e8b9ca80..44654cd65 100644 --- a/src/QueryStructure/Implementation/DataStructures/Bags/ListBags.v +++ b/src/QueryStructure/Implementation/DataStructures/Bags/ListBags.v @@ -84,7 +84,7 @@ Section ListBags. Proof. unfold BagCountCorrect, ListAsBag_bcount, ListAsBag_bfind; intros; pose proof (List_BagCountCorrect_aux container search_term 0) as temp; - rewrite plus_0_r in temp; simpl in temp; exact temp. + rewrite Nat.add_0_r in temp; simpl in temp; exact temp. Qed. 
Lemma List_BagDeleteCorrect : diff --git a/src/QueryStructure/Implementation/DataStructures/Bags/RangeTreeBags.v b/src/QueryStructure/Implementation/DataStructures/Bags/RangeTreeBags.v index fbd43c797..4b0feba8f 100644 --- a/src/QueryStructure/Implementation/DataStructures/Bags/RangeTreeBags.v +++ b/src/QueryStructure/Implementation/DataStructures/Bags/RangeTreeBags.v @@ -2,6 +2,7 @@ Require Export Fiat.QueryStructure.Implementation.DataStructures.Bags.BagsInterf Fiat.QueryStructure.Implementation.DataStructures.Bags.BagsProperties. Require Import + Coq.Arith.PeanoNat Coq.FSets.FMapInterface Coq.FSets.FMapFacts Coq.FSets.FMapAVL @@ -434,13 +435,13 @@ Module RangeTreeBag (X : OrderedType). eapply binsert_RepInv; eapply containerCorrect; eauto. eapply binsert_RepInv; eapply bempty_RepInv. - rewrite <- is_eq. - rewrite binsert_enumerate_weak with (RepInv0 := RepInv) (ValidUpdate0 := ValidUpdate) in H; + rewrite binsert_enumerate_weak with (RepInv := RepInv) (ValidUpdate := ValidUpdate) in H; intuition; subst. destruct (FindWithDefault_dec (projection item') bempty container) as [ [bag' (mapsto & equality)] | (not_found & equality) ]; rewrite equality in *; clear equality. eapply containerCorrect; eauto. - rewrite benumerate_empty_eq_nil with (RepInv0 := RepInv) (ValidUpdate0 := ValidUpdate) in H0; + rewrite benumerate_empty_eq_nil with (RepInv := RepInv) (ValidUpdate := ValidUpdate) in H0; simpl in H0; intuition. reflexivity. destruct (FindWithDefault_dec (projection item') bempty container) @@ -565,7 +566,7 @@ Module RangeTreeBag (X : OrderedType). rewrite values_add_not_in by assumption. simpl. rewrite binsert_enumerate. - rewrite benumerate_empty_eq_nil with (RepInv0 := RepInv) (ValidUpdate0 := ValidUpdate); eauto. + rewrite benumerate_empty_eq_nil with (RepInv := RepInv) (ValidUpdate := ValidUpdate); eauto. apply bempty_RepInv. Qed. @@ -578,7 +579,7 @@ Module RangeTreeBag (X : OrderedType). pose proof (RangeTreeBag_btraverse_closed container (minkey, maxkey)). rewrite <- Heqbs in H; clear Heqbs. rewrite length_flatten. - setoid_rewrite Plus.plus_comm at 1. + setoid_rewrite Nat.add_comm at 1. replace (fun (y : TKey * BagType) (x : nat) => bcount (snd y) searchterm + x) with (compose plus (fun (y : TKey * BagType) => bcount (snd y) searchterm)) by reflexivity. rewrite !foldright_compose. diff --git a/src/QueryStructure/Implementation/DataStructures/Bags/TreeBags.v b/src/QueryStructure/Implementation/DataStructures/Bags/TreeBags.v index 3684daa8e..bdb8de851 100644 --- a/src/QueryStructure/Implementation/DataStructures/Bags/TreeBags.v +++ b/src/QueryStructure/Implementation/DataStructures/Bags/TreeBags.v @@ -4,6 +4,7 @@ Require Export Fiat.QueryStructure.Implementation.DataStructures.Bags.BagsInterf Fiat.QueryStructure.Implementation.DataStructures.Bags.BagsProperties. Require Import Coq.FSets.FMapInterface Coq.FSets.FMapFacts + Coq.Arith.PeanoNat Fiat.Common Fiat.Common.List.ListFacts Fiat.Common.List.FlattenList @@ -203,13 +204,13 @@ Module TreeBag (Import M: WS). eapply binsert_RepInv; eapply containerCorrect; eauto. eapply binsert_RepInv; eapply bempty_RepInv. - rewrite <- is_eq. - rewrite binsert_enumerate_weak with (RepInv0 := RepInv) (ValidUpdate0 := ValidUpdate) in H; intuition; subst. + rewrite binsert_enumerate_weak with (RepInv := RepInv) (ValidUpdate := ValidUpdate) in H; intuition; subst. destruct (FindWithDefault_dec (projection item') bempty container) as [ [bag' (mapsto & equality)] | (not_found & equality) ]; rewrite equality in *; clear equality. 
eapply containerCorrect; eauto. - rewrite benumerate_empty_eq_nil with (RepInv0 := RepInv) (ValidUpdate0 := ValidUpdate) in H0; + rewrite benumerate_empty_eq_nil with (RepInv := RepInv) (ValidUpdate := ValidUpdate) in H0; simpl in H0; intuition. reflexivity. destruct (FindWithDefault_dec (projection item') bempty container) @@ -368,7 +369,7 @@ Module TreeBag (Import M: WS). simpl. rewrite binsert_enumerate. - rewrite benumerate_empty_eq_nil with (RepInv0 := RepInv) (ValidUpdate0 := ValidUpdate); eauto. + rewrite benumerate_empty_eq_nil with (RepInv := RepInv) (ValidUpdate := ValidUpdate); eauto. apply bempty_RepInv. Qed. @@ -396,7 +397,7 @@ Module TreeBag (Import M: WS). + rewrite length_flatten. rewrite (fold_over_Values _ _ (fun acc bag => acc + bcount bag search_term)) by eauto. rewrite <- fold_left_rev_right. - setoid_rewrite Plus.plus_comm at 1. + setoid_rewrite Nat.add_comm at 1. replace (fun (y : BagType) (x : nat) => bcount y search_term + x) with (compose plus (fun bag => bcount bag search_term)) by reflexivity. rewrite !foldright_compose. diff --git a/src/QueryStructure/Implementation/DataStructures/Bags/TrieBags.v b/src/QueryStructure/Implementation/DataStructures/Bags/TrieBags.v index f36a69e4c..696f21eaf 100644 --- a/src/QueryStructure/Implementation/DataStructures/Bags/TrieBags.v +++ b/src/QueryStructure/Implementation/DataStructures/Bags/TrieBags.v @@ -1078,7 +1078,7 @@ Module TrieBag (X:OrderedType). replaceXMapfold. unfold XMap.fold at 2. - rewrite Permutation_KeyBasedPartition with (key0 := key0) + rewrite Permutation_KeyBasedPartition with (key := key0) (bst_m := SubTrieMapBST H1). pose proof (@partition_after_KeyBasedPartition_and_add @@ -1980,9 +1980,9 @@ Module TrieBag (X:OrderedType). rewrite filter_Prefix; eauto. + rewrite <- (fun H => @fold_1 _ m H (list BagType) [ ] (fun k trie a => Trie_enumerate trie ++ a)) by eauto. match goal with - | [ H0 : TrieOK _ _ |- _ ] => - rewrite Permutation_KeyBasedPartition with (key0 := key0) - (bst_m := SubTrieMapBST H0) + | [ H : TrieOK _ _ |- _ ] => + rewrite Permutation_KeyBasedPartition with (key := key0) + (bst_m := SubTrieMapBST H) end. simpl. apply find_2 in e0. @@ -2012,9 +2012,9 @@ Module TrieBag (X:OrderedType). + rewrite filter_Prefix; eauto. + rewrite <- (fun H => @fold_1 _ m H (list BagType) [ ] (fun k trie a => Trie_enumerate trie ++ a)) by eauto. match goal with - | [ H : TrieOK _ _ |- _ ] => - rewrite Permutation_KeyBasedPartition with (key0 := key0) - (bst_m := SubTrieMapBST' H) + | [ H0 : TrieOK _ _ |- _ ] => + rewrite Permutation_KeyBasedPartition with (key := key0) + (bst_m := SubTrieMapBST' H0) end. simpl in *. match goal with @@ -2209,7 +2209,7 @@ Module TrieBag (X:OrderedType). eapply filter_negb_Prefix; eauto; reflexivity. + simpl. rewrite <- (fun H => @fold_1 _ m H (list BagType) [ ] (fun k trie a => Trie_enumerate trie ++ a)) by eauto. - rewrite Permutation_KeyBasedPartition with (key0 := key0) + rewrite Permutation_KeyBasedPartition with (key := key0) (bst_m := SubTrieMapBST containerCorrect). simpl in *. apply find_2 in e1. @@ -2264,7 +2264,7 @@ Module TrieBag (X:OrderedType). eapply filter_negb_Prefix; eauto; reflexivity. + simpl. rewrite <- (fun H => @fold_1 _ m H (list BagType) [ ] (fun k trie a => Trie_enumerate trie ++ a)) by eauto. - rewrite Permutation_KeyBasedPartition with (key0 := key0) + rewrite Permutation_KeyBasedPartition with (key := key0) (bst_m := SubTrieMapBST containerCorrect). simpl. rewrite <- (@not_find_in_iff _ (XMap.Bst (SubTrieMapBST' containerCorrect)) key0) in e1. 
@@ -2338,7 +2338,7 @@ Module TrieBag (X:OrderedType). + rewrite (H (l ++ [key0])); simpl; eauto. rewrite partition_filter_eq. rewrite <- (fun H => @fold_1 _ m H (list BagType) [ ] (fun k trie a => Trie_enumerate trie ++ a)) by eauto. - rewrite Permutation_KeyBasedPartition with (key0 := key0) + rewrite Permutation_KeyBasedPartition with (key := key0) (bst_m := SubTrieMapBST containerCorrect). simpl. apply find_2 in e1. @@ -2373,7 +2373,7 @@ Module TrieBag (X:OrderedType). eapply filter_Prefix; eauto. + simpl. rewrite <- (fun H => @fold_1 _ m H (list BagType) [ ] (fun k trie a => Trie_enumerate trie ++ a)) by eauto. - rewrite Permutation_KeyBasedPartition with (key0 := key0) + rewrite Permutation_KeyBasedPartition with (key := key0) (bst_m := SubTrieMapBST containerCorrect). rewrite <- (@not_find_in_iff _ (XMap.Bst (SubTrieMapBST' containerCorrect)) key0) in e1. pose proof (KeyBasedPartition_fst_singleton_None key0 (XMap.Bst (SubTrieMapBST containerCorrect)) e1) as singleton. @@ -2559,7 +2559,7 @@ Module TrieBag (X:OrderedType). f_equiv. * symmetry; f_equiv; eapply filter_Prefix; eauto. * rewrite <- (fun H => @fold_1 _ m H (list BagType) [ ] (fun k trie a => Trie_enumerate trie ++ a)) by eauto. - rewrite Permutation_KeyBasedPartition with (key0 := key0) + rewrite Permutation_KeyBasedPartition with (key := key0) (bst_m := SubTrieMapBST containerCorrect). rewrite (Permutation_benumerate_add key0 bag'' (XMap.Bst (SubTrieMapBST containerCorrect))). rewrite map_app, flatten_app. @@ -2636,7 +2636,7 @@ Module TrieBag (X:OrderedType). * rewrite filter_Prefix; eauto; reflexivity. * simpl. rewrite <- (fun H => @fold_1 _ m H (list BagType) [ ] (fun k trie a => Trie_enumerate trie ++ a)) by eauto. - rewrite Permutation_KeyBasedPartition with (key0 := key0) + rewrite Permutation_KeyBasedPartition with (key := key0) (bst_m := SubTrieMapBST containerCorrect). simpl. rewrite <- (@not_find_in_iff _ (XMap.Bst (SubTrieMapBST' containerCorrect)) key0) in e1. @@ -2700,7 +2700,7 @@ Module TrieBag (X:OrderedType). + rewrite (H (l ++ [key0])); simpl; eauto. rewrite partition_filter_eq. rewrite <- (fun H => @fold_1 _ m H (list BagType) [ ] (fun k trie a => Trie_enumerate trie ++ a)) by eauto. - rewrite Permutation_KeyBasedPartition with (key0 := key0) + rewrite Permutation_KeyBasedPartition with (key := key0) (bst_m := SubTrieMapBST containerCorrect). simpl. apply find_2 in e1. @@ -2735,7 +2735,7 @@ Module TrieBag (X:OrderedType). + rewrite filter_Prefix; eauto. + simpl. rewrite <- (fun H => @fold_1 _ m H (list BagType) [ ] (fun k trie a => Trie_enumerate trie ++ a)) by eauto. - rewrite Permutation_KeyBasedPartition with (key0 := key0) + rewrite Permutation_KeyBasedPartition with (key := key0) (bst_m := SubTrieMapBST containerCorrect). rewrite <- (@not_find_in_iff _ (XMap.Bst (SubTrieMapBST' containerCorrect)) key0) in e1. pose proof (KeyBasedPartition_fst_singleton_None key0 (XMap.Bst (SubTrieMapBST containerCorrect)) e1) as singleton. diff --git a/src/QueryStructure/Implementation/Operations/General/InsertRefinements.v b/src/QueryStructure/Implementation/Operations/General/InsertRefinements.v index 8a5bcae96..eade4097e 100644 --- a/src/QueryStructure/Implementation/Operations/General/InsertRefinements.v +++ b/src/QueryStructure/Implementation/Operations/General/InsertRefinements.v @@ -660,7 +660,6 @@ Section InsertRefinements. as rewriteSat by (apply functional_extensionality; intros; rewrite GetRelDropConstraints; reflexivity). - rewrite GetRelDropConstraints in H', H'', H''''. 
setoid_rewrite rewriteSat in H''''; clear rewriteSat. (* Resume not-terribleness *) generalize (Iterate_Decide_Comp_BoundedIndex _ _ H''') as H3'; diff --git a/src/QueryStructure/Specification/Representation/QueryStructure.v b/src/QueryStructure/Specification/Representation/QueryStructure.v index 9c5c5aad8..d40f88542 100644 --- a/src/QueryStructure/Specification/Representation/QueryStructure.v +++ b/src/QueryStructure/Specification/Representation/QueryStructure.v @@ -50,7 +50,7 @@ Section BuildQueryStructureConstraints. (* No need to check if the indices are in the list of Relations, because they are Bounded Strings. *) unfold crossRelationR, GetNRelSchemaHeading, GetNRelSchema; simpl. - rewrite H, H0; exact (Some c). + rewrite e, e0; exact (Some c). Defined. Fixpoint BuildQueryStructureConstraints'