From e51a7ba5d06ac26f152ef77ea151283e599522d8 Mon Sep 17 00:00:00 2001 From: Daniel Date: Fri, 9 Sep 2022 12:24:00 +0100 Subject: [PATCH 001/127] Adds prototyping --- .../keeper/prototyping/model/.gitignore | 4 + .../prototyping/model/library/Apalache.tla | 155 ++ .../keeper/prototyping/model/library/Bags.tla | 591 +++++++ .../model/library/FiniteSetTheorems.tla | 385 +++++ .../library/FiniteSetTheorems_proofs.tla | 848 ++++++++++ .../prototyping/model/library/FiniteSets.tla | 23 + .../model/library/FunctionTheorems.tla | 575 +++++++ .../model/library/FunctionTheorems_proofs.tla | 947 +++++++++++ .../prototyping/model/library/Functions.tla | 63 + .../prototyping/model/library/JectionThm.tla | 1130 +++++++++++++ .../prototyping/model/library/Jections.tla | 48 + .../model/library/NaturalsInduction.tla | 210 +++ .../library/NaturalsInduction_proofs.tla | 454 ++++++ .../prototyping/model/library/RealTime.tla | 22 + .../model/library/SequenceTheorems.tla | 636 ++++++++ .../model/library/SequenceTheorems_proofs.tla | 1446 +++++++++++++++++ .../prototyping/model/library/TLAPS.tla | 411 +++++ .../model/library/WellFoundedInduction.tla | 328 ++++ .../library/WellFoundedInduction_proofs.tla | 738 +++++++++ .../prototyping/model/library/ref/CCV.tla | 550 +++++++ .../prototyping/model/library/ref/MC_CCV.tla | 62 + .../model/library/ref/typedefs.tla | 12 + .../prototyping/model/library/tlcFolds.tla | 27 + .../keeper/prototyping/model/main.cfg | 3 + .../keeper/prototyping/model/main.tla | 90 + .../keeper/prototyping/model/traceUtil.py | 16 + .../provider/keeper/prototyping/prototype.py | 20 + 27 files changed, 9794 insertions(+) create mode 100644 x/ccv/provider/keeper/prototyping/model/.gitignore create mode 100644 x/ccv/provider/keeper/prototyping/model/library/Apalache.tla create mode 100644 x/ccv/provider/keeper/prototyping/model/library/Bags.tla create mode 100644 x/ccv/provider/keeper/prototyping/model/library/FiniteSetTheorems.tla create mode 100644 x/ccv/provider/keeper/prototyping/model/library/FiniteSetTheorems_proofs.tla create mode 100644 x/ccv/provider/keeper/prototyping/model/library/FiniteSets.tla create mode 100644 x/ccv/provider/keeper/prototyping/model/library/FunctionTheorems.tla create mode 100644 x/ccv/provider/keeper/prototyping/model/library/FunctionTheorems_proofs.tla create mode 100644 x/ccv/provider/keeper/prototyping/model/library/Functions.tla create mode 100644 x/ccv/provider/keeper/prototyping/model/library/JectionThm.tla create mode 100644 x/ccv/provider/keeper/prototyping/model/library/Jections.tla create mode 100755 x/ccv/provider/keeper/prototyping/model/library/NaturalsInduction.tla create mode 100644 x/ccv/provider/keeper/prototyping/model/library/NaturalsInduction_proofs.tla create mode 100644 x/ccv/provider/keeper/prototyping/model/library/RealTime.tla create mode 100644 x/ccv/provider/keeper/prototyping/model/library/SequenceTheorems.tla create mode 100644 x/ccv/provider/keeper/prototyping/model/library/SequenceTheorems_proofs.tla create mode 100644 x/ccv/provider/keeper/prototyping/model/library/TLAPS.tla create mode 100755 x/ccv/provider/keeper/prototyping/model/library/WellFoundedInduction.tla create mode 100644 x/ccv/provider/keeper/prototyping/model/library/WellFoundedInduction_proofs.tla create mode 100644 x/ccv/provider/keeper/prototyping/model/library/ref/CCV.tla create mode 100644 x/ccv/provider/keeper/prototyping/model/library/ref/MC_CCV.tla create mode 100644 x/ccv/provider/keeper/prototyping/model/library/ref/typedefs.tla create mode 100644 
x/ccv/provider/keeper/prototyping/model/library/tlcFolds.tla
 create mode 100644 x/ccv/provider/keeper/prototyping/model/main.cfg
 create mode 100644 x/ccv/provider/keeper/prototyping/model/main.tla
 create mode 100644 x/ccv/provider/keeper/prototyping/model/traceUtil.py
 create mode 100644 x/ccv/provider/keeper/prototyping/prototype.py

diff --git a/x/ccv/provider/keeper/prototyping/model/.gitignore b/x/ccv/provider/keeper/prototyping/model/.gitignore
new file mode 100644
index 0000000000..e37c5b8761
--- /dev/null
+++ b/x/ccv/provider/keeper/prototyping/model/.gitignore
@@ -0,0 +1,4 @@
+_apalache-out
+states/
+*TTrace*
+*.json
\ No newline at end of file
diff --git a/x/ccv/provider/keeper/prototyping/model/library/Apalache.tla b/x/ccv/provider/keeper/prototyping/model/library/Apalache.tla
new file mode 100644
index 0000000000..b8bb5cb1c5
--- /dev/null
+++ b/x/ccv/provider/keeper/prototyping/model/library/Apalache.tla
@@ -0,0 +1,155 @@
+--------------------------- MODULE Apalache -----------------------------------
+(*
+ * This is a standard module for use with the Apalache model checker.
+ * The meaning of the operators is explained in the comments.
+ * Many of the operators serve as additional annotations of their arguments.
+ * As we like to preserve compatibility with TLC and TLAPS, we define the
+ * operator bodies by erasure. The actual interpretation of the operators is
+ * encoded inside Apalache. For the moment, these operators are mirrored in
+ * the class at.forsyte.apalache.tla.lir.oper.ApalacheOper.
+ *
+ * Igor Konnov, Jure Kukovec, Informal Systems 2020-2022
+ *)
+
+(**
+ * An assignment of an expression e to a state variable x. Typically, one
+ * uses the non-primed version of x in the initializing predicate Init and
+ * the primed version of x (that is, x') in the transition predicate Next.
+ * Although TLA+ does not have a concept of a variable assignment, we find
+ * this concept extremely useful for symbolic model checking. In pure TLA+,
+ * one would simply write x = e, or x \in {e}.
+ *
+ * Apalache automatically converts some expressions of the form
+ * x = e or x \in {e} into assignments. However, if you like to annotate
+ * assignments by hand, you can use this operator.
+ *
+ * For a further discussion on that matter, see:
+ * https://github.com/informalsystems/apalache/blob/main/docs/src/idiomatic/001assignments.md
+ *)
+__x := __e == __x = __e
+
+(**
+ * A generator of a data structure. Given a positive integer `bound`, and
+ * assuming that the type of the operator application is known, we
+ * recursively generate a TLA+ data structure as a tree, whose width is
+ * bound by the number `bound`.
+ *
+ * The body of this operator is redefined by Apalache.
+ *)
+Gen(__size) == {}
+
+(**
+ * Non-deterministically pick a value out of the set `S`, if `S` is non-empty.
+ * If `S` is empty, return some value of the proper type. This can be
+ * understood as a non-deterministic version of CHOOSE x \in S: TRUE.
+ *
+ * @type: Set(a) => a;
+ *)
+Guess(__S) ==
+    \* Since this is not supported by TLC,
+    \* we fall back to the deterministic version for TLC.
+    \* Apalache redefines the operator `Guess` as explained above.
+    CHOOSE __x \in __S: TRUE
+
+(**
+ * Convert a set of pairs S to a function F. Note that if S contains at least
+ * two pairs <<x, y>> and <<u, v>> such that x = u and y /= v,
+ * then F is not uniquely defined. We use CHOOSE to resolve this ambiguity.
+ * Apalache implements a more efficient encoding of this operator
+ * than the default one.
+ *
+ * @type: Set(<<a, b>>) => (a -> b);
+ *)
+SetAsFun(__S) ==
+    LET __Dom == { __x: <<__x, __y>> \in __S }
+        __Rng == { __y: <<__x, __y>> \in __S }
+    IN
+    [ __x \in __Dom |-> CHOOSE __y \in __Rng: <<__x, __y>> \in __S ]
+
+(**
+ * A sequence constructor that avoids using a function constructor.
+ * Since Apalache is typed, this operator is more efficient than
+ * FunAsSeq([ i \in 1..N |-> F(i) ]). Apalache requires N to be
+ * a constant expression.
+ *
+ * @type: (Int, (Int -> a)) => Seq(a);
+ *)
+LOCAL INSTANCE Integers
+MkSeq(__N, __F(_)) ==
+    \* This is the TLC implementation. Apalache does it differently.
+    [ __i \in (1..__N) |-> __F(__i) ]
+
+\* required by our default definition of FoldSeq and FunAsSeq
+LOCAL INSTANCE Sequences
+
+(**
+ * As TLA+ is untyped, one can use function- and sequence-specific operators
+ * interchangeably. However, to maintain correctness w.r.t. our type-system,
+ * an explicit cast is needed when using functions as sequences.
+ * FunAsSeq reinterprets a function over integers as a sequence.
+ *
+ * The parameters have the following meaning:
+ *
+ * - fn is the function from 1..len that should be interpreted as a sequence.
+ * - len is the length of the sequence, len = Cardinality(DOMAIN fn),
+ *   len may be a variable, a computable expression, etc.
+ * - capacity is a static upper bound on the length, that is, len <= capacity.
+ *
+ * @type: ((Int -> a), Int, Int) => Seq(a);
+ *)
+FunAsSeq(__fn, __len, __capacity) ==
+    LET __FunAsSeq_elem_ctor(__i) == __fn[__i] IN
+    SubSeq(MkSeq(__capacity, __FunAsSeq_elem_ctor), 1, __len)
+
+(**
+ * Annotating an expression \E x \in S: P as Skolemizable. That is, it can
+ * be replaced with an expression c \in S /\ P(c) for a fresh constant c.
+ * Not every existential can be replaced with a constant, this should be done
+ * with care. Apalache detects Skolemizable expressions by static analysis.
+ *)
+Skolem(__e) == __e
+
+(**
+ * A hint to the model checker to expand a set S, instead of dealing
+ * with it symbolically. Apalache finds out which sets have to be expanded
+ * by static analysis.
+ *)
+Expand(__S) == __S
+
+(**
+ * A hint to the model checker to replace its argument Cardinality(S) >= k
+ * with a series of existential quantifiers for a constant k.
+ * Similar to Skolem, this has to be done carefully. Apalache automatically
+ * places this hint by static analysis.
+ *)
+ConstCardinality(__cardExpr) == __cardExpr
+
+(**
+ * The folding operator, used to implement computation over a set.
+ * Apalache implements a more efficient encoding than the one below.
+ * (from the community modules).
+ *
+ * @type: ((a, b) => a, a, Set(b)) => a;
+ *)
+RECURSIVE ApaFoldSet(_, _, _)
+ApaFoldSet(__Op(_,_), __v, __S) ==
+    IF __S = {}
+    THEN __v
+    ELSE LET __w == CHOOSE __x \in __S: TRUE IN
+         LET __T == __S \ {__w} IN
+         ApaFoldSet(__Op, __Op(__v,__w), __T)
+
+(**
+ * The folding operator, used to implement computation over a sequence.
+ * Apalache implements a more efficient encoding than the one below.
+ * (from the community modules).
+ * + * @type: ((a, b) => a, a, Seq(b)) => a; + *) +RECURSIVE ApaFoldSeqLeft(_, _, _) +ApaFoldSeqLeft(__Op(_,_), __v, __seq) == + IF __seq = <<>> + THEN __v + ELSE ApaFoldSeqLeft(__Op, __Op(__v, Head(__seq)), Tail(__seq)) + +=============================================================================== diff --git a/x/ccv/provider/keeper/prototyping/model/library/Bags.tla b/x/ccv/provider/keeper/prototyping/model/library/Bags.tla new file mode 100644 index 0000000000..c66323edf1 --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/library/Bags.tla @@ -0,0 +1,591 @@ +----------------------------- MODULE Bags -------------------------------- +(**************************************************************************) +(* A bag, also called a multiset, is a set that can contain multiple *) +(* copies of the same element. A bag can have infinitely many elements, *) +(* but only finitely many copies of any single element. *) +(* *) +(* We represent a bag in the usual way as a function whose range is a *) +(* subset of the positive integers. An element e belongs to bag B iff e *) +(* is in the domain of B, in which case bag B contains B[e] copies of e. *) +(**************************************************************************) +EXTENDS TLC, TLAPS, + FiniteSetTheorems, + SequenceTheorems + +LOCAL INSTANCE Naturals + +IsABag(B) == + (************************************************************************) + (* True iff B is a bag. *) + (************************************************************************) + B \in [DOMAIN B -> {n \in Nat : n > 0}] + +BagToSet(B) == DOMAIN B + (************************************************************************) + (* The set of elements at least one copy of which is in B. *) + (************************************************************************) + +SetToBag(S) == [e \in S |-> 1] + (************************************************************************) + (* The bag that contains one copy of every element of the set S. *) + (************************************************************************) + +BagIn(e,B) == e \in BagToSet(B) + (************************************************************************) + (* The \in operator for bags. *) + (************************************************************************) + +EmptyBag == SetToBag({}) + +B1 (+) B2 == + (************************************************************************) + (* The union of bags B1 and B2. *) + (************************************************************************) + [e \in (DOMAIN B1) \cup (DOMAIN B2) |-> + (IF e \in DOMAIN B1 THEN B1[e] ELSE 0) + + (IF e \in DOMAIN B2 THEN B2[e] ELSE 0) ] + +B1 (-) B2 == + (************************************************************************) + (* The bag B1 with the elements of B2 removed--that is, with one copy *) + (* of an element removed from B1 for each copy of the same element in *) + (* B2. If B2 has at least as many copies of e as B1, then B1 (-) B2 *) + (* has no copies of e. *) + (************************************************************************) + LET B == [e \in DOMAIN B1 |-> IF e \in DOMAIN B2 THEN B1[e] - B2[e] + ELSE B1[e]] + IN [e \in {d \in DOMAIN B : B[d] > 0} |-> B[e]] + +LOCAL Sum(f) == + (******************************************************************) + (* The sum of f[x] for all x in DOMAIN f. The definition assumes *) + (* that f is a Nat-valued function and that f[x] equals 0 for all *) + (* but a finite number of elements x in DOMAIN f. 
*) + (******************************************************************) + LET DSum[S \in SUBSET DOMAIN f] == + LET elt == CHOOSE e \in S : TRUE + IN IF S = {} THEN 0 + ELSE f[elt] + DSum[S \ {elt}] + IN DSum[DOMAIN f] + +BagUnion(S) == + (************************************************************************) + (* The bag union of all elements of the set S of bags. *) + (************************************************************************) + [e \in UNION {BagToSet(B) : B \in S} |-> + Sum( [B \in S |-> IF BagIn(e, B) THEN B[e] ELSE 0] ) ] + +B1 \sqsubseteq B2 == + (************************************************************************) + (* The subset operator for bags. B1 \sqsubseteq B2 iff, for all e, bag *) + (* B2 has at least as many copies of e as bag B1 does. *) + (************************************************************************) + /\ (DOMAIN B1) \subseteq (DOMAIN B2) + /\ \A e \in DOMAIN B1 : B1[e] \leq B2[e] + +SubBag(B) == + (************************************************************************) + (* The set of all subbags of bag B. *) + (* *) + (* The following definition is not the one described in the TLA+ book, *) + (* but rather one that TLC can evaluate. *) + (************************************************************************) + + LET RemoveFromDom(x, f) == [y \in (DOMAIN f) \ {x} |-> f[y]] + Combine(x, BagSet) == + BagSet \cup + {[y \in (DOMAIN f) \cup {x} |-> IF y = x THEN i ELSE f[y]] : + f \in BagSet, i \in 1..B[x]} + Biggest == LET Range1 == {B[x] : x \in DOMAIN B} + IN IF Range1 = {} THEN 0 + ELSE CHOOSE r \in Range1 : + \A s \in Range1 : r \geq s + RSB[BB \in UNION {[S -> 1..Biggest] : S \in SUBSET DOMAIN B}] == + IF BB = << >> THEN {<< >>} + ELSE LET x == CHOOSE x \in DOMAIN BB : TRUE + IN Combine(x, RSB[RemoveFromDom(x, BB)]) + IN RSB[B] + + (******************* Here is the definition from the TLA+ book. ******** + LET AllBagsOfSubset == + (******************************************************************) + (* The set of all bags SB such that BagToSet(SB) \subseteq *) + (* BagToSet(B). *) + (******************************************************************) + UNION {[SB -> {n \in Nat : n > 0}] : SB \in SUBSET BagToSet(B)} + IN {SB \in AllBagsOfSubset : \A e \in DOMAIN SB : SB[e] \leq B[e]} + ***************************************************************************) + +BagOfAll(F(_), B) == + (************************************************************************) + (* The bag analog of the set {F(x) : x \in B} for a set B. It's the bag *) + (* that contains, for each element e of B, one copy of F(e) for every *) + (* copy of e in B. This defines a bag iff, for any value v, the set of *) + (* e in B such that F(e) = v is finite. *) + (************************************************************************) + [e \in {F(d) : d \in BagToSet(B)} |-> + Sum( [d \in BagToSet(B) |-> IF F(d) = e THEN B[d] ELSE 0] ) ] + +BagCardinality(B) == + (************************************************************************) + (* If B is a finite bag (one such that BagToSet(B) is a finite set), *) + (* then this is its cardinality (the total number of copies of elements *) + (* in B). Its value is unspecified if B is infinite. *) + (************************************************************************) + Sum(B) + +CopiesIn(e, B) == + (************************************************************************) + (* If B is a bag, then CopiesIn(e, B) is the number of copies of e in *) + (* B. If ~BagIn(e, B), then CopiesIn(e, B) = 0. 
*) + (************************************************************************) + + IF BagIn(e, B) THEN B[e] ELSE 0 + +Scaling(n, B) == + (************************************************************************) + (* If B is a bag, then Scaling(e, B) is the Bag containing the same *) + (* elements of B with n times their copies *) + (************************************************************************) + IF n>0 THEN [i \in DOMAIN B |-> n*B[i] ] ELSE EmptyBag + +(***************************************************************************) +(* Converts the Sequence into a bag *) +(***************************************************************************) + +SeqToBag(seq) == [ x \in Range(seq) |-> Cardinality({i \in DOMAIN seq: seq[i]=x}) ] + + + +(***************************************************************************) +(* \sqsubseteq is a PARTIAL ORDER relattion *) +(***************************************************************************) + +(*AntiSymmetry*) +THEOREM Bags_SqsubseteqPO_AntiSymmetry == ASSUME NEW A, NEW B, IsABag(A), IsABag(B), A \sqsubseteq B, B \sqsubseteq A + PROVE A = B +<1>1. DOMAIN A = DOMAIN B + BY DEF \sqsubseteq +<1>2. (\A i \in DOMAIN A: A[i]<=B[i]) /\ (\A i \in DOMAIN B: B[i]<=A[i]) + BY DEF \sqsubseteq +<1>3. \A i \in DOMAIN A: A[i]=B[i] + BY <1>1, <1>2, SMT DEF IsABag +<1>4. A \in [DOMAIN A -> {n \in Nat: n>0}] /\ B \in [DOMAIN B -> {n \in Nat: n>0}] + BY DEF IsABag +<1>5. QED + BY <1>4, <1>3, <1>1 + +(*Reflexivity*) +THEOREM Bags_SqsubsetPO_Reflexivity == ASSUME NEW B, IsABag(B) + PROVE B \sqsubseteq B +BY SMT DEF \sqsubseteq, IsABag + +(*Transitivity*) +THEOREM Bags_SqsubseteqPO_Transitivity == ASSUME NEW A, NEW B, NEW C, IsABag(A), IsABag(B), IsABag(C), A \sqsubseteq B, B \sqsubseteq C + PROVE A \sqsubseteq C +<1>1. DOMAIN A \subseteq DOMAIN C /\ DOMAIN A \subseteq DOMAIN B + BY DEF \sqsubseteq +<1>2. (\A i \in DOMAIN A: A[i] <= B[i]) /\ (\A i \in DOMAIN B: B[i]<=C[i] ) + BY <1>1 DEF \sqsubseteq, IsABag +<1>3. \A i \in DOMAIN A: B[i]<=C[i] + BY <1>1, <1>2 +<1>4. \A i \in DOMAIN A: A[i]<=C[i] + BY <1>3, <1>2, SMT DEF IsABag +<1>.QED + BY <1>1, <1>4 DEF \sqsubseteq + +(***************************************************************************) +(* Lemmas on EmptyBags *) +(***************************************************************************) + + +THEOREM Bags_EmptyBag == ASSUME NEW B, IsABag(B) + PROVE /\ IsABag(EmptyBag) + /\ B=EmptyBag <=> DOMAIN B ={} + /\ DOMAIN EmptyBag ={} + /\ EmptyBag \sqsubseteq B + /\ \A e: ~BagIn(e, EmptyBag) +<1>1. DOMAIN EmptyBag = {} + BY DEF EmptyBag, SetToBag +<1>2. IsABag(EmptyBag) + <2>1. \A i \in DOMAIN EmptyBag: EmptyBag[i] \in {n \in Nat: n>0} + BY <1>1 + <2>2. QED + BY <2>1 DEF IsABag, EmptyBag, SetToBag +<1>3. B=EmptyBag => DOMAIN B ={} + BY DEF EmptyBag, SetToBag +<1>4. ASSUME DOMAIN B ={} PROVE B=EmptyBag + <2>1. B \in [{} -> {n \in Nat: n>0}] /\ EmptyBag \in [{} -> {n \in Nat: n>0}] + BY <1>4 DEF EmptyBag, IsABag, SetToBag + <2>2. DOMAIN B = DOMAIN EmptyBag + BY <1>4 DEF EmptyBag, SetToBag + <2>3. \A i \in DOMAIN B : B[i]=EmptyBag[i] + BY <1>4 DEF EmptyBag, SetToBag + <2>4. QED + BY <2>3, <2>2, <2>1 +<1>5. EmptyBag \sqsubseteq B + BY <1>1 DEF \sqsubseteq +<1>6. ASSUME ~(\A e: ~BagIn(e, EmptyBag)) PROVE FALSE + <2>1. \E e: BagIn(e, EmptyBag) + BY <1>6 + <2>2. PICK e : BagIn(e, EmptyBag) + BY <2>1 + <2>3. QED + BY <2>2, <1>1 DEF BagIn, BagToSet +<1>7. 
QED + BY <1>1, <1>2, <1>3, <1>4, <1>5, <1>6 + +(***************************************************************************) +(* Lemmas on Scalng Operator for Bags *) +(***************************************************************************) + +THEOREM Bags_Scaling == ASSUME NEW B, IsABag(B), NEW n \in Nat, NEW m \in Nat + PROVE /\ IsABag(Scaling(n, B)) + /\ Scaling(n, EmptyBag)=EmptyBag + /\ Scaling(0, B)=EmptyBag + /\ Scaling(1, B)= B + /\ Scaling((n*m), B) = Scaling(n, Scaling(m, B)) + /\ n>0 => DOMAIN(Scaling(n, B))= DOMAIN B +PROOF +<1>1. IsABag(Scaling(n, B)) + <2>1. CASE n=0 + <3>1. Scaling(n, B)= EmptyBag + BY <2>1 DEF Scaling + <3>2. QED + BY <3>1, Bags_EmptyBag + <2>2. CASE n>0 + BY <2>2, SMT DEF IsABag, Scaling + <2>3. QED + BY <2>1, <2>2, SMT + +<1>2. Scaling(n, EmptyBag)=EmptyBag + <2>1. DOMAIN Scaling(n, EmptyBag)={} + BY Bags_EmptyBag DEF Scaling + <2>2. IsABag(Scaling(n, EmptyBag)) + BY Bags_EmptyBag, SMT DEF Scaling, EmptyBag, SetToBag, IsABag + <2>. QED + BY <2>1, <2>2, Bags_EmptyBag +<1>3. Scaling(0, B)=EmptyBag + BY DEF Scaling +<1>4. Scaling(1, B)= B + BY SMT DEF Scaling, IsABag +<1>5. Scaling((n*m), B) = Scaling(n, Scaling(m, B)) + <2>1. CASE m>0 /\ n>0 + <3>1. n*m>0 + BY <2>1, SMT + <3>2. QED + BY <3>1, <2>1, SMT DEF Scaling, IsABag + <2>2. CASE m>0 /\ n=0 + <3>1. n*m=0 + BY <2>2, SMT + <3>2. QED + BY <3>1, <2>2, SMT DEF Scaling, IsABag + <2>3. CASE m=0 /\ n>0 + <3>1. Scaling(n, Scaling(m, B))=EmptyBag + BY <2>3, <1>2, <1>3 + <3>2. Scaling(n*m, B)=EmptyBag + BY <2>3, SMT DEF Scaling, IsABag + <3>3. QED + BY <3>1, <3>2 + <2>4. CASE m=0 /\ n=0 + <3>1. n*m=0 + BY <2>4, SMT + <3>2. QED + BY <3>1, <2>4, SMT DEF Scaling, IsABag + <2>5. QED + BY SMT, <2>1, <2>2, <2>3, <2>4 +<1>6. ASSUME n>0 PROVE DOMAIN Scaling(n, B)=DOMAIN B + <2>1. QED + BY <1>6, <1>1 DEF Scaling, IsABag +<1> QED + BY <1>1, <1>2, <1>3, <1>4, <1>5, <1>6 + + +(***************************************************************************) +(* SetToBag and BagToSet are inverse of each other *) +(***************************************************************************) + +THEOREM Bags_Inverse == ASSUME NEW S + PROVE BagToSet(SetToBag(S))=S +BY DEF SetToBag, BagToSet + +THEOREM Bags_Inverse1 == ASSUME NEW B, IsABag(B) + PROVE SetToBag(BagToSet(B)) \sqsubseteq B +<1>1. DOMAIN SetToBag(BagToSet(B)) \subseteq DOMAIN B + BY DEF SetToBag, BagToSet, \sqsubseteq, IsABag +<1>2. \A i \in DOMAIN SetToBag(BagToSet(B)): SetToBag(BagToSet(B))[i] <= B[i] + <2>1. TAKE i \in DOMAIN SetToBag(BagToSet(B)) + <2>2. QED + BY <2>1, SMT DEF SetToBag, BagToSet, IsABag +<1>3. QED + BY <1>1, <1>2 DEF \sqsubseteq + +(***************************************************************************) +(* SetToBag Preserves Equality *) +(***************************************************************************) + +THEOREM Bags_SetToBagEquality == ASSUME NEW A, NEW B + PROVE A=B <=> SetToBag(A)=SetToBag(B) +<1>1. A=B => SetToBag(A) = SetToBag(B) + BY DEF SetToBag +<1>2. ASSUME SetToBag(A)=SetToBag(B) PROVE A=B + <2>1. BagToSet(SetToBag(A))=BagToSet(SetToBag(B)) + BY <1>2 + <2>2. QED + BY <2>1, Bags_Inverse +<1>3. 
QED
+  BY <1>1, <1>2
+
+(***************************************************************************)
+(* Union of Bags *)
+(***************************************************************************)
+
+THEOREM Bags_Union ==
+  ASSUME NEW B1, NEW B2, IsABag(B1), IsABag(B2)
+  PROVE  /\ IsABag(B1(+)B2)
+         /\ DOMAIN(B1 (+) B2) = DOMAIN B1 \cup DOMAIN B2
+         /\ \A e : CopiesIn(e, B1(+)B2) = CopiesIn(e,B1) + CopiesIn(e,B2)
+BY DEF IsABag, (+), CopiesIn, BagIn, BagToSet
+
+(***************************************************************************)
+(* Difference of Bags *)
+(***************************************************************************)
+
+THEOREM Bags_Difference ==
+  ASSUME NEW B1, NEW B2, IsABag(B1), IsABag(B2)
+  PROVE  /\ IsABag(B1(-)B2)
+         /\ DOMAIN (B1 (-) B2) = {e \in DOMAIN B1 : e \notin DOMAIN B2 \/ B1[e] > B2[e]}
+         /\ \A e : CopiesIn(e, B1 (-) B2) = IF BagIn(e, B1(-)B2) THEN CopiesIn(e,B1) - CopiesIn(e,B2) ELSE 0
+<1>. DEFINE B == [e \in DOMAIN B1 |-> IF e \in DOMAIN B2 THEN B1[e] - B2[e]
+                                      ELSE B1[e]]
+            D == {d \in DOMAIN B1 : B[d] > 0}
+<1>1. B \in [DOMAIN B1 -> Int]
+  BY DEF IsABag
+<1>2. B1 (-) B2 = [e \in D |-> B[e]]
+  BY DEF (-)
+<1>3. D = {e \in DOMAIN B1 : e \notin DOMAIN B2 \/ B1[e] > B2[e]}
+  BY DEF IsABag
+<1>4. \A e \in D : B[e] = B1[e] - (IF e \in DOMAIN B2 THEN B2[e] ELSE 0)
+  BY DEF IsABag
+<1>. HIDE DEF B
+<1>. QED
+  BY <1>1, <1>2, <1>3, <1>4 DEF IsABag, CopiesIn, BagIn, BagToSet
+
+(***************************************************************************)
+(* Union is Commutative *)
+(***************************************************************************)
+
+THEOREM Bags_UnionCommutative == ASSUME NEW B1, NEW B2, IsABag(B1), IsABag(B2)
+                                 PROVE B1(+)B2 = B2(+)B1
+<1>1. DOMAIN(B1(+)B2) = DOMAIN(B2(+)B1)
+  BY DEF (+)
+<1>2. B1(+)B2 \in [DOMAIN(B1(+)B2) -> {n \in Nat: n>0}] /\ B2(+)B1 \in [DOMAIN(B1(+)B2) -> {n \in Nat: n>0}]
+  BY <1>1, Bags_Union DEF IsABag
+<1>3. \A i \in DOMAIN(B1(+)B2): (B1(+)B2)[i] = (B2(+)B1)[i]
+  <2>1. TAKE i \in DOMAIN(B1(+)B2)
+  <2>. QED
+    BY SMT, <2>1 DEF (+), IsABag
+<1>4. QED
+  BY <1>1, <1>2, <1>3
+
+(***************************************************************************)
+(* Union is Associative *)
+(***************************************************************************)
+
+THEOREM Bags_UnionAssociative == ASSUME NEW B1, NEW B2, NEW B3, IsABag(B1), IsABag(B2), IsABag(B3)
+                                 PROVE (B1(+)B2)(+)B3 = B1(+)(B2(+)B3)
+BY DEF IsABag, (+)
+
+(***************************************************************************)
+(* Given Bags B1, B2 then B1 \sqsubseteq B1(+)B2 *)
+(***************************************************************************)
+
+THEOREM Bags_UnionSqSubset == ASSUME NEW B1, NEW B2, IsABag(B1), IsABag(B2)
+                              PROVE B1 \sqsubseteq B1(+)B2
+<1>1. IsABag(B1(+)B2)
+  BY Bags_Union
+<1>2. DOMAIN B1 \subseteq DOMAIN(B1(+)B2)
+  BY DEF (+)
+<1>3. \A i \in DOMAIN B1: B1[i]<=(B1(+)B2)[i]
+  <2>1. TAKE i \in DOMAIN B1
+  <2>2. QED
+    BY <2>1, <1>1, SMT DEF IsABag, \sqsubseteq, (+)
+<1>4. QED
+  BY <1>2, <1>3 DEF \sqsubseteq, (+)
+
+(***************************************************************************)
+(* Given Bag B1, B1 \sqsubseteq Scaling(n, B1) for all n>0 *)
+(***************************************************************************)
+
+THEOREM Bags_ScalingSqSubseteq == ASSUME NEW B, IsABag(B), NEW n \in Nat, NEW m \in Nat, m<n
+                                  PROVE Scaling(m, B) \sqsubseteq Scaling(n, B)
+<1>1. CASE m>0 /\ n>0
+  <2>1. DOMAIN Scaling(m, B)= DOMAIN Scaling(n, B)
+    BY <1>1, Bags_Scaling
+  <2>2. \A i \in DOMAIN Scaling(m, B): Scaling(m, B)[i]<= Scaling(n, B)[i]
+    <3>1.
TAKE i \in DOMAIN Scaling(m, B) + <3>2. QED + BY <1>1, SMT DEF Scaling, IsABag + <2>3. QED + BY <2>1, <2>2 DEF \sqsubseteq +<1>2. CASE m=0 /\ n>0 + <2>1. Scaling(m, B)=EmptyBag + BY <1>2, Bags_Union DEF Scaling + <2>2. QED + BY <2>1, Bags_EmptyBag, Bags_Scaling +<1>3. CASE m>0 /\ n=0 \* Impossible Case + BY <1>3, SMT +<1>4. CASE m=0 /\ n=0 \* Impossible Case + BY <1>4, SMT +<1>5. QED + BY <1>1, <1>2, <1>3, <1>4, SMT + +(***************************************************************************) +(* Given Bags A and B, A(-)B \sqsubseteq A *) +(***************************************************************************) + +THEOREM Bags_DifferenceSqsubset == ASSUME NEW A, NEW B, IsABag(A), IsABag(B) + PROVE A(-)B \sqsubseteq A +<1>1. DOMAIN(A(-)B) \subseteq DOMAIN A + BY DEF (-) +<1>2. \A i \in DOMAIN(A(-)B) : (A(-)B)[i] <= A[i] + <2>1. TAKE i \in DOMAIN(A(-)B) + <2>2. QED + BY <2>1, SMT DEF (-), IsABag +<1>3. QED + BY <1>1, <1>2 DEF \sqsubseteq + +(***************************************************************************) +(* EmptyBag is Addidtive Identity *) +(***************************************************************************) + +THEOREM Bags_EmptyBagOperations == ASSUME NEW B, IsABag(B) + PROVE /\ B (+) EmptyBag = B + /\ B (-) EmptyBag = B +<1>1. B (+) EmptyBag = B + <2>1. IsABag(B(+)EmptyBag) + BY Bags_EmptyBag, Bags_Union + <2>2. DOMAIN(B(+)EmptyBag) = DOMAIN B + BY Bags_EmptyBag DEF (+) + <2>3. B \in [DOMAIN B -> {n \in Nat : n>0}] /\ B(+)EmptyBag \in [DOMAIN B -> {n \in Nat : n>0}] + BY <2>1, <2>2 DEF IsABag + <2>4. \A i \in DOMAIN B: (B(+)EmptyBag)[i]=B[i] + <3>1. TAKE i \in DOMAIN B + <3>2. QED + BY <3>1, SMT DEF EmptyBag, (+), IsABag, SetToBag + <2>5. QED + BY <2>2, <2>3, <2>4 +<1>2. B (-) EmptyBag = B + <2>1. /\ IsABag(B(-)EmptyBag) + /\ DOMAIN(B (-) EmptyBag) = DOMAIN B + BY Bags_EmptyBag, Bags_Difference, Isa + <2>3. B \in [DOMAIN B -> {n \in Nat : n>0}] /\ B(-)EmptyBag \in [DOMAIN B -> {n \in Nat : n>0}] + BY <2>1 DEF IsABag + <2>4. \A i \in DOMAIN B: (B(-)EmptyBag)[i]=B[i] + <3>1. TAKE i \in DOMAIN B + <3>2. QED + BY <3>1 DEF EmptyBag, (-), IsABag, SetToBag + <2>5. QED + BY <2>1, <2>3, <2>4 +<1>3. QED + BY <1>1, <1>2 + +(***************************************************************************) +(* SetToBag of a set is a Bag *) +(***************************************************************************) + +THEOREM Bags_SetToBagIsABag == ASSUME NEW S + PROVE IsABag(SetToBag(S)) +BY DEF IsABag, SetToBag + +(***************************************************************************) +(* CopiesIn Monotone w.r.t \sqsubseteq *) +(***************************************************************************) + +THEOREM Bags_CopiesInBagsInMonotone == + ASSUME NEW B1, NEW B2, NEW e, IsABag(B1), IsABag(B2), B1 \sqsubseteq B2 + PROVE /\ BagIn(e, B1) => BagIn(e, B2) + /\ CopiesIn(e, B1) <= CopiesIn(e, B2) +<1>1. ASSUME BagIn(e, B1) PROVE BagIn(e, B2) + BY <1>1 DEF BagIn, BagToSet, \sqsubseteq +<1>2. CopiesIn(e, B1) <= CopiesIn(e, B2) + <2>1. CASE BagIn(e, B1) + BY <2>1 DEF CopiesIn, BagIn, \sqsubseteq, BagToSet + <2>2. CASE ~BagIn(e, B1) + BY <2>2, SMT DEF \sqsubseteq, IsABag, CopiesIn, BagIn, BagToSet + <2>3. QED + BY <2>1, <2>2 +<1>3. 
QED + BY <1>1, <1>2 + + +(***************************************************************************) +(* Given Bag B and Natural n, CopiesIn(e, Scaling(n, B))=n*CopiesIn(e, B) *) +(***************************************************************************) + +THEOREM Bags_CopiesInScaling == ASSUME NEW B, IsABag(B), NEW n \in Nat, NEW e + PROVE CopiesIn(e, Scaling(n, B))=n*CopiesIn(e, B) +PROOF +<1>1. CASE n=0 + BY <1>1, Bags_Scaling, Bags_EmptyBag, SMT DEF CopiesIn, IsABag +<1>2. CASE n>0 + BY <1>2, SMT DEF CopiesIn, IsABag, Scaling, BagIn, BagToSet +<1>3. QED + BY <1>1, <1>2, SMT + +(***************************************************************************) +(* Given set S, CopiesIn(e, SetToBag(S))=IF e \in B THEN 1 ELSE 0 *) +(***************************************************************************) + +THEOREM Bags_CopiesInSetToBag == ASSUME NEW B, NEW e + PROVE CopiesIn(e, SetToBag(B))=IF e \in B THEN 1 ELSE 0 +PROOF +<1>1. ASSUME e \in B PROVE CopiesIn(e, SetToBag(B))=1 + BY <1>1 DEF CopiesIn, BagIn, BagToSet, SetToBag +<1>2. ASSUME e \notin B PROVE CopiesIn(e, SetToBag(B))=0 + BY <1>2 DEF CopiesIn, BagIn, BagToSet, SetToBag +<1>3. QED + BY <1>2, <1>1 + +(***************************************************************************) +(* Given sequence seq, SeqToBag(seq) is a Bag *) +(***************************************************************************) + +THEOREM Bags_IsABagSeqToBag == + ASSUME NEW S, NEW seq \in Seq(S) + PROVE IsABag(SeqToBag(seq)) +<1>1. \A x \in DOMAIN SeqToBag(seq): SeqToBag(seq)[x] \in Nat \ {0} + <2>1. TAKE x \in DOMAIN SeqToBag(seq) + <2>2. SeqToBag(seq)[x] \in Nat \ {0} + <3>1. CASE seq = << >> + <4>1. DOMAIN SeqToBag(seq)= {} + BY <3>1 DEF Range, SeqToBag + <4>2. QED + BY <4>1, Bags_EmptyBag + <3>2. CASE seq # << >> + <4>1. {i \in DOMAIN seq: seq[i]=x }#{} + BY <2>1, <3>2 DEF SeqToBag, Range + <4>. IsFiniteSet({i \in DOMAIN seq: seq[i]=x }) + <5>1. {i \in DOMAIN seq: seq[i]=x } \subseteq DOMAIN seq + OBVIOUS + <5>2. IsFiniteSet(DOMAIN seq) + BY SeqDef, FS_Interval + <5>3. QED + BY <5>1, <5>2, FS_Subset + <4>2. QED + BY <4>1, SMT, FS_EmptySet, FS_CardinalityType DEF SeqToBag + <3>3. QED + BY <3>1, <3>2 + <2>3. QED + BY <2>2 DEF SeqToBag +<1>2. QED + BY <1>1 DEF IsABag, SeqToBag + +============================================================================= + +(* Last modified on Fri 26 Jan 2007 at 8:45:03 PST by lamport *) + + 6 Apr 99 : Modified version for standard module set + 7 Dec 98 : Corrected error found by Stephan Merz. + 6 Dec 98 : Modified comments based on suggestions by Lyle Ramshaw. + 5 Dec 98 : Initial version. diff --git a/x/ccv/provider/keeper/prototyping/model/library/FiniteSetTheorems.tla b/x/ccv/provider/keeper/prototyping/model/library/FiniteSetTheorems.tla new file mode 100644 index 0000000000..9fdc0a0b2b --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/library/FiniteSetTheorems.tla @@ -0,0 +1,385 @@ +------------------------- MODULE FiniteSetTheorems -------------------------- +(***************************************************************************) +(* `^{\large \vspace{12pt} *) +(* Facts about finite sets and their cardinality. *) +(* Originally contributed by Tom Rodeheffer, MSR. *) +(* \vspace{12pt}}^' *) +(* Proofs of these theorems appear in module FiniteSetTheorems_proofs. *) +(***************************************************************************) + +EXTENDS + FiniteSets, + Functions, + WellFoundedInduction + + +(***************************************************************************) +(* `. 
.' *) +(* *) +(* A set S is finite iff there exists a natural number n such that there *) +(* exist a surjection (or a bijection) from 1..n to S. *) +(* *) +(* `. .' *) +(***************************************************************************) + +LEMMA FS_NatSurjection == + ASSUME NEW S + PROVE IsFiniteSet(S) <=> \E n \in Nat : ExistsSurjection(1..n,S) + + +LEMMA FS_NatBijection == + ASSUME NEW S + PROVE IsFiniteSet(S) <=> \E n \in Nat : ExistsBijection(1..n,S) + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If there exists n \in Nat such that a bijection exists from 1..n to S, *) +(* then Cardinality(S) = n. *) +(* *) +(* `. .' *) +(***************************************************************************) + +LEMMA FS_CountingElements == + ASSUME NEW S, NEW n \in Nat, ExistsBijection(1..n,S) + PROVE Cardinality(S) = n + + +(***************************************************************************) +(* Corollary: a surjection from 1..n to S provides a cardinality bound. *) +(***************************************************************************) +THEOREM FS_SurjCardinalityBound == + ASSUME NEW S, NEW n \in Nat, ExistsSurjection(1..n, S) + PROVE Cardinality(S) <= n + + +(***************************************************************************) +(* `. .' *) +(* *) +(* For any finite set S, Cardinality(S) \in Nat. Moreover, there is a *) +(* bijection from 1 .. Cardinality(S) to S. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_CardinalityType == + ASSUME NEW S, IsFiniteSet(S) + PROVE /\ Cardinality(S) \in Nat + /\ ExistsBijection(1..Cardinality(S), S) + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The image of a finite set under a bijection or surjection is finite. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_Bijection == + ASSUME NEW S, NEW T, IsFiniteSet(S), ExistsBijection(S,T) + PROVE /\ IsFiniteSet(T) + /\ Cardinality(T) = Cardinality(S) + + +THEOREM FS_SameCardinalityBij == + ASSUME NEW S, NEW T, IsFiniteSet(S), IsFiniteSet(T), + Cardinality(S) = Cardinality(T) + PROVE ExistsBijection(S,T) + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Any surjection between two finite sets of equal cardinality is *) +(* an injection. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_SurjSameCardinalityImpliesInj == + ASSUME NEW S, NEW T, IsFiniteSet(S), Cardinality(S) = Cardinality(T), + NEW f \in Surjection(S,T) + PROVE f \in Injection(S,T) + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The image of a finite set is finite. *) +(* *) +(* NB: Note that any function is a surjection on its range by theorem *) +(* Fun_RangeProperties. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_Surjection == + ASSUME NEW S, NEW T, NEW f \in Surjection(S,T), IsFiniteSet(S) + PROVE /\ IsFiniteSet(T) + /\ Cardinality(T) <= Cardinality(S) + /\ Cardinality(T) = Cardinality(S) <=> f \in Injection(S,T) + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The cardinality of a finite set S is 0 iff S is empty. *) +(* *) +(* `. .' 
*) +(***************************************************************************) +THEOREM FS_EmptySet == + /\ IsFiniteSet({}) + /\ Cardinality({}) = 0 + /\ \A S : IsFiniteSet(S) => (Cardinality(S) = 0 <=> S = {}) + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If S is finite, so are S \cup {x} and S \ {x}. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_AddElement == + ASSUME NEW S, NEW x, IsFiniteSet(S) + PROVE /\ IsFiniteSet(S \cup {x}) + /\ Cardinality(S \cup {x}) = + IF x \in S THEN Cardinality(S) ELSE Cardinality(S)+1 + + +THEOREM FS_RemoveElement == + ASSUME NEW S, NEW x, IsFiniteSet(S) + PROVE /\ IsFiniteSet(S \ {x}) + /\ Cardinality(S \ {x}) = + IF x \in S THEN Cardinality(S)-1 ELSE Cardinality(S) + + +(***************************************************************************) +(* `. .' *) +(* *) +(* In particular, a singleton set is finite. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_Singleton == + /\ \A x : IsFiniteSet({x}) /\ Cardinality({x}) = 1 + /\ \A S : IsFiniteSet(S) => (Cardinality(S) = 1 <=> \E x: S = {x}) + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Any subset of a finite set is finite. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_Subset == + ASSUME NEW S, IsFiniteSet(S), NEW T \in SUBSET S + PROVE /\ IsFiniteSet(T) + /\ Cardinality(T) <= Cardinality(S) + /\ Cardinality(S) = Cardinality(T) => S = T + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* a..b is a finite set for any a,b \in Int. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_Interval == + ASSUME NEW a \in Int, NEW b \in Int + PROVE /\ IsFiniteSet(a..b) + /\ Cardinality(a..b) = IF a > b THEN 0 ELSE b-a+1 + + +THEOREM FS_BoundedSetOfNaturals == + ASSUME NEW S \in SUBSET Nat, NEW n \in Nat, + \A s \in S : s <= n + PROVE /\ IsFiniteSet(S) + /\ Cardinality(S) \leq n+1 + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Induction for finite sets. *) +(* *) +(* `. .' *) +(***************************************************************************) + +THEOREM FS_Induction == + ASSUME NEW S, IsFiniteSet(S), + NEW P(_), P({}), + ASSUME NEW T, NEW x, IsFiniteSet(T), P(T), x \notin T + PROVE P(T \cup {x}) + PROVE P(S) + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The finite subsets form a well-founded ordering with respect to strict *) +(* set inclusion. *) +(* *) +(* `. .' *) +(***************************************************************************) + +FiniteSubsetsOf(S) == { T \in SUBSET S : IsFiniteSet(T) } +StrictSubsetOrdering(S) == { ss \in (SUBSET S) \X (SUBSET S) : + ss[1] \subseteq ss[2] /\ ss[1] # ss[2] } + +LEMMA FS_FiniteSubsetsOfFinite == + ASSUME NEW S, IsFiniteSet(S) + PROVE FiniteSubsetsOf(S) = SUBSET S + + +(*****************************************************************************) +(* The formulation of the following theorem doesn't require S being finite. *) +(* If S is finite, it implies *) +(* IsWellFoundedOn(StrictSubsetOrdering(S), SUBSET S) *) +(* using lemma FS_FiniteSubsetsOfFinite. 
*) +(*****************************************************************************) +THEOREM FS_StrictSubsetOrderingWellFounded == + ASSUME NEW S + PROVE IsWellFoundedOn(StrictSubsetOrdering(S), FiniteSubsetsOf(S)) + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Well-founded induction for finite subsets. *) +(* *) +(* `. .' *) +(***************************************************************************) + +THEOREM FS_WFInduction == + ASSUME NEW P(_), NEW S, IsFiniteSet(S), + ASSUME NEW T \in SUBSET S, + \A U \in (SUBSET T) \ {T} : P(U) + PROVE P(T) + PROVE P(S) + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The union of two finite sets is finite. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_Union == + ASSUME NEW S, IsFiniteSet(S), + NEW T, IsFiniteSet(T) + PROVE /\ IsFiniteSet(S \cup T) + /\ Cardinality(S \cup T) = + Cardinality(S) + Cardinality(T) - Cardinality(S \cap T) + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Corollary: two majorities intersect. More precisely, any two subsets *) +(* of a finite set U such that the sum of cardinalities of the subsets *) +(* exceeds that of U must have non-empty intersection. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_MajoritiesIntersect == + ASSUME NEW U, NEW S, NEW T, IsFiniteSet(U), + S \subseteq U, T \subseteq U, + Cardinality(S) + Cardinality(T) > Cardinality(U) + PROVE S \cap T # {} + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The intersection of a finite set with an arbitrary set is finite. *) +(* *) +(* `. .' *) +(***************************************************************************) + + +THEOREM FS_Intersection == + ASSUME NEW S, IsFiniteSet(S), NEW T + PROVE /\ IsFiniteSet(S \cap T) + /\ IsFiniteSet(T \cap S) + /\ Cardinality(S \cap T) <= Cardinality(S) + /\ Cardinality(T \cap S) <= Cardinality(S) + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The difference between a finite set and an arbitrary set is finite. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_Difference == + ASSUME NEW S, NEW T, IsFiniteSet(S) + PROVE /\ IsFiniteSet(S \ T) + /\ Cardinality(S \ T) = Cardinality(S) - Cardinality(S \cap T) + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The union of a finite number of finite sets is finite. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_UNION == + ASSUME NEW S, IsFiniteSet(S), \A T \in S : IsFiniteSet(T) + PROVE IsFiniteSet(UNION S) + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The product of two finite sets is finite. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_Product == + ASSUME NEW S, IsFiniteSet(S), + NEW T, IsFiniteSet(T) + PROVE /\ IsFiniteSet(S \X T) + /\ Cardinality(S \X T) = Cardinality(S) * Cardinality(T) + + + +(***************************************************************************) +(* `. .' 
*) +(* *) +(* The powerset of a finite set is finite. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_SUBSET == + ASSUME NEW S, IsFiniteSet(S) + PROVE /\ IsFiniteSet(SUBSET S) + /\ Cardinality(SUBSET S) = 2^Cardinality(S) + + + + + + +============================================================================= +\* Modification History +\* Last modified Fri Feb 14 19:42:05 GMT-03:00 2014 by merz +\* Last modified Thu Jul 04 15:15:07 CEST 2013 by bhargav +\* Last modified Tue Jun 04 11:44:51 CEST 2013 by bhargav +\* Last modified Fri May 03 12:02:51 PDT 2013 by tomr +\* Created Fri Oct 05 15:04:18 PDT 2012 by tomr \ No newline at end of file diff --git a/x/ccv/provider/keeper/prototyping/model/library/FiniteSetTheorems_proofs.tla b/x/ccv/provider/keeper/prototyping/model/library/FiniteSetTheorems_proofs.tla new file mode 100644 index 0000000000..3da179777f --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/library/FiniteSetTheorems_proofs.tla @@ -0,0 +1,848 @@ +---------------------- MODULE FiniteSetTheorems_proofs ---------------------- +(***************************************************************************) +(* `^{\large\bf \vspace{12pt} *) +(* Facts about finite sets and their cardinality. *) +(* Originally contributed by Tom Rodeheffer, MSR. *) +(* \vspace{12pt}}^' *) +(***************************************************************************) + +EXTENDS + FiniteSets, + Sequences, + FunctionTheorems, + WellFoundedInduction, + TLAPS + +(***************************************************************************) +(* Arithmetic lemma that is currently not proved. *) +(***************************************************************************) +LEMMA TwoExpLemma == + ASSUME NEW n \in Nat + PROVE 2^(n+1) = 2^n + 2^n +PROOF OMITTED + + +(***************************************************************************) +(* `. .' *) +(* *) +(* A set S is finite iff there exists a natural number n such that there *) +(* exist a surjection (or a bijection) from 1..n to S. *) +(* *) +(* `. .' *) +(***************************************************************************) + +LEMMA FS_NatSurjection == + ASSUME NEW S + PROVE IsFiniteSet(S) <=> \E n \in Nat : ExistsSurjection(1..n,S) + +<1>1. ASSUME IsFiniteSet(S) PROVE \E n \in Nat : ExistsSurjection(1..n,S) + <2>1. PICK Q \in Seq(S) : \A s \in S : \E i \in 1..Len(Q) : Q[i] = s + BY <1>1 DEF IsFiniteSet + <2>2. /\ Len(Q) \in Nat + /\ Q \in Surjection(1..Len(Q),S) + BY <2>1 DEF Surjection + <2> QED BY <2>2 DEF ExistsSurjection + +<1>2. ASSUME NEW n \in Nat, ExistsSurjection(1..n,S) PROVE IsFiniteSet(S) + BY <1>2 DEF IsFiniteSet, ExistsSurjection, Surjection + +<1> QED BY <1>1, <1>2 + + +LEMMA FS_NatBijection == + ASSUME NEW S + PROVE IsFiniteSet(S) <=> \E n \in Nat : ExistsBijection(1..n,S) +BY FS_NatSurjection, Fun_NatSurjEquivNatBij + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If there exists n \in Nat such that a bijection exists from 1..n to S, *) +(* then Cardinality(S) = n. *) +(* *) +(* `. .' *) +(***************************************************************************) + +LEMMA FS_CountingElements == + ASSUME NEW S, NEW n \in Nat, ExistsBijection(1..n,S) + PROVE Cardinality(S) = n +PROOF + <1> DEFINE + (***********************************************************************) + (* Size of set T. 
*) + (***********************************************************************) + Size(T) == CHOOSE i \in Nat : ExistsBijection(1..i,T) + + (***********************************************************************) + (* Size function for subsets of S. *) + (***********************************************************************) + SZ == [ T \in SUBSET S |-> Size(T) ] + + (***********************************************************************) + (* Formula part of the CS property for element T. *) + (***********************************************************************) + fn(CS,T) == IF T = {} THEN 0 ELSE 1 + CS[T \ {CHOOSE x : x \in T}] + + (***********************************************************************) + (* The CS property. *) + (***********************************************************************) + IsCS(CS) == CS = [T \in SUBSET S |-> fn(CS,T)] + + (***********************************************************************) + (* CS function for subsets of S. Since this is defined as CHOOSE *) + (* something that satisfies the CS property, we do not know that the *) + (* CS function actually satisfies the CS property until we know that *) + (* there exists something that satisfies the CS property. *) + (***********************************************************************) + CS == CHOOSE CS : IsCS(CS) + + <1> HIDE DEF SZ, CS, fn + + + (*************************************************************************) + (* The SZ function satisfies the CS property. *) + (*************************************************************************) + <1>1. IsCS(SZ) + (***********************************************************************) + (* Use induction on the size of T to show that the values match at *) + (* each T \in SUBSET S. *) + (***********************************************************************) + <2> DEFINE + Prop(i) == \A T \in SUBSET S : ExistsBijection(1..i,T) => SZ[T] = fn(SZ,T) + + <2>1. \A i \in Nat : Prop(i) + <3>1. Prop(0) + (*******************************************************************) + (* Base step. *) + (*******************************************************************) + <4>1. SUFFICES ASSUME NEW T \in SUBSET S, ExistsBijection(1..0,T) + PROVE SZ[T] = fn(SZ,T) + OBVIOUS + <4>2. Size(T) = 0 BY <4>1, Fun_NatBijSame + <4>3. T = {} BY <4>1, Fun_NatBijEmpty + <4>4. SZ[T] = 0 BY <4>2 DEF SZ + <4>5. fn(SZ,T) = 0 BY <4>3 DEF fn + <4> QED BY <4>4, <4>5 + + <3>2. ASSUME NEW i \in Nat, Prop(i) PROVE Prop(i+1) + (*******************************************************************) + (* Inductive step. *) + (*******************************************************************) + <4>1. PICK j \in Nat : j = i+1 BY Isa + <4>2. j # 0 BY <4>1, SMT + <4>3. i = j-1 BY <4>1, SMT + <4>4. SUFFICES ASSUME NEW T \in SUBSET S, ExistsBijection(1..j,T) + PROVE SZ[T] = fn(SZ,T) + BY <4>1 + <4>5. ~ExistsBijection(1..0,T) BY <4>2, <4>4, Fun_NatBijSame + <4>6. T # {} BY <4>5, Fun_NatBijEmpty + <4>7. Size(T) = j BY <4>4, Fun_NatBijSame + <4>8. PICK t \in T : t = CHOOSE x : x \in T BY <4>6 + <4>9. PICK U \in SUBSET S : U = T \ {t} OBVIOUS + <4>10. ExistsBijection(1..i,U) BY <4>3, <4>4, <4>9, Fun_NatBijSubElem + <4>11. SZ[U] = fn(SZ,U) BY <4>10, <3>2 + <4>12. SZ[U] = i BY <4>10, Fun_NatBijSame DEF SZ + <4>13. fn(SZ,T) = 1 + SZ[U] BY <4>6, <4>8, <4>9 DEF fn + <4>14. fn(SZ,T) = j BY <4>1, <4>12, <4>13, SMT + <4>15. 
SZ[T] = j BY <4>7 DEF SZ + <4> QED BY <4>14, <4>15 + + <3> HIDE DEF Prop + <3> QED BY Isa, <3>1, <3>2, NatInduction + + <2> SUFFICES ASSUME NEW T \in SUBSET S PROVE SZ[T] = fn(SZ,T) BY DEF SZ + <2>2. PICK i \in Nat : ExistsBijection(1..i,T) BY Fun_NatBijSubset + <2> QED BY <2>1, <2>2 + + + (*************************************************************************) + (* Any two things that satisfy the CS property must be equal. *) + (*************************************************************************) + <1>2. ASSUME + NEW CS1, IsCS(CS1), + NEW CS2, IsCS(CS2) + PROVE CS1 = CS2 + (***********************************************************************) + (* Use induction on the size of T to show that the values match at *) + (* each T \in SUBSET S. *) + (***********************************************************************) + <2> DEFINE + Prop(i) == \A T \in SUBSET S : ExistsBijection(1..i,T) => CS1[T] = CS2[T] + + <2>1. \A i \in Nat : Prop(i) + <3>1. Prop(0) + (*******************************************************************) + (* Base step. *) + (*******************************************************************) + <4>1. SUFFICES ASSUME NEW T \in SUBSET S, ExistsBijection(1..0,T) + PROVE CS1[T] = CS2[T] + OBVIOUS + <4>2. T = {} BY <4>1, Fun_NatBijEmpty + <4>3. fn(CS1,T) = 0 BY <4>2 DEF fn + <4>4. fn(CS2,T) = 0 BY <4>2 DEF fn + <4> QED BY <4>3, <4>4, <1>2 + + <3>2. ASSUME NEW i \in Nat, Prop(i) PROVE Prop(i+1) + (*******************************************************************) + (* Inductive step. *) + (*******************************************************************) + <4>1. PICK j \in Nat : j = i+1 BY Isa + <4>2. j # 0 BY <4>1, SMT + <4>3. i = j-1 BY <4>1, SMT + <4>4. SUFFICES ASSUME NEW T \in SUBSET S, ExistsBijection(1..j,T) + PROVE CS1[T] = CS2[T] + BY <4>1 + <4>5. ~ExistsBijection(1..0,T) BY <4>2, <4>4, Fun_NatBijSame + <4>6. T # {} BY <4>5, Fun_NatBijEmpty + <4>7. PICK t \in T : t = CHOOSE x : x \in T BY <4>6 + <4>8. PICK U \in SUBSET S : U = T \ {t} OBVIOUS + <4>9. ExistsBijection(1..i,U) BY <4>3, <4>4, <4>8, Fun_NatBijSubElem + <4>10. CS1[U] = CS2[U] BY <4>9, <3>2 + <4>11. CS1[T] = 1 + CS1[U] BY <4>6, <4>7, <4>8, <1>2 DEF fn + <4>12. CS2[T] = 1 + CS2[U] BY <4>6, <4>7, <4>8, <1>2 DEF fn + <4> QED BY <4>10, <4>11, <4>12 + + <3> HIDE DEF Prop + <3> QED BY Isa, <3>1, <3>2, NatInduction + + <2> SUFFICES ASSUME NEW T \in SUBSET S PROVE CS1[T] = CS2[T] BY <1>2 + <2>2. PICK i \in Nat : ExistsBijection(1..i,T) BY Fun_NatBijSubset + <2> QED BY <2>1, <2>2 + + + (*************************************************************************) + (* Since SZ satisfies the CS property, the CS function must satisfy the *) + (* CS property. And it must be the same as SZ. *) + (*************************************************************************) + <1>3. IsCS(CS) BY <1>1 DEF CS + <1>4. CS = SZ BY <1>1, <1>2, <1>3 + + + <1>5. Cardinality(S) = CS[S] BY DEF Cardinality, CS, fn + <1>6. S \in SUBSET S OBVIOUS + <1>7. SZ[S] = Size(S) BY <1>6 DEF SZ + <1>8. Size(S) = n BY Fun_NatBijSame + <1> QED BY <1>4, <1>5, <1>7, <1>8 + + +(***************************************************************************) +(* Corollary: a surjection from 1..n to S provides a cardinality bound. 
*) +(***************************************************************************) +THEOREM FS_SurjCardinalityBound == + ASSUME NEW S, NEW n \in Nat, ExistsSurjection(1..n, S) + PROVE Cardinality(S) <= n +BY Fun_NatSurjImpliesNatBij, FS_CountingElements + + +(***************************************************************************) +(* `. .' *) +(* *) +(* For any finite set S, Cardinality(S) \in Nat. Moreover, there is a *) +(* bijection from 1 .. Cardinality(S) to S. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_CardinalityType == + ASSUME NEW S, IsFiniteSet(S) + PROVE /\ Cardinality(S) \in Nat + /\ ExistsBijection(1..Cardinality(S), S) +BY FS_NatBijection, FS_CountingElements + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The image of a finite set under a bijection or surjection is finite. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_Bijection == + ASSUME NEW S, NEW T, IsFiniteSet(S), ExistsBijection(S,T) + PROVE /\ IsFiniteSet(T) + /\ Cardinality(T) = Cardinality(S) +BY FS_CardinalityType, Fun_ExistsBijTransitive, FS_CountingElements, + FS_NatBijection + + +THEOREM FS_SameCardinalityBij == + ASSUME NEW S, NEW T, IsFiniteSet(S), IsFiniteSet(T), + Cardinality(S) = Cardinality(T) + PROVE ExistsBijection(S,T) +BY FS_CardinalityType, Fun_ExistsBijSymmetric, Fun_ExistsBijTransitive + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Any surjection between two finite sets of equal cardinality is *) +(* an injection. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_SurjSameCardinalityImpliesInj == + ASSUME NEW S, NEW T, IsFiniteSet(S), Cardinality(S) = Cardinality(T), + NEW f \in Surjection(S,T) + PROVE f \in Injection(S,T) + +<1>1. SUFFICES ASSUME NEW a \in S, NEW b \in S, a # b, f[a] = f[b] + PROVE FALSE + BY DEF Injection, Surjection +<1>. DEFINE n == Cardinality(S) +<1>. n \in Nat BY FS_CardinalityType +<1>. PICK g \in Bijection(1..n, S) : TRUE + BY FS_CardinalityType DEF ExistsBijection +<1>2. PICK i,j \in 1 .. n : + /\ i < j + /\ \/ g[i] = a /\ g[j] = b + \/ g[i] = b /\ g[j] = a + <2>1. PICK i,j \in 1 .. n : i # j /\ g[i] = a /\ g[j] = b + BY <1>1 DEF Bijection, Surjection + <2>2. CASE i < j BY <2>1, <2>2 + <2>3. CASE i > j BY <2>1, <2>3 + <2>. QED BY <2>1, <2>2, <2>3 +<1>. n-1 \in Nat BY <1>2 +<1>. DEFINE h == [ k \in 1 .. n-1 |-> IF k=j THEN f[g[n]] ELSE f[g[k]] ] +<1>3. h \in Surjection(1..n-1, T) + <2>1. h \in [1..n-1 -> T] BY DEF Bijection, Surjection + <2>2. ASSUME NEW t \in T PROVE \E k \in 1..n-1 : h[k] = t + <3>1. PICK s \in S : f[s] = t BY DEF Surjection + <3>2. PICK l \in 1..n : g[l] = s BY DEF Bijection, Surjection + <3>. QED BY <1>1, <1>2, <3>1, <3>2 + <2>. QED BY <2>1, <2>2 DEF Surjection +<1>4. Cardinality(T) <= n-1 BY <1>3, FS_SurjCardinalityBound DEF ExistsSurjection +<1>. QED BY <1>4 + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The image of a finite set is finite. *) +(* NB: Note that any function is a surjection on its range by theorem *) +(* Fun_RangeProperties. *) +(* *) +(* `. .' 
*) +(***************************************************************************) +THEOREM FS_Surjection == + ASSUME NEW S, NEW T, NEW f \in Surjection(S,T), IsFiniteSet(S) + PROVE /\ IsFiniteSet(T) + /\ Cardinality(T) <= Cardinality(S) + /\ Cardinality(T) = Cardinality(S) <=> f \in Injection(S,T) + +<1>1. /\ Cardinality(S) \in Nat + /\ ExistsBijection(1 .. Cardinality(S), S) + BY FS_CardinalityType +<1>2. ExistsSurjection(1 .. Cardinality(S), T) + BY <1>1, Fun_ExistsBijEquiv, Fun_ExistsSurjTransitive DEF ExistsSurjection +<1>4. IsFiniteSet(T) /\ Cardinality(T) <= Cardinality(S) + BY <1>1, <1>2, FS_NatSurjection, FS_SurjCardinalityBound +<1>5. ASSUME Cardinality(T) = Cardinality(S) PROVE f \in Injection(S,T) + BY <1>5, FS_SurjSameCardinalityImpliesInj +<1>6. ASSUME f \in Injection(S,T) PROVE Cardinality(T) = Cardinality(S) + <2>1. ExistsBijection(S, T) BY <1>6 DEF Bijection, ExistsBijection + <2>2. ExistsBijection(1..Cardinality(S), T) + BY <1>1, <2>1, Fun_ExistsBijTransitive + <2>. QED BY <1>1, <2>2, FS_CountingElements +<1>. QED BY <1>4, <1>5, <1>6 + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The cardinality of a finite set S is 0 iff S is empty. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_EmptySet == + /\ IsFiniteSet({}) + /\ Cardinality({}) = 0 + /\ \A S : IsFiniteSet(S) => (Cardinality(S) = 0 <=> S = {}) + +<1>1. IsFiniteSet({}) /\ Cardinality({}) = 0 + BY Fun_NatBijEmpty, FS_NatBijection, FS_CountingElements, Zenon +<1>2. ASSUME NEW S, IsFiniteSet(S), Cardinality(S) = 0 + PROVE S = {} + BY <1>2, FS_CardinalityType, Fun_NatBijEmpty +<1>. QED BY <1>1, <1>2 + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If S is finite, so are S \cup {x} and S \ {x}. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_AddElement == + ASSUME NEW S, NEW x, IsFiniteSet(S) + PROVE /\ IsFiniteSet(S \cup {x}) + /\ Cardinality(S \cup {x}) = + IF x \in S THEN Cardinality(S) ELSE Cardinality(S)+1 +<1>1. CASE x \notin S + BY <1>1, FS_CardinalityType, Fun_NatBijAddElem, FS_NatBijection, + FS_CountingElements +<1>. QED BY <1>1 \* the case "x \in S" is trivial + + +THEOREM FS_RemoveElement == + ASSUME NEW S, NEW x, IsFiniteSet(S) + PROVE /\ IsFiniteSet(S \ {x}) + /\ Cardinality(S \ {x}) = + IF x \in S THEN Cardinality(S)-1 ELSE Cardinality(S) +<1>1. CASE x \in S + BY <1>1, FS_CardinalityType, Fun_NatBijSubElem, FS_NatBijection, + FS_CountingElements, FS_EmptySet +<1>. QED BY <1>1 \* the case "x \notin S" is trivial + + +(***************************************************************************) +(* `. .' *) +(* *) +(* In particular, a singleton set is finite. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_Singleton == + /\ \A x : IsFiniteSet({x}) /\ Cardinality({x}) = 1 + /\ \A S : IsFiniteSet(S) => (Cardinality(S) = 1 <=> \E x: S = {x}) + +<1>1. \A x : IsFiniteSet({x}) /\ Cardinality({x}) = 1 + BY FS_EmptySet, FS_AddElement +<1>2. ASSUME NEW S, IsFiniteSet(S), Cardinality(S) = 1 + PROVE \E x : S = {x} + BY <1>2, FS_CardinalityType, Fun_NatBijSingleton +<1>. QED BY <1>1, <1>2 + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Any subset of a finite set is finite. *) +(* *) +(* `. .' 
*) +(***************************************************************************) +THEOREM FS_Subset == + ASSUME NEW S, IsFiniteSet(S), NEW T \in SUBSET S + PROVE /\ IsFiniteSet(T) + /\ Cardinality(T) <= Cardinality(S) + /\ Cardinality(S) = Cardinality(T) => S = T +\* NB: Changing the last implication to an equivalence breaks proofs + +<1>1. /\ Cardinality(S) \in Nat + /\ ExistsBijection(1 .. Cardinality(S), S) + BY FS_CardinalityType +<1>2. PICK n \in Nat : ExistsBijection(1..n, T) /\ n <= Cardinality(S) + BY <1>1, Fun_NatBijSubset +<1>3. ASSUME Cardinality(S) = Cardinality(T), S # T + PROVE FALSE + <2>1. PICK x \in S \ T : TRUE BY <1>3 + <2>2. /\ IsFiniteSet(S \ {x}) + /\ Cardinality(S \ {x}) = Cardinality(S) - 1 + BY <2>1, FS_RemoveElement + <2>3. T \subseteq S \ {x} BY <2>1 + <2>4. PICK m \in Nat : ExistsBijection(1..m, T) /\ m <= Cardinality(S)-1 + BY <2>2, <2>3, FS_CardinalityType, Fun_NatBijSubset + <2>. QED BY <2>4, <1>3, FS_CountingElements +<1>. QED BY <1>2, <1>3, FS_NatBijection, FS_CountingElements + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* a..b is a finite set for any a,b \in Int. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_Interval == + ASSUME NEW a \in Int, NEW b \in Int + PROVE /\ IsFiniteSet(a..b) + /\ Cardinality(a..b) = IF a > b THEN 0 ELSE b-a+1 + +<1>1. CASE a <= b + BY <1>1, Fun_ExistsBijInterval, FS_NatBijection, FS_CountingElements +<1>2. CASE a > b + <2>1. a..b = {} BY <1>2 + <2>. QED BY <2>1, <1>2, FS_EmptySet, Zenon +<1>. QED BY <1>1, <1>2 + + +THEOREM FS_BoundedSetOfNaturals == + ASSUME NEW S \in SUBSET Nat, NEW n \in Nat, + \A s \in S : s <= n + PROVE /\ IsFiniteSet(S) + /\ Cardinality(S) \leq n+1 +<1>1. S \subseteq 0 .. n OBVIOUS +<1>2. IsFiniteSet(0..n) /\ Cardinality(0..n) = n+1 BY FS_Interval +<1>. QED BY <1>1, <1>2, FS_Subset, Zenon + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Induction for finite sets. *) +(* *) +(* `. .' *) +(***************************************************************************) + +THEOREM FS_Induction == + ASSUME NEW S, IsFiniteSet(S), + NEW P(_), P({}), + ASSUME NEW T, NEW x, IsFiniteSet(T), P(T), x \notin T + PROVE P(T \cup {x}) + PROVE P(S) +<1>. DEFINE Q(n) == \A T : IsFiniteSet(T) /\ Cardinality(T) = n => P(T) +<1>1. SUFFICES \A n \in Nat : Q(n) BY FS_CardinalityType +<1>2. Q(0) BY FS_EmptySet, Zenon +<1>3. ASSUME NEW n \in Nat, Q(n), + NEW T, IsFiniteSet(T), Cardinality(T) = n+1 + PROVE P(T) + <2>1. PICK x \in T : TRUE BY <1>3, FS_EmptySet + <2>2. /\ IsFiniteSet(T \ {x}) + /\ Cardinality(T \ {x}) = n + BY <1>3, FS_RemoveElement, Isa + <2>3. P(T \ {x}) BY <2>2, Q(n) + <2>4. P((T \ {x}) \cup {x}) BY <2>2, <2>3 + <2>. QED BY <2>4 +<1>4. QED BY <1>2, <1>3, NatInduction, Isa + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The finite subsets form a well-founded ordering with respect to strict *) +(* set inclusion. *) +(* *) +(* `. .' 
*) +(***************************************************************************) + +FiniteSubsetsOf(S) == { T \in SUBSET S : IsFiniteSet(T) } +StrictSubsetOrdering(S) == { ss \in (SUBSET S) \X (SUBSET S) : + ss[1] \subseteq ss[2] /\ ss[1] # ss[2] } + +LEMMA FS_FiniteSubsetsOfFinite == + ASSUME NEW S, IsFiniteSet(S) + PROVE FiniteSubsetsOf(S) = SUBSET S +BY FS_Subset DEF FiniteSubsetsOf + + +(*****************************************************************************) +(* The formulation of the following theorem doesn't require S being finite. *) +(* If S is finite, it implies *) +(* IsWellFoundedOn(StrictSubsetOrdering(S), SUBSET S) *) +(* using lemma FS_FiniteSubsetsOfFinite. *) +(*****************************************************************************) +THEOREM FS_StrictSubsetOrderingWellFounded == + ASSUME NEW S + PROVE IsWellFoundedOn(StrictSubsetOrdering(S), FiniteSubsetsOf(S)) + +<1>1. \A T \in FiniteSubsetsOf(S) : Cardinality(T) \in Nat + BY FS_CardinalityType, FS_Subset DEF FiniteSubsetsOf +<1>2. IsWellFoundedOn(PreImage(Cardinality, FiniteSubsetsOf(S), OpToRel(<,Nat)), + FiniteSubsetsOf(S)) + BY <1>1, PreImageWellFounded, NatLessThanWellFounded, Isa +<1>3. StrictSubsetOrdering(S) \cap (FiniteSubsetsOf(S) \X FiniteSubsetsOf(S)) + \subseteq PreImage(Cardinality, FiniteSubsetsOf(S), OpToRel(<, Nat)) + BY FS_Subset, <1>1 + DEF StrictSubsetOrdering, FiniteSubsetsOf, PreImage, OpToRel +<1>. QED BY <1>2, <1>3, IsWellFoundedOnSubrelation + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Well-founded induction for finite subsets. *) +(* *) +(* `. .' *) +(***************************************************************************) + +THEOREM FS_WFInduction == + ASSUME NEW P(_), NEW S, IsFiniteSet(S), + ASSUME NEW T \in SUBSET S, + \A U \in (SUBSET T) \ {T} : P(U) + PROVE P(T) + PROVE P(S) +<1>. DEFINE SubS == SUBSET S +<1>1. IsWellFoundedOn(StrictSubsetOrdering(S), SubS) + BY FS_FiniteSubsetsOfFinite, FS_StrictSubsetOrderingWellFounded, Zenon +<1>2. \A T \in SubS : + (\A U \in SetLessThan(T, StrictSubsetOrdering(S), SubS) : P(U)) + => P(T) + BY DEF SetLessThan, StrictSubsetOrdering +<1>. HIDE DEF SubS +<1>3. \A T \in SubS : P(T) BY ONLY <1>1, <1>2, WFInduction, IsaM("blast") +<1>. QED BY <1>3 DEF SubS + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The union of two finite sets is finite. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_Union == + ASSUME NEW S, IsFiniteSet(S), + NEW T, IsFiniteSet(T) + PROVE /\ IsFiniteSet(S \cup T) + /\ Cardinality(S \cup T) = + Cardinality(S) + Cardinality(T) - Cardinality(S \cap T) + +<1>. DEFINE P(A) == /\ IsFiniteSet(S \cup A) + /\ Cardinality(S \cup A) = + Cardinality(S) + Cardinality(A) - Cardinality(S \cap A) +<1>1. P({}) BY FS_EmptySet, FS_CardinalityType +<1>2. ASSUME NEW A, NEW x, IsFiniteSet(A), P(A), x \notin A + PROVE P(A \cup {x}) + <2>1. IsFiniteSet(S \cup (A \cup {x})) BY P(A), FS_AddElement, Isa + <2>. /\ IsFiniteSet(S \cup A) + /\ IsFiniteSet(S \cap A) + /\ Cardinality(S) \in Nat + /\ Cardinality(A) \in Nat + /\ Cardinality(S \cap A) \in Nat + BY P(A), FS_Subset, FS_CardinalityType + <2>2. Cardinality(A \cup {x}) = Cardinality(A) + 1 + BY <1>2, FS_AddElement + <2>3. CASE x \in S + <3>1. Cardinality(S \cup (A \cup {x})) = Cardinality(S \cup A) BY <2>3, Zenon + <3>2. 
Cardinality(S \cap (A \cup {x})) = Cardinality((S \cap A) \cup {x}) BY <2>3, Zenon + <3>3. Cardinality(S \cap (A \cup {x})) = Cardinality(S \cap A) + 1 + BY <3>2, <1>2, FS_AddElement + <3>. QED BY <3>1, <3>3, <2>2, <2>1, P(A) + <2>4. CASE x \notin S + <3>1. Cardinality((S \cup A) \cup {x}) = Cardinality(S \cup A) + 1 + BY <1>2, <2>4, FS_AddElement + <3>1a. Cardinality(S \cup (A \cup {x})) = Cardinality(S \cup A) + 1 BY <3>1, Zenon + <3>2. Cardinality(S \cap (A \cup {x})) = Cardinality(S \cap A) BY <2>4, Zenon + <3>. QED BY <3>1a, <3>2, <2>2, <2>1, P(A) + <2>. QED BY <2>3, <2>4 +<1>. HIDE DEF P +<1>. P(T) BY <1>1, <1>2, FS_Induction, IsaM("blast") +<1>. QED BY DEF P + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Corollary: two majorities intersect. More precisely, any two subsets *) +(* of a finite set U such that the sum of cardinalities of the subsets *) +(* exceeds that of U must have non-empty intersection. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_MajoritiesIntersect == + ASSUME NEW U, NEW S, NEW T, IsFiniteSet(U), + S \subseteq U, T \subseteq U, + Cardinality(S) + Cardinality(T) > Cardinality(U) + PROVE S \cap T # {} + +<1>. /\ IsFiniteSet(S) + /\ IsFiniteSet(T) + /\ Cardinality(S) \in Nat + /\ Cardinality(T) \in Nat + /\ Cardinality(U) \in Nat + /\ Cardinality(S \cap T) \in Nat + /\ Cardinality(S \cup T) <= Cardinality(U) + BY FS_Subset, FS_CardinalityType +<1>1. Cardinality(S \cup T) = + Cardinality(S) + Cardinality(T) - Cardinality(S \cap T) + BY FS_Union, Zenon +<1>2. Cardinality(S \cap T) # 0 BY <1>1 +<1>3. QED BY <1>2, FS_EmptySet, Zenon + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The intersection of a finite set with an arbitrary set is finite. *) +(* *) +(* `. .' *) +(***************************************************************************) + + +THEOREM FS_Intersection == + ASSUME NEW S, IsFiniteSet(S), NEW T + PROVE /\ IsFiniteSet(S \cap T) + /\ IsFiniteSet(T \cap S) + /\ Cardinality(S \cap T) <= Cardinality(S) + /\ Cardinality(T \cap S) <= Cardinality(S) +BY FS_Subset + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The difference between a finite set and an arbitrary set is finite. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_Difference == + ASSUME NEW S, NEW T, IsFiniteSet(S) + PROVE /\ IsFiniteSet(S \ T) + /\ Cardinality(S \ T) = Cardinality(S) - Cardinality(S \cap T) + +<1>. /\ IsFiniteSet(S \ T) + /\ IsFiniteSet(S \cap T) + /\ Cardinality(S \ T) \in Nat + /\ Cardinality(S \cap T) \in Nat + BY FS_Subset, FS_CardinalityType +<1>2. Cardinality(S \ T) = Cardinality(S) - Cardinality(S \cap T) + <2>1. Cardinality(S) = Cardinality((S \cap T) \cup (S \ T)) BY Zenon + <2>2. Cardinality((S \cap T) \cup (S \ T)) = + Cardinality(S \cap T) + Cardinality(S \ T) - Cardinality((S \cap T) \cap (S \ T)) + BY FS_Union, Zenon + <2>3. Cardinality((S \cap T) \cap (S \ T)) = 0 BY FS_EmptySet, Zenon + <2>. QED BY <2>1, <2>2, <2>3 +<1>3. QED BY <1>2 + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The union of a finite number of finite sets is finite. *) +(* *) +(* `. .' 
*) +(***************************************************************************) +THEOREM FS_UNION == + ASSUME NEW S, IsFiniteSet(S), \A T \in S : IsFiniteSet(T) + PROVE IsFiniteSet(UNION S) + +<1>. DEFINE P(U) == (\A T \in U : IsFiniteSet(T)) => IsFiniteSet(UNION U) +<1>1. P({}) BY FS_EmptySet +<1>2. ASSUME NEW U, NEW x, P(U), x \notin U + PROVE P(U \cup {x}) + BY <1>2, FS_Union, Isa +<1>. HIDE DEF P +<1>. P(S) BY <1>1, <1>2, FS_Induction, IsaM("blast") +<1>. QED BY DEF P + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The product of two finite sets is finite. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_Product == + ASSUME NEW S, IsFiniteSet(S), + NEW T, IsFiniteSet(T) + PROVE /\ IsFiniteSet(S \X T) + /\ Cardinality(S \X T) = Cardinality(S) * Cardinality(T) + +<1>. DEFINE P(A) == /\ IsFiniteSet(S \X A) + /\ Cardinality(S \X A) = Cardinality(S) * Cardinality(A) +<1>1. P({}) + <2>1. /\ S \X {} = {} + /\ IsFiniteSet(S \X {}) + /\ Cardinality(S \X {}) = 0 + /\ Cardinality({}) = 0 + /\ Cardinality(S) \in Nat + BY FS_EmptySet, FS_CardinalityType, Zenon + <2>. QED BY <2>1 +<1>2. ASSUME NEW A, NEW x, IsFiniteSet(A), P(A), x \notin A + PROVE P(A \cup {x}) + <2>. /\ Cardinality(A) \in Nat + /\ Cardinality(S) \in Nat + BY <1>2, FS_CardinalityType + <2>. DEFINE SX == { <<s,x>> : s \in S } + <2>1. /\ IsFiniteSet(A \cup {x}) + /\ Cardinality(A \cup {x}) = Cardinality(A) + 1 + BY <1>2, FS_AddElement + <2>2. S \X (A \cup {x}) = (S \X A) \cup SX + BY <1>2, Isa + <2>3. ExistsBijection(S, SX) + <3>. DEFINE f == [s \in S |-> <<s,x>>] + <3>. f \in Bijection(S, SX) BY DEF Bijection, Injection, Surjection + <3>. QED BY DEF ExistsBijection + <2>4. /\ IsFiniteSet(SX) + /\ Cardinality(SX) = Cardinality(S) + BY <2>3, FS_Bijection + <2>5. /\ IsFiniteSet(S \X (A \cup {x})) + /\ Cardinality(S \X (A \cup {x})) = + Cardinality(S \X A) + Cardinality(SX) - Cardinality((S \X A) \cap SX) + BY <2>2, <2>4, P(A), FS_Union, Isa + <2>6. (S \X A) \cap SX = {} BY <1>2 + <2>7. Cardinality((S \X A) \cap SX) = 0 BY <2>6, FS_EmptySet, Zenon + <2>. QED BY <2>1, <2>5, <2>4, <2>7, P(A) +<1>. HIDE DEF P +<1>. P(T) BY <1>1, <1>2, FS_Induction, IsaM("blast") +<1>. QED BY DEF P + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The powerset of a finite set is finite. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM FS_SUBSET == + ASSUME NEW S, IsFiniteSet(S) + PROVE /\ IsFiniteSet(SUBSET S) + /\ Cardinality(SUBSET S) = 2^Cardinality(S) + +<1>. DEFINE P(A) == /\ IsFiniteSet(SUBSET A) + /\ Cardinality(SUBSET A) = 2^Cardinality(A) +<1>1. P({}) + <2>1. /\ IsFiniteSet({{}}) + /\ Cardinality({{}}) = 1 + BY FS_Singleton, Zenon + <2>2. 1 = 2^0 OBVIOUS + <2>. QED BY <2>1, <2>2, FS_EmptySet, Zenon +<1>2. ASSUME NEW A, NEW x, IsFiniteSet(A), x \notin A, P(A) + PROVE P(A \cup {x}) + <2>. DEFINE Ax == {B \cup {x} : B \in SUBSET A} + <2>1. Cardinality(A \cup {x}) = Cardinality(A) + 1 BY <1>2, FS_AddElement + <2>2. 2^Cardinality(A \cup {x}) = 2^Cardinality(A) + 2^Cardinality(A) + BY <2>1, <1>2, FS_CardinalityType, TwoExpLemma, Zenon + <2>3. SUBSET (A \cup {x}) = (SUBSET A) \cup Ax BY <1>2, Isa + <2>4. ExistsBijection(SUBSET A, Ax) + <3>. DEFINE f == [B \in SUBSET A |-> B \cup {x}] + <3>1. ASSUME NEW B \in SUBSET A, NEW C \in SUBSET A, f[B] = f[C] + PROVE B = C + BY <3>1, <1>2, Zenon + <3>2. 
f \in Surjection(SUBSET A, Ax) BY DEF Surjection + <3>3. f \in Bijection(SUBSET A, Ax) + BY <3>1, <3>2 DEF Bijection, Injection + <3>. QED BY <3>3 DEF ExistsBijection + <2>5. /\ IsFiniteSet(Ax) + /\ Cardinality(Ax) = Cardinality(SUBSET A) + BY <2>4, P(A), FS_Bijection + <2>6. /\ IsFiniteSet(SUBSET (A \cup {x})) + /\ Cardinality(SUBSET (A \cup {x})) = + Cardinality(SUBSET A) + Cardinality(Ax) - Cardinality((SUBSET A) \cap Ax) + BY <2>3, <2>5, P(A), FS_Union, Isa + <2>7. (SUBSET A) \cap Ax = {} BY <1>2 + <2>8. Cardinality((SUBSET A) \cap Ax) = 0 BY <2>7, FS_EmptySet, Zenon + <2>. QED BY <2>2, <2>5, <2>6, <2>8, P(A), FS_CardinalityType +<1>. HIDE DEF P +<1>. P(S) BY <1>1, <1>2, FS_Induction, IsaM("blast") +<1>. QED BY DEF P + + + + + + +============================================================================= +\* Modification History +\* Last modified Fri Feb 14 21:24:26 GMT-03:00 2014 by merz +\* Last modified Thu Jul 04 15:15:07 CEST 2013 by bhargav +\* Last modified Tue Jun 04 11:44:51 CEST 2013 by bhargav +\* Last modified Fri May 03 12:02:51 PDT 2013 by tomr +\* Created Fri Oct 05 15:04:18 PDT 2012 by tomr \ No newline at end of file diff --git a/x/ccv/provider/keeper/prototyping/model/library/FiniteSets.tla b/x/ccv/provider/keeper/prototyping/model/library/FiniteSets.tla new file mode 100644 index 0000000000..57ac402350 --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/library/FiniteSets.tla @@ -0,0 +1,23 @@ +---------------------------- MODULE FiniteSets ----------------------------- +LOCAL INSTANCE Naturals +LOCAL INSTANCE Sequences + (*************************************************************************) + (* Imports the definitions from Naturals and Sequences, but doesn't *) + (* export them. *) + (*************************************************************************) + +IsFiniteSet(S) == + (*************************************************************************) + (* A set S is finite iff there is a finite sequence containing all its *) + (* elements. *) + (*************************************************************************) + \E seq \in Seq(S) : \A s \in S : \E n \in 1..Len(seq) : seq[n] = s + +Cardinality(S) == + (*************************************************************************) + (* Cardinality is defined only for finite sets. *) + (*************************************************************************) + LET CS[T \in SUBSET S] == IF T = {} THEN 0 + ELSE 1 + CS[T \ {CHOOSE x : x \in T}] + IN CS[S] +============================================================================= diff --git a/x/ccv/provider/keeper/prototyping/model/library/FunctionTheorems.tla b/x/ccv/provider/keeper/prototyping/model/library/FunctionTheorems.tla new file mode 100644 index 0000000000..644b6414f1 --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/library/FunctionTheorems.tla @@ -0,0 +1,575 @@ +------------------------- MODULE FunctionTheorems --------------------------- +(***************************************************************************) +(* `^{\large\vspace{12pt} *) +(* Facts about functions. *) +(* Originally contributed by Tom Rodeheffer, MSR. *) +(* For the proofs of these theorems, see module FunctionTheorems\_proofs. *) +(* \vspace{12pt}}^' *) +(***************************************************************************) + +EXTENDS + Functions, + Integers + +(***************************************************************************) +(* `. .' *) +(* *) +(* Function restriction. *) +(* *) +(* `. .' 
*) +(***************************************************************************) +THEOREM Fun_RestrictProperties == + ASSUME NEW S, NEW T, NEW f \in [S -> T], NEW A \in SUBSET S + PROVE /\ Restrict(f,A) \in [A -> T] + /\ \A x \in A : Restrict(f,A)[x] = f[x] + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Range of a function. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_RangeProperties == + ASSUME NEW S, NEW T, NEW f \in [S -> T] + PROVE /\ Range(f) \subseteq T + /\ \A y \in Range(f) : \E x \in S : f[x] = y + /\ f \in Surjection(S, Range(f)) + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Range of a function. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_InverseProperties == + ASSUME NEW S, NEW T, NEW f \in [S -> T] + PROVE /\ (S = {} => T = {}) => Inverse(f,S,T) \in [T -> S] + /\ \A y \in Range(f) : f[Inverse(f,S,T)[y]] = y + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Introduction rules for injections, surjections, bijections. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_IsInj == + ASSUME NEW S, NEW T, NEW F \in [S -> T], + \A a,b \in S : F[a] = F[b] => a = b + PROVE F \in Injection(S,T) + + +THEOREM Fun_IsSurj == + ASSUME NEW S, NEW T, NEW F \in [S -> T], + \A t \in T : \E s \in S : F[s] = t + PROVE F \in Surjection(S,T) + + +THEOREM Fun_IsBij == + ASSUME NEW S, NEW T, NEW F, + \/ F \in Injection(S,T) + \/ (F \in [S -> T] /\ \A a,b \in S : F[a] = F[b] => a = b), + + \/ F \in Surjection(S,T) + \/ (F \in [S -> T] /\ \A t \in T : \E s \in S : F[s] = t) + PROVE F \in Bijection(S,T) + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Properties of injections, surjections, and bijections. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_InjectionProperties == + ASSUME NEW S, NEW T, NEW F \in Injection(S,T) + PROVE /\ F \in [S -> T] + /\ \A a,b \in S : F[a] = F[b] => a = b + + +THEOREM Fun_SurjectionProperties == + ASSUME NEW S, NEW T, NEW F \in Surjection(S,T) + PROVE /\ F \in [S -> T] + /\ \A t \in T : \E s \in S : F[s] = t + /\ Range(F) = T + + +THEOREM Fun_BijectionProperties == + ASSUME NEW S, NEW T, NEW F \in Bijection(S,T) + PROVE /\ F \in [S -> T] + /\ F \in Injection(S,T) + /\ F \in Surjection(S,T) + /\ \A a,b \in S : F[a] = F[b] => a = b + /\ \A t \in T : \E s \in S : F[s] = t + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* A surjection in [S -> T] such that there is no surjection from any *) +(* subset of S to T is a bijection. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_SmallestSurjectionIsBijection == + ASSUME NEW S, NEW T, NEW f \in Surjection(S,T), + \A U \in SUBSET S : U # S => Surjection(U,T) = {} + PROVE f \in Bijection(S,T) + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Transitivity of injections, surjections, bijections. *) +(* *) +(* `. .' 
*) +(***************************************************************************) +THEOREM Fun_InjTransitive == + ASSUME NEW S, NEW T, NEW U, + NEW F \in Injection(S,T), + NEW G \in Injection(T,U) + PROVE [s \in S |-> G[F[s]]] \in Injection(S,U) + + +THEOREM Fun_SurjTransitive == + ASSUME NEW S, NEW T, NEW U, + NEW F \in Surjection(S,T), + NEW G \in Surjection(T,U) + PROVE [s \in S |-> G[F[s]]] \in Surjection(S,U) + + +THEOREM Fun_BijTransitive == + ASSUME NEW S, NEW T, NEW U, + NEW F \in Bijection(S,T), + NEW G \in Bijection(T,U) + PROVE [s \in S |-> G[F[s]]] \in Bijection(S,U) + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The inverse of a surjection is an injection and vice versa. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_SurjInverse == + ASSUME NEW S, NEW T, NEW f \in Surjection(S,T) + PROVE Inverse(f,S,T) \in Injection(T,S) + + +THEOREM Fun_InjInverse == + ASSUME NEW S, NEW T, NEW f \in Injection(S,T), S = {} => T = {} + PROVE Inverse(f,S,T) \in Surjection(T,S) + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Properties of the inverse of a bijection. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_BijInverse == + ASSUME NEW S, NEW T, NEW f \in Bijection(S,T) + PROVE /\ Inverse(f,S,T) \in Bijection(T,S) + /\ \A s \in S : Inverse(f,S,T)[f[s]] = s + /\ Inverse(Inverse(f,S,T), T,S) = f + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The restriction of a bijection is a bijection. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_BijRestrict == + ASSUME NEW S, NEW T, NEW F \in Bijection(S,T), + NEW R \in SUBSET S + PROVE Restrict(F, R) \in Bijection(R, Range(Restrict(F, R))) + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Given F an injection from S to T, then F is a bijection from S to F(S). *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_InjMeansBijImage == + ASSUME NEW S, NEW T, NEW F \in Injection(S,T) + PROVE F \in Bijection(S, Range(F)) + + + + +----------------------------------------------------------------------------- +(***************************************************************************) +(* `^{\large \vspace{12pt} *) +(* Facts about exists jections. *) +(* \vspace{12pt}}^' *) +(***************************************************************************) + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Definitions restated as facts. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_ExistsInj == + \A S,T : ExistsInjection(S,T) <=> Injection(S,T) # {} + + +THEOREM Fun_ExistsSurj == + \A S,T : ExistsSurjection(S,T) <=> Surjection(S,T) # {} + + +THEOREM Fun_ExistsBij == + \A S,T : ExistsBijection(S,T) <=> Bijection(S,T) # {} + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* There is a surjection from any set S to any non-empty subset T of S. *) +(* (Note that there cannot be a surjection to {} except if S is empty.) *) +(* *) +(* `. .' 
*) +(***************************************************************************) +THEOREM Fun_ExistsSurjSubset == + ASSUME NEW S, NEW T \in SUBSET S, T # {} + PROVE ExistsSurjection(S,T) + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If there is a surjection from S to T, then there is an injection from T *) +(* to S. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_ExistsSurjMeansExistsRevInj == + ASSUME NEW S, NEW T, ExistsSurjection(S,T) + PROVE ExistsInjection(T,S) + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* ExistsBijection is reflexive, symmetric, and transitive. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_ExistsBijReflexive == + ASSUME NEW S + PROVE ExistsBijection(S,S) + + +THEOREM Fun_ExistsBijSymmetric == + ASSUME NEW S, NEW T, ExistsBijection(S,T) + PROVE ExistsBijection(T,S) + + +THEOREM Fun_ExistsBijTransitive == + ASSUME NEW S, NEW T, NEW U, ExistsBijection(S,T), ExistsBijection(T,U) + PROVE ExistsBijection(S,U) + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Existence of injections and surjections is reflexive and transitive. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_ExistsInjReflexive == + ASSUME NEW S + PROVE ExistsInjection(S,S) + + +THEOREM Fun_ExistsSurjReflexive == + ASSUME NEW S + PROVE ExistsSurjection(S,S) + + +THEOREM Fun_ExistsInjTransitive == + ASSUME NEW S, NEW T, NEW U, + ExistsInjection(S,T), ExistsInjection(T,U) + PROVE ExistsInjection(S,U) + + +THEOREM Fun_ExistsSurjTransitive == + ASSUME NEW S, NEW T, NEW U, + ExistsSurjection(S,T), ExistsSurjection(T,U) + PROVE ExistsSurjection(S,U) + + +----------------------------------------------------------------------------- +(***************************************************************************) +(* `^{\large \vspace{12pt} *) +(* The Cantor-Bernstein-Schroeder theorem. *) +(* \vspace{12pt}}^' *) +(***************************************************************************) + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If there exists an injection from S to T, where T is a subset of S, *) +(* then there exists a bijection from S to T. *) +(* *) +(* A lemma for the Cantor-Bernstein-Schroeder theorem. *) +(* *) +(* This proof is formalized from *) +(* `^\url{http://www.proofwiki.org/wiki/Cantor-Bernstein-Schroeder\_Theorem/Lemma}^' *) +(* retrieved April 29, 2013. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_CantorBernsteinSchroeder_Lemma == + ASSUME NEW S, NEW T, T \subseteq S, ExistsInjection(S,T) + PROVE ExistsBijection(S,T) + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If an injection exists from S to T and an injection exists from T to S, *) +(* then there is a bijection from S to T. *) +(* *) +(* This is the Cantor-Bernstein-Schroeder theorem. *) +(* *) +(* This proof is formalized from *) +(* `^\url{http://www.proofwiki.org/wiki/Cantor-Bernstein-Schroeder_Theorem/Proof_5}^' *) +(* retrieved April 29, 2013. *) +(* *) +(* `. .' 
*) +(***************************************************************************) +THEOREM Fun_CantorBernsteinSchroeder == + ASSUME NEW S, NEW T, + ExistsInjection(S,T), ExistsInjection(T,S) + PROVE ExistsBijection(S,T) + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Applications of the Cantor-Bernstein-Schroeder Theorem. *) +(* If there exists an injection f: A->B and a surjection g: A->B, then *) +(* there exists a bijection between A and B. *) +(* Also, if there are surjections between A and B, then there is a *) +(* bijection. *) +(* *) +(* `. .' *) +(***************************************************************************) + +THEOREM Fun_ExistInjAndSurjThenBij == + ASSUME NEW S, NEW T, + ExistsInjection(S,T), ExistsSurjection(S,T) + PROVE ExistsBijection(S,T) + + + +THEOREM Fun_ExistSurjAndSurjThenBij == + ASSUME NEW S, NEW T, + ExistsSurjection(S,T), ExistsSurjection(T,S) + PROVE ExistsBijection(S,T) + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Equivalences for ExistsBijection. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_ExistsBijEquiv == + ASSUME NEW S, NEW T + PROVE /\ ExistsBijection(S,T) <=> ExistsBijection(T,S) + /\ ExistsBijection(S,T) <=> ExistsInjection(S,T) /\ ExistsInjection(T,S) + /\ ExistsBijection(S,T) <=> ExistsInjection(S,T) /\ ExistsSurjection(S,T) + /\ ExistsBijection(S,T) <=> ExistsInjection(T,S) /\ ExistsSurjection(T,S) + /\ ExistsBijection(S,T) <=> ExistsSurjection(S,T) /\ ExistsSurjection(T,S) + + +----------------------------------------------------------------------------- +(***************************************************************************) +(* `^{\large \vspace{12pt} *) +(* Facts about functions involving integer intervals. *) +(* \vspace{12pt}}^' *) +(***************************************************************************) + + +(***************************************************************************) +(* `. .' *) +(* *) +(* There is a bijection from 1..b-a+1 to a..b for integers a,b with a <= b.*) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_ExistsBijInterval == + ASSUME NEW a \in Int, NEW b \in Int, a <= b + PROVE ExistsBijection(1 .. b-a+1, a .. b) + + +(***************************************************************************) +(* `. .' *) +(* *) +(* There is an injection from 1..n to 1..m iff n \leq m. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_NatInjLeq == + ASSUME NEW n \in Nat, NEW m \in Nat + PROVE ExistsInjection(1..n,1..m) <=> n \leq m + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If a surjection from 1..n to S exists (for some n \in Nat) then a *) +(* bijection from 1..m to S exists (for some m \in Nat) and m \leq n. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_NatSurjImpliesNatBij == + ASSUME NEW S, NEW n \in Nat, ExistsSurjection(1..n,S) + PROVE \E m \in Nat : ExistsBijection(1..m,S) /\ m \leq n + + +(***************************************************************************) +(* Simple corollary. 
*) +(***************************************************************************) +THEOREM Fun_NatSurjEquivNatBij == + ASSUME NEW S + PROVE (\E n \in Nat : ExistsSurjection(1..n,S)) + <=> (\E m \in Nat : ExistsBijection(1..m,S)) + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* For any set S, given n, m \in Nat such that bijections exist from 1..n *) +(* to S and from 1..m to S, then it must be the case that n = m. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_NatBijSame == + ASSUME NEW S, + NEW n \in Nat, ExistsBijection(1..n,S), + NEW m \in Nat, ExistsBijection(1..m,S) + PROVE n = m + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* S is empty iff there exists a bijection from 1..0 to S. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_NatBijEmpty == + ASSUME NEW S + PROVE ExistsBijection(1..0,S) <=> S = {} + + +(***************************************************************************) +(* `. .' *) +(* *) +(* S is a singleton iff there exists a bijection from 1..1 to S. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_NatBijSingleton == + ASSUME NEW S + PROVE ExistsBijection(1..1,S) <=> \E s : S = {s} + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If there exists a bijection from 1..m to S (for some m \in Nat), then *) +(* there exists a bijection from 1..n to T (for some n \in Nat), where T *) +(* is a subset of S. Furthermore n \leq m. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_NatBijSubset == + ASSUME NEW S, NEW m \in Nat, ExistsBijection(1..m,S), + NEW T \in SUBSET S + PROVE \E n \in Nat : ExistsBijection(1..n,T) /\ n \leq m + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If there exists a bijection from 1..m to S (for some m \in Nat), then *) +(* there exists a bijection from 1..(m+1) to S \cup {x}, where x \notin S. *) +(* *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_NatBijAddElem == + ASSUME NEW S, NEW m \in Nat, ExistsBijection(1..m,S), + NEW x, x \notin S + PROVE ExistsBijection(1..(m+1), S \cup {x}) + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If there exists a bijection from 1..m to S (for some m \in Nat), then *) +(* there exists a bijection from 1..(m-1) to S \ {x}, where x \in S. *) +(* *) +(* *) +(* `. .' 
*) +(***************************************************************************) +THEOREM Fun_NatBijSubElem == + ASSUME NEW S, NEW m \in Nat, ExistsBijection(1..m,S), + NEW x, x \in S + PROVE ExistsBijection(1..(m-1), S \ {x}) + + + +============================================================================= +\* Modification History +\* Last modified Thu Feb 13 14:49:08 GMT-03:00 2014 by merz +\* Last modified Tue Jun 11 12:30:05 CEST 2013 by bhargav +\* Last modified Fri May 31 15:27:41 CEST 2013 by bhargav +\* Last modified Fri May 03 12:55:32 PDT 2013 by tomr +\* Created Thu Apr 11 10:36:10 PDT 2013 by tomr diff --git a/x/ccv/provider/keeper/prototyping/model/library/FunctionTheorems_proofs.tla b/x/ccv/provider/keeper/prototyping/model/library/FunctionTheorems_proofs.tla new file mode 100644 index 0000000000..6cb01fde93 --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/library/FunctionTheorems_proofs.tla @@ -0,0 +1,947 @@ +--------------------- MODULE FunctionTheorems_proofs ------------------------ +(***************************************************************************) +(* `^{\large \vspace{12pt} *) +(* Proofs of facts about functions. *) +(* Originally contributed by Tom Rodeheffer, MSR. *) +(* \vspace{12pt}}^' *) +(***************************************************************************) + +EXTENDS + Functions, + Integers, + NaturalsInduction, + WellFoundedInduction, + TLAPS + +(***************************************************************************) +(* `. .' *) +(* *) +(* Function restriction. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_RestrictProperties == + ASSUME NEW S, NEW T, NEW f \in [S -> T], NEW A \in SUBSET S + PROVE /\ Restrict(f,A) \in [A -> T] + /\ \A x \in A : Restrict(f,A)[x] = f[x] +BY DEF Restrict + +(***************************************************************************) +(* `. .' *) +(* *) +(* Range of a function. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_RangeProperties == + ASSUME NEW S, NEW T, NEW f \in [S -> T] + PROVE /\ Range(f) \subseteq T + /\ \A y \in Range(f) : \E x \in S : f[x] = y + /\ f \in Surjection(S, Range(f)) +BY DEF Range, Surjection + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Range of a function. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_InverseProperties == + ASSUME NEW S, NEW T, NEW f \in [S -> T] + PROVE /\ (S = {} => T = {}) => Inverse(f,S,T) \in [T -> S] + /\ \A y \in Range(f) : f[Inverse(f,S,T)[y]] = y +BY DEF Inverse, Range + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Introduction rules for injections, surjections, bijections. *) +(* *) +(* `. .' 
*) +(***************************************************************************) +THEOREM Fun_IsInj == + ASSUME NEW S, NEW T, NEW F \in [S -> T], + \A a,b \in S : F[a] = F[b] => a = b + PROVE F \in Injection(S,T) +BY DEF Injection + + +THEOREM Fun_IsSurj == + ASSUME NEW S, NEW T, NEW F \in [S -> T], + \A t \in T : \E s \in S : F[s] = t + PROVE F \in Surjection(S,T) +BY DEF Surjection + + +THEOREM Fun_IsBij == + ASSUME NEW S, NEW T, NEW F, + \/ F \in Injection(S,T) + \/ (F \in [S -> T] /\ \A a,b \in S : F[a] = F[b] => a = b), + + \/ F \in Surjection(S,T) + \/ (F \in [S -> T] /\ \A t \in T : \E s \in S : F[s] = t) + PROVE F \in Bijection(S,T) +BY DEF Bijection, Injection, Surjection + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Properties of injections, surjections, and bijections. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_InjectionProperties == + ASSUME NEW S, NEW T, NEW F \in Injection(S,T) + PROVE /\ F \in [S -> T] + /\ \A a,b \in S : F[a] = F[b] => a = b +BY DEF Injection + + +THEOREM Fun_SurjectionProperties == + ASSUME NEW S, NEW T, NEW F \in Surjection(S,T) + PROVE /\ F \in [S -> T] + /\ \A t \in T : \E s \in S : F[s] = t + /\ Range(F) = T +BY DEF Surjection, Range + + +THEOREM Fun_BijectionProperties == + ASSUME NEW S, NEW T, NEW F \in Bijection(S,T) + PROVE /\ F \in [S -> T] + /\ F \in Injection(S,T) + /\ F \in Surjection(S,T) + /\ \A a,b \in S : F[a] = F[b] => a = b + /\ \A t \in T : \E s \in S : F[s] = t +BY DEF Bijection, Injection, Surjection + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* A surjection in [S -> T] such that there is no surjection from any *) +(* subset of S to T is a bijection. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_SmallestSurjectionIsBijection == + ASSUME NEW S, NEW T, NEW f \in Surjection(S,T), + \A U \in SUBSET S : U # S => Surjection(U,T) = {} + PROVE f \in Bijection(S,T) +<1>1. f \in [S -> T] + BY Fun_SurjectionProperties +<1>2. SUFFICES ASSUME f \notin Injection(S,T) PROVE FALSE + BY Fun_IsBij +<1>3. PICK a,b \in S : a # b /\ f[a] = f[b] + BY <1>1, <1>2, Fun_IsInj +<1>. DEFINE U == S \ {b} +<1>4. U \in SUBSET S /\ U # S + OBVIOUS +<1>. DEFINE g == [x \in U |-> f[x]] +<1>5. g \in Surjection(U,T) + <2>1. g \in [U -> T] BY <1>1 + <2>2. ASSUME NEW t \in T PROVE \E u \in U : g[u] = t + <3>1. CASE t = f[b] BY <1>3, <3>1 + <3>2. CASE t # f[b] + <4>1. PICK s \in S : f[s] = t + BY SMT, Fun_SurjectionProperties \** Zenon/Isa fail ?? + <4>2. s \in U BY <3>2, <4>1 + <4>. QED BY <4>1, <4>2 + <3>3. QED BY <3>1, <3>2 + <2>3. QED BY <2>1, <2>2, Fun_IsSurj +<1>. QED BY <1>4, <1>5 + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Transitivity of injections, surjections, bijections. *) +(* *) +(* `. .' 
*) +(***************************************************************************) +THEOREM Fun_InjTransitive == + ASSUME NEW S, NEW T, NEW U, + NEW F \in Injection(S,T), + NEW G \in Injection(T,U) + PROVE [s \in S |-> G[F[s]]] \in Injection(S,U) +BY DEF Injection + + +THEOREM Fun_SurjTransitive == + ASSUME NEW S, NEW T, NEW U, + NEW F \in Surjection(S,T), + NEW G \in Surjection(T,U) + PROVE [s \in S |-> G[F[s]]] \in Surjection(S,U) +BY DEF Surjection + + +THEOREM Fun_BijTransitive == + ASSUME NEW S, NEW T, NEW U, + NEW F \in Bijection(S,T), + NEW G \in Bijection(T,U) + PROVE [s \in S |-> G[F[s]]] \in Bijection(S,U) +BY Fun_SurjTransitive, Fun_InjTransitive DEF Bijection + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The inverse of a surjection is an injection and vice versa. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_SurjInverse == + ASSUME NEW S, NEW T, NEW f \in Surjection(S,T) + PROVE Inverse(f,S,T) \in Injection(T,S) +BY DEF Inverse, Surjection, Injection, Range + + +THEOREM Fun_InjInverse == + ASSUME NEW S, NEW T, NEW f \in Injection(S,T), S = {} => T = {} + PROVE Inverse(f,S,T) \in Surjection(T,S) +<1>. DEFINE g == Inverse(f,S,T) +<1>0. f \in [S -> T] BY DEF Injection +<1>1. g \in [T -> S] BY <1>0, Fun_InverseProperties +<1>2. ASSUME NEW s \in S PROVE \E t \in T : g[t] = s + <2>10. g[f[s]] = s BY DEF Inverse, Range, Injection + <2>. QED BY <2>10, <1>0 +<1>. QED BY <1>1, <1>2 DEF Surjection + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Properties of the inverse of a bijection. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_BijInverse == + ASSUME NEW S, NEW T, NEW f \in Bijection(S,T) + PROVE /\ Inverse(f,S,T) \in Bijection(T,S) + /\ \A s \in S : Inverse(f,S,T)[f[s]] = s + /\ Inverse(Inverse(f,S,T), T,S) = f + +<1>. DEFINE g == Inverse(f,S,T) +<1>1. f \in [S -> T] BY DEF Bijection, Injection +<1>2. f \in Surjection(S,T) BY DEF Bijection +<1>3. \A a,b \in S : f[a] = f[b] => a = b BY DEF Bijection, Injection +<1>4. g \in Injection(T,S) BY <1>2, Fun_SurjInverse + +<1>5. \A t \in T : f[g[t]] = t BY <1>2 DEF Surjection, Inverse, Range +<1>6. \A s \in S : g[f[s]] = s BY <1>1, <1>3 DEF Inverse, Range + +<1>7. \A a,b \in T : g[a] = g[b] => a = b BY <1>5 +<1>8. \A s \in S : \E t \in T : g[t] = s BY <1>1, <1>6 + +<1>9. g \in Bijection(T,S) BY <1>4, <1>8 DEF Bijection, Injection, Surjection + +<1>10. Inverse(g,T,S) = f + <2>1. ASSUME NEW s \in S PROVE f[s] = CHOOSE t \in T : s \in Range(g) => g[t] = s + <3>1. PICK a \in T : g[a] = s BY <1>9 DEF Bijection, Surjection + <3>2. \A b \in T : g[b] = s => a = b BY <3>1, <1>7 + <3>3. f[s] = a BY <3>1, <1>5 + <3>4. s \in Range(g) BY <3>1, <1>4 DEF Injection, Range + <3>. QED BY <3>1, <3>2, <3>3, <3>4 + <2>. QED BY <2>1, <1>1 DEF Inverse +<1>. QED BY <1>9, <1>6, <1>10 + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The restriction of a bijection is a bijection. *) +(* *) +(* `. .' 
*) +(***************************************************************************) +THEOREM Fun_BijRestrict == + ASSUME NEW S, NEW T, NEW F \in Bijection(S,T), + NEW R \in SUBSET S + PROVE Restrict(F, R) \in Bijection(R, Range(Restrict(F, R))) +BY DEF Bijection, Injection, Surjection, Range, Restrict + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Given F an injection from S to T, then F is a bijection from S to F(S). *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_InjMeansBijImage == + ASSUME NEW S, NEW T, NEW F \in Injection(S,T) + PROVE F \in Bijection(S, Range(F)) +BY DEF Bijection, Injection, Surjection, Range + + + + +----------------------------------------------------------------------------- +(***************************************************************************) +(* `^{\large \vspace{12pt} *) +(* Facts about exists jections. *) +(* \vspace{12pt}}^' *) +(***************************************************************************) + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Definitions restated as facts. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_ExistsInj == + \A S,T : ExistsInjection(S,T) <=> Injection(S,T) # {} +BY DEF ExistsInjection + + +THEOREM Fun_ExistsSurj == + \A S,T : ExistsSurjection(S,T) <=> Surjection(S,T) # {} +BY DEF ExistsSurjection + + +THEOREM Fun_ExistsBij == + \A S,T : ExistsBijection(S,T) <=> Bijection(S,T) # {} +BY DEF ExistsBijection + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* There is a surjection from any set S to any non-empty subset T of S. *) +(* (Note that there cannot be a surjection to {} except if S is empty.) *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_ExistsSurjSubset == + ASSUME NEW S, NEW T \in SUBSET S, T # {} + PROVE ExistsSurjection(S,T) +<1>. PICK x \in T : TRUE OBVIOUS +<1>. [s \in S |-> IF s \in T THEN s ELSE x] \in Surjection(S,T) + BY DEF Surjection +<1>. QED BY DEF ExistsSurjection + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If there is a surjection from S to T, then there is an injection from T *) +(* to S. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_ExistsSurjMeansExistsRevInj == + ASSUME NEW S, NEW T, ExistsSurjection(S,T) + PROVE ExistsInjection(T,S) +BY Fun_SurjInverse DEF ExistsSurjection, ExistsInjection + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* ExistsBijection is reflexive, symmetric, and transitive. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_ExistsBijReflexive == + ASSUME NEW S + PROVE ExistsBijection(S,S) +<1>. [s \in S |-> s] \in Bijection(S,S) BY DEF Bijection, Injection, Surjection +<1>. 
QED BY DEF ExistsBijection + + +THEOREM Fun_ExistsBijSymmetric == + ASSUME NEW S, NEW T, ExistsBijection(S,T) + PROVE ExistsBijection(T,S) +BY Fun_BijInverse DEF ExistsBijection + + +THEOREM Fun_ExistsBijTransitive == + ASSUME NEW S, NEW T, NEW U, ExistsBijection(S,T), ExistsBijection(T,U) + PROVE ExistsBijection(S,U) +BY Fun_BijTransitive DEF ExistsBijection + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Existence of injections and surjections is reflexive and transitive. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_ExistsInjReflexive == + ASSUME NEW S + PROVE ExistsInjection(S,S) +BY Fun_ExistsBijReflexive DEF ExistsBijection, ExistsInjection, Bijection + + +THEOREM Fun_ExistsSurjReflexive == + ASSUME NEW S + PROVE ExistsSurjection(S,S) +BY Fun_ExistsBijReflexive DEF ExistsBijection, ExistsSurjection, Bijection + + +THEOREM Fun_ExistsInjTransitive == + ASSUME NEW S, NEW T, NEW U, + ExistsInjection(S,T), ExistsInjection(T,U) + PROVE ExistsInjection(S,U) +BY Fun_InjTransitive DEF ExistsInjection + + +THEOREM Fun_ExistsSurjTransitive == + ASSUME NEW S, NEW T, NEW U, + ExistsSurjection(S,T), ExistsSurjection(T,U) + PROVE ExistsSurjection(S,U) +BY Fun_SurjTransitive DEF ExistsSurjection + + +----------------------------------------------------------------------------- +(***************************************************************************) +(* `^{\large \vspace{12pt} *) +(* The Cantor-Bernstein-Schroeder theorem. *) +(* \vspace{12pt}}^' *) +(***************************************************************************) + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If there exists an injection from S to T, where T is a subset of S, *) +(* then there exists a bijection from S to T. *) +(* *) +(* A lemma for the Cantor-Bernstein-Schroeder theorem. *) +(* *) +(* This proof is formalized from *) +(* `^\url{http://www.proofwiki.org/wiki/Cantor-Bernstein-Schroeder\_Theorem/Lemma}^' *) +(* retrieved April 29, 2013. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_CantorBernsteinSchroeder_Lemma == + ASSUME NEW S, NEW T, T \subseteq S, ExistsInjection(S,T) + PROVE ExistsBijection(S,T) +PROOF + <1> PICK F \in Injection(S,T) : TRUE BY Fun_ExistsInj + + <1>1. /\ F \in [S -> T] + /\ \A a,b \in S : F[a] = F[b] => a = b + BY Fun_InjectionProperties + + (*************************************************************************) + (* Pick Y as S excluding T. *) + (*************************************************************************) + <1>2. PICK Y : Y = S \ T OBVIOUS + + (*************************************************************************) + (* Define Ci[0] as Y, and Ci[i+1] as the image of Ci[i] under F. *) + (*************************************************************************) + <1> DEFINE Ci[i \in Nat] == + IF i = 0 THEN Y ELSE {F[s] : s \in Ci[i-1]} + <1> HIDE DEF Ci + + <1>3. \A i \in Nat : Ci[i] = + IF i = 0 THEN Y ELSE {F[s] : s \in Ci[i-1]} + (***********************************************************************) + (* Use NatInductiveDef to prove that Ci equals its definition. 
*) + (***********************************************************************) + <2> DEFINE + f0 == Y + Def(v,i) == {F[s] : s \in v} + f == CHOOSE f : f = [i \in Nat |-> IF i = 0 THEN f0 ELSE Def(f[i-1],i)] + <2> SUFFICES \A i \in Nat : f[i] = IF i = 0 THEN f0 ELSE Def(f[i-1],i) BY DEF Ci + <2> HIDE DEF f0, Def, f + <2> SUFFICES NatInductiveDefConclusion(f,f0,Def) BY DEF NatInductiveDefConclusion + <2> SUFFICES NatInductiveDefHypothesis(f,f0,Def) BY NatInductiveDef + <2> QED BY DEF NatInductiveDefHypothesis, f + + (*************************************************************************) + (* Applying F to an element of Ci[i] produces an element of Ci[i+1]. *) + (*************************************************************************) + <1>4. ASSUME NEW i \in Nat, NEW s \in Ci[i] + PROVE F[s] \in Ci[i+1] + BY <1>3, SMT + + (*************************************************************************) + (* Each element of Ci[i+1] is the application of F to some element in *) + (* Ci[i]. *) + (*************************************************************************) + <1>5. ASSUME NEW i \in Nat, NEW t \in Ci[i+1] + PROVE \E s \in Ci[i] : F[s] = t + BY <1>3, SMT + + (*************************************************************************) + (* Each Ci[i] \subseteq S. *) + (*************************************************************************) + <1>6. \A i \in Nat : Ci[i] \subseteq S + <2> DEFINE Prop(i) == Ci[i] \subseteq S + <2> SUFFICES \A i \in Nat : Prop(i) OBVIOUS + <2>1. Prop(0) BY <1>2, <1>3 + <2>2. ASSUME NEW i \in Nat, Prop(i) PROVE Prop(i+1) + <3> SUFFICES ASSUME NEW t \in Ci[i+1] PROVE t \in S OBVIOUS + <3>1. PICK s \in Ci[i] : F[s] = t BY <1>5 + <3>2. s \in S BY <2>2 + <3> QED BY <3>1, <3>2, <1>1 + <2> HIDE DEF Prop + <2> QED BY <2>1, <2>2, NatInduction, Isa + + (*************************************************************************) + (* Pick C as the union of all Ci[i]. *) + (*************************************************************************) + <1>7. PICK C : C = UNION {Ci[i] : i \in Nat} OBVIOUS + <1>8. C \subseteq S BY <1>6, <1>7 + + (*************************************************************************) + (* Pick FC as the image of C under F. *) + (*************************************************************************) + <1>9. PICK FC : FC = {F[c] : c \in C} OBVIOUS + <1>10. FC \subseteq T BY <1>1, <1>8, <1>9, Isa + + (*************************************************************************) + (* C = Y \cup FC because Ci[0] = Y and Ci[i+1] = image of Ci[i] under F. *) + (*************************************************************************) + <1>11. C = Y \cup FC + <2>1. ASSUME NEW c \in C PROVE c \in Y \cup FC + <3>1. PICK i \in Nat : c \in Ci[i] BY <1>7 + <3>2. CASE i = 0 BY <3>1, <3>2, <1>3 + <3>3. CASE i # 0 + <4>1. PICK s \in Ci[i-1] : F[s] = c BY <3>1, <3>3, <1>5, SMT + <4>2. s \in C BY <3>3, <1>7, SMT + <4> QED BY <4>1, <4>2, <1>9 + <3> QED BY <3>2, <3>3 + <2>2. ASSUME NEW c \in Y \cup FC PROVE c \in C + <3>1. CASE c \in Y BY <3>1, <1>3, <1>7 + <3>2. CASE c \in FC + <4>1. PICK s \in C : F[s] = c BY <3>2, <1>9 + <4>2. PICK i \in Nat : s \in Ci[i] BY <4>1, <1>7 + <4>3. F[s] \in Ci[i+1] BY <4>2, <1>4 + <4> QED BY <4>1, <4>3, <1>7, SMT + <3> QED BY <3>1, <3>2 + <2> QED BY <2>1, <2>2 + + (*************************************************************************) + (* S \ C is the same as T \ FC. *) + (*************************************************************************) + <1>12. 
S \ C = T \ FC BY <1>2, <1>11 + + (*************************************************************************) + (* Pick H as F on C and the identity on S \ C. Since F (restricted to *) + (* C) is a bijection from C to FC and S \ C = T \ FC, this makes H a *) + (* bijection from S to T. *) + (*************************************************************************) + <1>13. PICK H : H = [s \in S |-> IF s \in C THEN F[s] ELSE s] OBVIOUS + <1>14. H \in Bijection(S,T) + (***********************************************************************) + (* A useful lemma. If a \in C and b \notin C, then H[a] # H[b]. *) + (***********************************************************************) + <2>1. ASSUME NEW a \in S, NEW b \in S, a \in C, b \notin C PROVE H[a] # H[b] + <3>1. H[a] \in FC BY <2>1, <1>1, <1>9, <1>13 + <3>2. H[b] \in T \ FC BY <2>1, <1>12, <1>13 + <3> QED BY <3>1, <3>2 + + <2>2. H \in [S -> T] + <3> SUFFICES ASSUME NEW s \in S PROVE H[s] \in T BY <1>13 + <3>1. CASE s \in C BY <3>1, <1>1, <1>10, <1>13 + <3>2. CASE s \notin C BY <3>2, <1>12, <1>13 + <3> QED BY <3>1, <3>2 + + <2>3. ASSUME NEW a \in S, NEW b \in S, H[a] = H[b] PROVE a = b + <3> H[a] = H[b] BY <2>3 + <3>1. CASE a \in C /\ b \in C BY <3>1, <1>1, <1>13 + <3>2. CASE a \in C /\ b \notin C BY <3>2, <2>1 (* impossible by lemma *) + <3>3. CASE a \notin C /\ b \in C BY <3>3, <2>1 (* impossible by lemma *) + <3>4. CASE a \notin C /\ b \notin C BY <3>4, <1>13 + <3> QED BY <3>1, <3>2, <3>3, <3>4 + + <2>4. ASSUME NEW t \in T PROVE \E s \in S : H[s] = t + <3>1. CASE t \in FC BY <3>1, <1>8, <1>9, <1>13 + <3>2. CASE t \notin FC BY <3>2, <1>12, <1>13 + <3> QED BY <3>1, <3>2 + + <2> QED BY <2>2, <2>3, <2>4, Fun_IsBij + + <1> QED BY <1>14, Fun_ExistsBij + + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If an injection exists from S to T and an injection exists from T to S, *) +(* then there is a bijection from S to T. *) +(* *) +(* This is the Cantor-Bernstein-Schroeder theorem. *) +(* *) +(* This proof is formalized from *) +(* `^\url{http://www.proofwiki.org/wiki/Cantor-Bernstein-Schroeder_Theorem/Proof_5}^' *) +(* retrieved April 29, 2013. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_CantorBernsteinSchroeder == + ASSUME NEW S, NEW T, + ExistsInjection(S,T), ExistsInjection(T,S) + PROVE ExistsBijection(S,T) + +<1>1. PICK F : F \in Injection(S,T) BY DEF ExistsInjection +<1>2. PICK G : G \in Injection(T,S) BY DEF ExistsInjection +<1>. DEFINE GF == [s \in S |-> G[F[s]]] +<1>3. Range(G) \subseteq S BY <1>2, Fun_RangeProperties DEF Injection +<1>4. GF \in Injection(S, Range(G)) BY <1>1, <1>2 DEF Injection, Range +<1>5. ExistsBijection(S, Range(G)) + BY <1>3, <1>4, Fun_CantorBernsteinSchroeder_Lemma DEF ExistsInjection +<1>6. ExistsBijection(T, Range(G)) + BY <1>2, Fun_InjMeansBijImage DEF ExistsBijection +<1>. QED BY <1>5, <1>6, Fun_ExistsBijSymmetric, Fun_ExistsBijTransitive + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Applications of the Cantor-Bernstein-Schroeder Theorem. *) +(* If there exists an injection f: A->B and a surjection g: A->B, then *) +(* there exists a bijection between A and B. *) +(* Also, if there are surjections between A and B, then there is a *) +(* bijection. *) +(* *) +(* `. .' 
*) +(***************************************************************************) + +THEOREM Fun_ExistInjAndSurjThenBij == + ASSUME NEW S, NEW T, + ExistsInjection(S,T), ExistsSurjection(S,T) + PROVE ExistsBijection(S,T) +<1>. ExistsInjection(T,S) BY Fun_SurjInverse DEF ExistsInjection, ExistsSurjection +<1>. QED BY Fun_CantorBernsteinSchroeder + + + +THEOREM Fun_ExistSurjAndSurjThenBij == + ASSUME NEW S, NEW T, + ExistsSurjection(S,T), ExistsSurjection(T,S) + PROVE ExistsBijection(S,T) +<1>. ExistsInjection(S,T) BY Fun_SurjInverse DEF ExistsInjection, ExistsSurjection +<1>2. QED BY Fun_ExistInjAndSurjThenBij + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Equivalences for ExistsBijection. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_ExistsBijEquiv == + ASSUME NEW S, NEW T + PROVE /\ ExistsBijection(S,T) <=> ExistsBijection(T,S) + /\ ExistsBijection(S,T) <=> ExistsInjection(S,T) /\ ExistsInjection(T,S) + /\ ExistsBijection(S,T) <=> ExistsInjection(S,T) /\ ExistsSurjection(S,T) + /\ ExistsBijection(S,T) <=> ExistsInjection(T,S) /\ ExistsSurjection(T,S) + /\ ExistsBijection(S,T) <=> ExistsSurjection(S,T) /\ ExistsSurjection(T,S) + +<1>1. ExistsBijection(S,T) <=> ExistsBijection(T,S) + BY Fun_ExistsBijSymmetric +<1>2. ExistsInjection(S,T) /\ ExistsInjection(T,S) => ExistsBijection(S,T) + BY Fun_CantorBernsteinSchroeder +<1>3. \A S1, T1 : ExistsBijection(S1,T1) => ExistsSurjection(S1,T1) + BY DEF ExistsBijection, ExistsSurjection, Bijection +<1>4. \A S1,T1 : ExistsSurjection(S1,T1) => ExistsInjection(T1,S1) + BY Fun_ExistsSurjMeansExistsRevInj +<1> QED BY <1>1, <1>2, <1>3, <1>4 + + +----------------------------------------------------------------------------- +(***************************************************************************) +(* `^{\large \vspace{12pt} *) +(* Facts about jections involving 1..n. *) +(* \vspace{12pt}}^' *) +(***************************************************************************) + + +(***************************************************************************) +(* `. .' *) +(* *) +(* There is a bijection from 1..b-a+1 to a..b for integers a,b with a <= b.*) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_ExistsBijInterval == + ASSUME NEW a \in Int, NEW b \in Int, a <= b + PROVE ExistsBijection(1 .. b-a+1, a .. b) + +<1>. DEFINE f == [i \in 1 .. b-a+1 |-> i+a-1] +<1>1. f \in [1 .. b-a+1 -> a .. b] BY SMT +<1>2. f \in Injection(1 .. b-a+1, a .. b) BY SMT DEF Injection +<1>3. f \in Surjection(1 .. b-a+1, a .. b) BY SMT DEF Surjection +<1>. QED BY <1>1, <1>2, <1>3 DEF ExistsBijection, Bijection + + +(***************************************************************************) +(* `. .' *) +(* *) +(* There is an injection from 1..n to 1..m iff n \leq m. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_NatInjLeq == + ASSUME NEW n \in Nat, NEW m \in Nat + PROVE ExistsInjection(1..n,1..m) <=> n \leq m +PROOF + (*************************************************************************) + (* n \leq m means Injection exists. This part is easy. *) + (*************************************************************************) + <1>1. 
ASSUME n \leq m PROVE [i \in 1..n |-> i] \in Injection(1..n, 1..m)
+ BY SMT, <1>1 DEF Injection
+
+ (*************************************************************************)
+ (* Injection exists means n \leq m. This part is harder. *)
+ (*************************************************************************)
+ <1>2. ASSUME ExistsInjection(1..n,1..m) PROVE n \leq m
+ <2>. DEFINE P(mm) == \A nn \in Nat : nn > mm => Injection(1..nn, 1..mm) = {}
+ <2>1. SUFFICES \A mm \in Nat : P(mm) BY SMT, <1>2 DEF ExistsInjection
+ <2>2. P(0) BY Z3 DEF Injection
+ <2>3. ASSUME NEW mm \in Nat, P(mm) PROVE P(mm+1)
+ <3>1. SUFFICES ASSUME NEW nn \in Nat, nn > mm+1,
+ NEW f \in Injection(1..nn, 1..mm+1)
+ PROVE FALSE
+ OBVIOUS
+ <3>2. ASSUME NEW i \in 1..nn, f[i] = mm+1 PROVE FALSE
+ <4>. DEFINE g == [j \in 1..nn-1 |-> IF j < i THEN f[j] ELSE f[j+1]]
+ <4>1. nn-1 \in Nat /\ nn-1 > mm BY SMT, <3>1
+ <4>2. g \in Injection(1..nn-1, 1..mm) BY SMT, <3>2 DEF Injection
+ <4>. QED BY <4>1, <4>2, P(mm) DEF Injection
+ <3>3. ASSUME ~\E i \in 1..nn : f[i] = mm+1 PROVE FALSE
+ <4>1. f \in Injection(1..nn, 1..mm) BY SMT, <3>3 DEF Injection
+ <4>. QED BY SMT, <4>1, <3>1, P(mm)
+ <3>. QED BY <3>2, <3>3
+ <2>. QED BY Isa, NatInduction, <2>2, <2>3
+
+ <1> QED BY <1>1, <1>2 DEF ExistsInjection
+
+
+
+(***************************************************************************)
+(* `. .' *)
+(* *)
+(* If a surjection from 1..n to S exists (for some n \in Nat) then a *)
+(* bijection from 1..m to S exists (for some m \in Nat) and m \leq n. *)
+(* *)
+(* `. .' *)
+(***************************************************************************)
+THEOREM Fun_NatSurjImpliesNatBij ==
+ ASSUME NEW S, NEW n \in Nat, ExistsSurjection(1..n,S)
+ PROVE \E m \in Nat : ExistsBijection(1..m,S) /\ m \leq n
+
+ (*************************************************************************)
+ (* Pick the smallest m \in Nat for which there is a surjection from *)
+ (* 1..m to S. *)
+ (*************************************************************************)
+<1>1. PICK m \in Nat :
+ /\ ExistsSurjection(1..m, S)
+ /\ \A k \in Nat : k < m => ~ExistsSurjection(1..k, S)
+ <2>. DEFINE NN == { m \in Nat : ExistsSurjection(1..m, S) }
+ <2>1. PICK m \in NN : \A k \in NN : <<k, m>> \notin OpToRel(<, Nat)
+ BY WFMin, NatLessThanWellFounded
+ <2>. QED
+ BY <2>1 DEF OpToRel
+
+<1>2. m <= n BY SMT, <1>1
+ (*************************************************************************)
+ (* Any surjection from 1..m to S is bijective. *)
+ (*************************************************************************)
+<1>3. PICK f \in Surjection(1..m, S) : TRUE BY <1>1 DEF ExistsSurjection
+<1>4. ASSUME f \notin Injection(1..m, S) PROVE FALSE
+ <2>1. f \in [1..m -> S] BY <1>3 DEF Surjection
+ <2>2. PICK i,j \in 1..m : i < j /\ f[i] = f[j]
+ <3>1. PICK ii,jj \in 1..m : ii # jj /\ f[ii] = f[jj]
+ BY <2>1, <1>4 DEF Injection
+ <3>2. CASE ii < jj BY <3>1, <3>2
+ <3>3. CASE jj < ii BY <3>1, <3>3
+ <3>. QED BY SMT, <3>1, <3>2, <3>3
+ <2>3. m-1 \in Nat BY SMT, <2>2
+ <2>. DEFINE g == [k \in 1..m-1 |-> IF k=j THEN f[m] ELSE f[k]]
+ <2>4. g \in Surjection(1..m-1, S)
+ <3>1. g \in [1..m-1 -> S] BY SMT, <2>1
+ <3>2. ASSUME NEW s \in S PROVE \E k \in 1..m-1 : g[k] = s
+ <4>. PICK l \in 1..m : f[l] = s BY <1>3 DEF Surjection
+ <4>. QED BY SMT, <2>2
+ <3>. QED BY <3>1, <3>2 DEF Surjection
+ <2>. QED BY SMT, <2>3, <2>4, <1>1 DEF ExistsSurjection
+
+<1>. QED BY <1>2, <1>3, <1>4 DEF ExistsBijection, Bijection
+
+
+(***************************************************************************)
+(* Simple corollary. 
*) +(***************************************************************************) +THEOREM Fun_NatSurjEquivNatBij == + ASSUME NEW S + PROVE (\E n \in Nat : ExistsSurjection(1..n,S)) + <=> (\E m \in Nat : ExistsBijection(1..m,S)) +BY Fun_NatSurjImpliesNatBij, Fun_ExistsBijEquiv + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* For any set S, given n, m \in Nat such that bijections exist from 1..n *) +(* to S and from 1..m to S, then it must be the case that n = m. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_NatBijSame == + ASSUME NEW S, + NEW n \in Nat, ExistsBijection(1..n,S), + NEW m \in Nat, ExistsBijection(1..m,S) + PROVE n = m +BY SMT, Fun_NatInjLeq, Fun_ExistsBijEquiv, Fun_ExistsBijTransitive + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* S is empty iff there exists a bijection from 1..0 to S. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_NatBijEmpty == + ASSUME NEW S + PROVE ExistsBijection(1..0,S) <=> S = {} + +<1>1. ASSUME ExistsBijection(1..0, S), S # {} PROVE FALSE + <2>. ExistsInjection(S, 1..0) BY <1>1, Fun_ExistsBijEquiv + <2>. QED BY SMT, <1>1 DEF ExistsInjection, Injection +<1>2. ASSUME S = {} PROVE ExistsBijection(1..0, S) + BY SMT, <1>2, Fun_ExistsBijReflexive +<1>3. QED BY <1>1, <1>2 + + +(***************************************************************************) +(* `. .' *) +(* *) +(* S is a singleton iff there exists a bijection from 1..1 to S. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_NatBijSingleton == + ASSUME NEW S + PROVE ExistsBijection(1..1,S) <=> \E s : S = {s} +<1>1. ASSUME NEW f \in Bijection(1..1, S) PROVE \E s : S = {s} + BY SMT DEF Bijection, Injection, Surjection +<1>2. ASSUME NEW s, S = {s} PROVE [i \in 1..1 |-> s] \in Bijection(1..1, S) + BY SMT, <1>2 DEF Bijection, Injection, Surjection +<1>. QED BY <1>1, <1>2 DEF ExistsBijection + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If there exists a bijection from 1..m to S (for some m \in Nat), then *) +(* there exists a bijection from 1..n to T (for some n \in Nat), where T *) +(* is a subset of S. Furthermore n \leq m. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_NatBijSubset == + ASSUME NEW S, NEW m \in Nat, ExistsBijection(1..m,S), + NEW T \in SUBSET S + PROVE \E n \in Nat : ExistsBijection(1..n,T) /\ n \leq m + +<1>1. CASE T = {} BY Force, <1>1, Fun_NatBijEmpty +<1>2. CASE T # {} + <2>0. ExistsSurjection(1..m, S) BY Fun_ExistsBijEquiv + <2>1. ExistsSurjection(S, T) BY <1>2, Fun_ExistsSurjSubset + <2>2. ExistsSurjection(1..m, T) BY <2>0, <2>1, Fun_ExistsSurjTransitive + <2>. QED BY <2>2, Fun_NatSurjImpliesNatBij +<1> QED BY <1>1, <1>2 + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If there exists a bijection from 1..m to S (for some m \in Nat), then *) +(* there exists a bijection from 1..(m+1) to S \cup {x}, where x \notin S. *) +(* *) +(* *) +(* `. .' 
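+(* The witness is explicit: a bijection F from 1..m to S extends to *)
+(* [i \in 1..(m+1) |-> IF i <= m THEN F[i] ELSE x], and the proof below *)
+(* checks that this is a bijection from 1..(m+1) to S \cup {x}. *)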
*) +(***************************************************************************) +THEOREM Fun_NatBijAddElem == + ASSUME NEW S, NEW m \in Nat, ExistsBijection(1..m,S), + NEW x, x \notin S + PROVE ExistsBijection(1..(m+1), S \cup {x}) + +<1>1. PICK F \in Bijection(1..m, S) : TRUE BY DEF ExistsBijection +<1>2. F \in [1..m -> S] BY <1>1 DEF Bijection, Injection +<1>3. \A s \in S : \E i \in 1..m : F[i] = s BY <1>1 DEF Bijection, Surjection +<1>4. \A i,j \in 1..m : F[i] = F[j] => i = j BY <1>1 DEF Bijection, Injection + +<1>. DEFINE G == [i \in 1..m+1 |-> IF i <= m THEN F[i] ELSE x] +<1>10. G \in [1..m+1 -> S \cup {x}] BY SMT, <1>2 +<1>20. ASSUME NEW t \in S \cup {x} PROVE \E i \in 1..m+1 : G[i] = t BY SMT, <1>3 +<1>30. ASSUME NEW i \in 1..m+1, NEW j \in 1..m+1, G[i] = G[j] PROVE i = j + BY SMT, <1>2, <1>4, <1>30 +<1>40. G \in Bijection(1..m+1, S \cup {x}) + BY <1>10, <1>20, <1>30 DEF Bijection, Injection, Surjection +<1>. QED BY <1>40 DEF ExistsBijection + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If there exists a bijection from 1..m to S (for some m \in Nat), then *) +(* there exists a bijection from 1..(m-1) to S \ {x}, where x \in S. *) +(* *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_NatBijSubElem == + ASSUME NEW S, NEW m \in Nat, ExistsBijection(1..m,S), + NEW x, x \in S + PROVE ExistsBijection(1..(m-1), S \ {x}) + +<1>1. PICK n \in Nat : ExistsBijection(1..n, S \ {x}) BY Fun_NatBijSubset +<1>2. ExistsBijection(1..n+1, (S \ {x}) \cup {x}) BY <1>1, Fun_NatBijAddElem +<1>3. ExistsBijection(1..n+1, S) BY <1>2 +<1>4. n = m-1 BY SMT, <1>3, Fun_NatBijSame +<1>. QED BY <1>1, <1>4 + + + +============================================================================= +\* Modification History +\* Last modified Thu Feb 13 14:51:29 GMT-03:00 2014 by merz +\* Last modified Tue Jun 11 12:30:05 CEST 2013 by bhargav +\* Last modified Fri May 31 15:27:41 CEST 2013 by bhargav +\* Last modified Fri May 03 12:55:32 PDT 2013 by tomr +\* Created Thu Apr 11 10:36:10 PDT 2013 by tomr diff --git a/x/ccv/provider/keeper/prototyping/model/library/Functions.tla b/x/ccv/provider/keeper/prototyping/model/library/Functions.tla new file mode 100644 index 0000000000..a96195acb6 --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/library/Functions.tla @@ -0,0 +1,63 @@ +------------------------------ MODULE Functions ----------------------------- +(***************************************************************************) +(* `^{\large\bf \vspace{12pt} *) +(* Notions about functions including injection, surjection, and bijection.*) +(* Originally contributed by Tom Rodeheffer, MSR. *) +(* \vspace{12pt}}^' *) +(***************************************************************************) + + +(***************************************************************************) +(* Restriction of a function to a set (should be a subset of the domain). *) +(***************************************************************************) +Restrict(f,S) == [ x \in S |-> f[x] ] + +(***************************************************************************) +(* Range of a function. *) +(* Note: The image of a set under function f can be defined as *) +(* Range(Restrict(f,S)). 
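+(* For instance, with f == [i \in 1..3 |-> 2*i] one has *)
+(* Range(f) = {2, 4, 6} and Range(Restrict(f, {1, 2})) = {2, 4}. *)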
*) +(***************************************************************************) +Range(f) == { f[x] : x \in DOMAIN f } + + +(***************************************************************************) +(* The inverse of a function. *) +(***************************************************************************) +Inverse(f,S,T) == [t \in T |-> CHOOSE s \in S : t \in Range(f) => f[s] = t] + + +(***************************************************************************) +(* A map is an injection iff each element in the domain maps to a distinct *) +(* element in the range. *) +(***************************************************************************) +Injection(S,T) == { M \in [S -> T] : \A a,b \in S : M[a] = M[b] => a = b } + + +(***************************************************************************) +(* A map is a surjection iff for each element in the range there is some *) +(* element in the domain that maps to it. *) +(***************************************************************************) +Surjection(S,T) == { M \in [S -> T] : \A t \in T : \E s \in S : M[s] = t } + + +(***************************************************************************) +(* A map is a bijection iff it is both an injection and a surjection. *) +(***************************************************************************) +Bijection(S,T) == Injection(S,T) \cap Surjection(S,T) + + +(***************************************************************************) +(* An injection, surjection, or bijection exists if the corresponding set *) +(* is nonempty. *) +(***************************************************************************) +ExistsInjection(S,T) == Injection(S,T) # {} +ExistsSurjection(S,T) == Surjection(S,T) # {} +ExistsBijection(S,T) == Bijection(S,T) # {} + + +============================================================================= +\* Modification History +\* Last modified Wed Jul 10 20:32:37 CEST 2013 by merz +\* Last modified Wed Jun 05 12:14:19 CEST 2013 by bhargav +\* Last modified Fri May 03 12:55:35 PDT 2013 by tomr +\* Created Thu Apr 11 10:30:48 PDT 2013 by tomr diff --git a/x/ccv/provider/keeper/prototyping/model/library/JectionThm.tla b/x/ccv/provider/keeper/prototyping/model/library/JectionThm.tla new file mode 100644 index 0000000000..1ab1d0cbd4 --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/library/JectionThm.tla @@ -0,0 +1,1130 @@ +---------------------------- MODULE JectionThm ------------------------------ +(***************************************************************************) +(* `^{\large\bf \vspace{12pt} *) +(* Facts about injections, surjections, and bijections. *) +(* \vspace{12pt}}^' *) +(***************************************************************************) + +EXTENDS + Naturals, + Jections, + NaturalsInduction, + WellFoundedInduction, + TLAPS, + Sequences + + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Definitions of injections, surjections, bijections restated as facts. *) +(* *) +(* `. .' 
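+(* A typical use (sketch, with illustrative step names): once steps <2>1 *)
+(* and <2>2 establish that F \in [S -> T] and that F hits every element *)
+(* of T, a step of the form *)
+(* <2> QED BY <2>1, <2>2, Fun_IsSurj *)
+(* proves F \in Surjection(S,T) without expanding DEF Surjection. *)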
*) +(***************************************************************************) +THEOREM Fun_IsInj == + ASSUME NEW S, NEW T, NEW F \in [S -> T], + \A a,b \in S : F[a] = F[b] => a = b + PROVE F \in Injection(S,T) +BY DEF Injection + + +THEOREM Fun_IsSurj == + ASSUME NEW S, NEW T, NEW F \in [S -> T], + \A t \in T : \E s \in S : F[s] = t + PROVE F \in Surjection(S,T) +BY DEF Surjection + + +THEOREM Fun_IsBij == + ASSUME NEW S, NEW T, NEW F, + \/ F \in Injection(S,T) + \/ (F \in [S -> T] /\ \A a,b \in S : F[a] = F[b] => a = b), + + \/ F \in Surjection(S,T) + \/ (F \in [S -> T] /\ \A t \in T : \E s \in S : F[s] = t) + PROVE F \in Bijection(S,T) +BY DEF Bijection, Injection, Surjection + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Properties of an injection. *) +(* *) +(* `. .' *) +(***************************************************************************) +Fun_InjProp_Qed(S,T,F) == +/\ F \in [S -> T] +/\ \A a,b \in S : F[a] = F[b] => a = b + + +THEOREM Fun_InjProp == + ASSUME NEW S, NEW T, NEW F \in Injection(S,T) + PROVE Fun_InjProp_Qed(S,T,F) +BY DEF Injection, Fun_InjProp_Qed + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Properties of a surjection. *) +(* *) +(* `. .' *) +(***************************************************************************) +Fun_SurjProp_Qed(S,T,F) == +/\ F \in [S -> T] +/\ \A t \in T : \E s \in S : F[s] = t + + +THEOREM Fun_SurjProp == + ASSUME NEW S, NEW T, NEW F \in Surjection(S,T) + PROVE Fun_SurjProp_Qed(S,T,F) +BY DEF Surjection, Fun_SurjProp_Qed + + + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Properties of a bijection. *) +(* *) +(* `. .' *) +(***************************************************************************) +Fun_BijProp_Qed(S,T,F) == +/\ F \in [S -> T] +/\ F \in Injection(S,T) +/\ F \in Surjection(S,T) +/\ \A a,b \in S : F[a] = F[b] => a = b +/\ \A t \in T : \E s \in S : F[s] = t + + +THEOREM Fun_BijProp == + ASSUME NEW S, NEW T, NEW F \in Bijection(S,T) + PROVE Fun_BijProp_Qed(S,T,F) +BY DEF Bijection, Injection, Surjection, Fun_BijProp_Qed + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* A surjection in [S -> T] such that there is no surjection from any *) +(* subset of S to T is a bijection. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_SmallestSurjectionIsBijection == + ASSUME NEW S, NEW T, NEW f \in Surjection(S,T), + \A U \in SUBSET S : U # S => Surjection(U,T) = {} + PROVE f \in Bijection(S,T) +<1>1. f \in [S -> T] + BY Fun_SurjProp DEF Fun_SurjProp_Qed +<1>2. SUFFICES ASSUME f \notin Injection(S,T) PROVE FALSE + BY Fun_IsBij +<1>3. PICK a,b \in S : a # b /\ f[a] = f[b] + BY <1>1, <1>2, Fun_IsInj +<1>. DEFINE U == S \ {b} +<1>4. U \in SUBSET S /\ U # S + OBVIOUS +<1>. DEFINE g == [x \in U |-> f[x]] +<1>5. g \in Surjection(U,T) + <2>1. g \in [U -> T] BY <1>1 + <2>2. ASSUME NEW t \in T PROVE \E u \in U : g[u] = t + <3>1. CASE t = f[b] BY <1>3, <3>1 + <3>2. CASE t # f[b] + <4>1. PICK s \in S : f[s] = t + BY SMT, Fun_SurjProp DEF Fun_SurjProp_Qed \** Zenon fails ?? + <4>2. s \in U BY <3>2, <4>1 + <4>. QED BY <4>1, <4>2 + <3>3. QED BY <3>1, <3>2 + <2>3. QED BY <2>1, <2>2, Fun_IsSurj +<1>. QED BY <1>4, <1>5 + + + +(***************************************************************************) +(* `. .' 
*) +(* *) +(* Transitivity of injections, surjections, bijections. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_InjTransitive == + ASSUME NEW S, NEW T, NEW U, + NEW F \in Injection(S,T), + NEW G \in Injection(T,U) + PROVE [s \in S |-> G[F[s]]] \in Injection(S,U) +BY DEF Injection + + +THEOREM Fun_SurjTransitive == + ASSUME NEW S, NEW T, NEW U, + NEW F \in Surjection(S,T), + NEW G \in Surjection(T,U) + PROVE [s \in S |-> G[F[s]]] \in Surjection(S,U) +BY DEF Surjection + + +THEOREM Fun_BijTransitive == + ASSUME NEW S, NEW T, NEW U, + NEW F \in Bijection(S,T), + NEW G \in Bijection(T,U) + PROVE [s \in S |-> G[F[s]]] \in Bijection(S,U) +BY Fun_SurjTransitive, Fun_InjTransitive DEF Bijection + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* The inverse of a surjection is an injection. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_SurjInverse == + ASSUME NEW S, NEW T, NEW F \in Surjection(S,T) + PROVE JectionInverse(S,T,F) \in Injection(T,S) +BY DEF JectionInverse, Surjection, Injection + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Properties of the inverse of a bijection. *) +(* *) +(* `. .' *) +(***************************************************************************) +Fun_BijInverse_Qed(S,T,F,G) == + /\ G \in Bijection(T,S) + /\ \A s \in S : G[F[s]] = s + /\ \A t \in T : F[G[t]] = t + /\ F = JectionInverse(T,S,G) + + +THEOREM Fun_BijInverse == + ASSUME NEW S, NEW T, + NEW F \in Bijection(S,T), + NEW G, G = JectionInverse(S,T,F) + PROVE Fun_BijInverse_Qed(S,T,F,G) + +<1>1. \A a,b \in S : F[a] = F[b] => a = b BY DEF Bijection, Injection +<1>2. \A t \in T : \E s \in S : F[s] = t BY DEF Bijection, Surjection +<1>3. F \in [S -> T] BY DEF Bijection, Injection + +<1>4. G = [t \in T |-> CHOOSE s \in S : F[s] = t] BY DEF JectionInverse +<1>5. G \in [T -> S] BY <1>2, <1>4 + +<1>6. \A t \in T : F[G[t]] = t BY <1>2, <1>4 +<1>7. \A s \in S : G[F[s]] = s BY <1>1, <1>3, <1>4 + +<1>8. \A a,b \in T : G[a] = G[b] => a = b BY <1>6 +<1>9. \A s \in S : \E t \in T : G[t] = s BY <1>3, <1>7 +<1>10. G \in Bijection(T,S) BY <1>5, <1>8, <1>9, Fun_IsBij + +<1>11. F = JectionInverse(T,S,G) + <2>10. ASSUME NEW s \in S PROVE F[s] = CHOOSE t \in T : G[t] = s + <3>1. PICK a \in T : G[a] = s BY <1>3, <1>7 + <3>2. \A b \in T : G[b] = s => a = b BY <3>1, <1>8 + <3>3. F[s] = a BY <3>1, <1>6 + <3> QED BY <3>1, <3>2, <3>3 + <2> QED BY <2>10, <1>3 DEF JectionInverse + +<1> QED BY <1>6, <1>7, <1>11, <1>10 DEF Fun_BijInverse_Qed + + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Subset of a bijection is a bijection. *) +(* *) +(* `. .' *) +(***************************************************************************) +Fun_BijSubset_Qed(S,T,F,S1,T1,F1) == + /\ T1 \in SUBSET T + /\ F1 \in Bijection(S1,T1) + + +THEOREM Fun_BijSubset == + ASSUME + NEW S, NEW T, NEW F \in Bijection(S,T), + NEW S1 \in SUBSET S + PROVE + LET + T1 == {F[s] : s \in S1} + F1 == [s \in S1 |-> F[s]] + IN + Fun_BijSubset_Qed(S,T,F,S1,T1,F1) +PROOF + <1>1. PICK T1 : T1 = {F[s] : s \in S1} OBVIOUS + <1>2. PICK F1 : F1 = [s \in S1 |-> F[s]] OBVIOUS + + <1> HIDE DEF Fun_BijProp_Qed + <1>3. Fun_BijProp_Qed(S,T,F) BY Fun_BijProp + <1> USE DEF Fun_BijProp_Qed + + <1>4. F \in [S -> T] BY <1>3 + <1>5. \A a,b \in S : F[a] = F[b] => a = b BY <1>3 + <1>6. 
\A t \in T : \E s \in S : F[s] = t BY <1>3 + + <1>7. T1 \in SUBSET T BY <1>1, <1>4 + + <1>8. F1 \in [S1 -> T1] BY <1>1, <1>2 + <1>9. \A a,b \in S1 : F1[a] = F1[b] => a = b BY <1>2, <1>5 + <1>10. \A t \in T1 : \E s \in S1 : F1[s] = t BY <1>1, <1>2, <1>6 + + <1>11. F1 \in Bijection(S1,T1) BY <1>8, <1>9, <1>10, Fun_IsBij + + <1> USE DEF Fun_BijSubset_Qed + <1> QED BY <1>1, <1>2, <1>7, <1>11 + + + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Given F an injection from S to T, then F is a bijection from S to F(S). *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_InjMeansBijImage == + ASSUME NEW S, NEW T, + NEW F \in Injection(S,T), + NEW FS, FS = {F[s] : s \in S} + PROVE F \in Bijection(S,FS) +BY DEF Bijection, Injection, Surjection + + + + + + + + +----------------------------------------------------------------------------- +(***************************************************************************) +(* `^{\large\bf \vspace{12pt} *) +(* Facts about exists jections. *) +(* \vspace{12pt}}^' *) +(***************************************************************************) + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Definitions restated as facts. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_ExistsInj == + \A S,T : ExistsInjection(S,T) <=> Injection(S,T) # {} +BY DEF ExistsInjection + + +THEOREM Fun_ExistsSurj == + \A S,T : ExistsSurjection(S,T) <=> Surjection(S,T) # {} +BY DEF ExistsSurjection + + +THEOREM Fun_ExistsBij == + \A S,T : ExistsBijection(S,T) <=> Bijection(S,T) # {} +BY DEF ExistsBijection + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* There is a surjection from any set S to any non-empty subset T of S. *) +(* (Note that there cannot be a surjection to {} except if S is empty.) *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_ExistsSurjSubset == + ASSUME NEW S, NEW T \in SUBSET S, T # {} + PROVE ExistsSurjection(S,T) +<1>. PICK x \in T : TRUE OBVIOUS +<1>. [s \in S |-> IF s \in T THEN s ELSE x] \in Surjection(S,T) + BY DEF Surjection +<1>. QED BY DEF ExistsSurjection + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If there is a surjection from S to T, then there is an injection from T *) +(* to S. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_ExistsSurjMeansExistsRevInj == + ASSUME NEW S, NEW T + PROVE ExistsSurjection(S,T) => ExistsInjection(T,S) +BY Fun_SurjInverse DEF ExistsSurjection, ExistsInjection + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* ExistsBijection is reflexive, symmetric, and transitive. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_ExistsBijReflexive == + ASSUME NEW S + PROVE ExistsBijection(S,S) +<1>. [s \in S |-> s] \in Bijection(S,S) BY DEF Bijection, Injection, Surjection +<1>. 
QED BY DEF ExistsBijection + + +THEOREM Fun_ExistsBijSymmetric == + ASSUME NEW S, NEW T, ExistsBijection(S,T) + PROVE ExistsBijection(T,S) +BY Fun_BijInverse DEF Fun_BijInverse_Qed, ExistsBijection + + +THEOREM Fun_ExistsBijTransitive == + ASSUME NEW S, NEW T, NEW U, ExistsBijection(S,T), ExistsBijection(T,U) + PROVE ExistsBijection(S,U) +BY Fun_BijTransitive DEF ExistsBijection + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Existence of injections and surjections is reflexive and transitive. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_ExistsInjReflexive == + ASSUME NEW S + PROVE ExistsInjection(S,S) +BY Fun_ExistsBijReflexive DEF ExistsBijection, ExistsInjection, Bijection + + +THEOREM Fun_ExistsSurjReflexive == + ASSUME NEW S + PROVE ExistsSurjection(S,S) +BY Fun_ExistsBijReflexive DEF ExistsBijection, ExistsSurjection, Bijection + + +THEOREM Fun_ExistsInjTransitive == + ASSUME NEW S, NEW T, NEW U, + ExistsInjection(S,T), ExistsInjection(T,U) + PROVE ExistsInjection(S,U) +BY Fun_InjTransitive DEF ExistsInjection + + +THEOREM Fun_ExistsSurjTransitive == + ASSUME NEW S, NEW T, NEW U, + ExistsSurjection(S,T), ExistsSurjection(T,U) + PROVE ExistsSurjection(S,U) +BY Fun_SurjTransitive DEF ExistsSurjection + + +----------------------------------------------------------------------------- +(***************************************************************************) +(* `^{\large\bf \vspace{12pt} *) +(* The Cantor-Bernstein-Schroeder theorem. *) +(* \vspace{12pt}}^' *) +(***************************************************************************) + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If there exists an injection from S to T, where T is a subset of S, *) +(* then there exists a bijection from S to T. *) +(* *) +(* A lemma for the Cantor-Bernstein-Schroeder theorem. *) +(* *) +(* This proof is formalized from *) +(* `^\url{http://www.proofwiki.org/wiki/Cantor-Bernstein-Schroeder_Theorem/Lemma}^' *) +(* retrieved April 29, 2013. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_CantorBernsteinSchroeder_Lemma == + ASSUME NEW S, NEW T, T \subseteq S, ExistsInjection(S,T) + PROVE ExistsBijection(S,T) +PROOF + <1> PICK F \in Injection(S,T) : TRUE BY Fun_ExistsInj + + <1> USE DEF Fun_InjProp_Qed + <1>1. Fun_InjProp_Qed(S,T,F) BY Fun_InjProp + <1> USE DEF Fun_InjProp_Qed + + (*************************************************************************) + (* Pick Y as S excluding T. *) + (*************************************************************************) + <1>2. PICK Y : Y = S \ T OBVIOUS + + (*************************************************************************) + (* Define Ci[0] as Y, and Ci[i+1] as the image of Ci[i] under F. *) + (*************************************************************************) + <1> DEFINE Ci[i \in Nat] == + IF i = 0 THEN Y ELSE {F[s] : s \in Ci[i-1]} + <1> HIDE DEF Ci + + <1>3. \A i \in Nat : Ci[i] = + IF i = 0 THEN Y ELSE {F[s] : s \in Ci[i-1]} + (***********************************************************************) + (* Use NatInductiveDef to prove that Ci equals its definition. 
*) + (***********************************************************************) + <2> DEFINE + f0 == Y + Def(v,i) == {F[s] : s \in v} + f == CHOOSE f : f = [i \in Nat |-> IF i = 0 THEN f0 ELSE Def(f[i-1],i)] + <2> SUFFICES \A i \in Nat : f[i] = IF i = 0 THEN f0 ELSE Def(f[i-1],i) BY DEF Ci + <2> HIDE DEF f0, Def, f + <2> SUFFICES NatInductiveDefConclusion(f,f0,Def) BY DEF NatInductiveDefConclusion + <2> SUFFICES NatInductiveDefHypothesis(f,f0,Def) BY NatInductiveDef + <2> QED BY DEF NatInductiveDefHypothesis, f + + (*************************************************************************) + (* Applying F to an element of Ci[i] produces an element of Ci[i+1]. *) + (*************************************************************************) + <1>4. ASSUME NEW i \in Nat, NEW s \in Ci[i] + PROVE F[s] \in Ci[i+1] + BY <1>3, SMT + + (*************************************************************************) + (* Each element of Ci[i+1] is the application of F to some element in *) + (* Ci[i]. *) + (*************************************************************************) + <1>5. ASSUME NEW i \in Nat, NEW t \in Ci[i+1] + PROVE \E s \in Ci[i] : F[s] = t + BY <1>3, SMT + + (*************************************************************************) + (* Each Ci[i] \subseteq S. *) + (*************************************************************************) + <1>6. \A i \in Nat : Ci[i] \subseteq S + <2> DEFINE Prop(i) == Ci[i] \subseteq S + <2> SUFFICES \A i \in Nat : Prop(i) OBVIOUS + <2>1. Prop(0) BY <1>2, <1>3 + <2>2. ASSUME NEW i \in Nat, Prop(i) PROVE Prop(i+1) + <3> SUFFICES ASSUME NEW t \in Ci[i+1] PROVE t \in S OBVIOUS + <3>1. PICK s \in Ci[i] : F[s] = t BY <1>5 + <3>2. s \in S BY <2>2 + <3> QED BY <3>1, <3>2, <1>1 + <2> HIDE DEF Prop + <2> QED BY <2>1, <2>2, NatInduction, Isa + + (*************************************************************************) + (* Pick C as the union of all Ci[i]. *) + (*************************************************************************) + <1>7. PICK C : C = UNION {Ci[i] : i \in Nat} OBVIOUS + <1>8. C \subseteq S BY <1>6, <1>7 + + (*************************************************************************) + (* Pick FC as the image of C under F. *) + (*************************************************************************) + <1>9. PICK FC : FC = {F[c] : c \in C} OBVIOUS + <1>10. FC \subseteq T BY <1>1, <1>8, <1>9, Isa + + (*************************************************************************) + (* C = Y \cup FC because Ci[0] = Y and Ci[i+1] = image of Ci[i] under F. *) + (*************************************************************************) + <1>11. C = Y \cup FC + <2>1. ASSUME NEW c \in C PROVE c \in Y \cup FC + <3>1. PICK i \in Nat : c \in Ci[i] BY <1>7 + <3>2. CASE i = 0 BY <3>1, <3>2, <1>3 + <3>3. CASE i # 0 + <4>1. PICK s \in Ci[i-1] : F[s] = c BY <3>1, <3>3, <1>5, SMT + <4>2. s \in C BY <3>3, <1>7, SMT + <4> QED BY <4>1, <4>2, <1>9 + <3> QED BY <3>2, <3>3 + <2>2. ASSUME NEW c \in Y \cup FC PROVE c \in C + <3>1. CASE c \in Y BY <3>1, <1>3, <1>7 + <3>2. CASE c \in FC + <4>1. PICK s \in C : F[s] = c BY <3>2, <1>9 + <4>2. PICK i \in Nat : s \in Ci[i] BY <4>1, <1>7 + <4>3. F[s] \in Ci[i+1] BY <4>2, <1>4 + <4> QED BY <4>1, <4>3, <1>7, SMT + <3> QED BY <3>1, <3>2 + <2> QED BY <2>1, <2>2 + + (*************************************************************************) + (* S \ C is the same as T \ FC. *) + (*************************************************************************) + <1>12. 
S \ C = T \ FC BY <1>2, <1>11 + + (*************************************************************************) + (* Pick H as F on C and the identity on S \ C. Since F (restricted to *) + (* C) is a bijection from C to FC and S \ C = T \ FC, this makes H a *) + (* bijection from S to T. *) + (*************************************************************************) + <1>13. PICK H : H = [s \in S |-> IF s \in C THEN F[s] ELSE s] OBVIOUS + <1>14. H \in Bijection(S,T) + (***********************************************************************) + (* A useful lemma. If a \in C and b \notin C, then H[a] # H[b]. *) + (***********************************************************************) + <2>1. ASSUME NEW a \in S, NEW b \in S, a \in C, b \notin C PROVE H[a] # H[b] + <3>1. H[a] \in FC BY <2>1, <1>1, <1>9, <1>13 + <3>2. H[b] \in T \ FC BY <2>1, <1>12, <1>13 + <3> QED BY <3>1, <3>2 + + <2>2. H \in [S -> T] + <3> SUFFICES ASSUME NEW s \in S PROVE H[s] \in T BY <1>13 + <3>1. CASE s \in C BY <3>1, <1>1, <1>10, <1>13 + <3>2. CASE s \notin C BY <3>2, <1>12, <1>13 + <3> QED BY <3>1, <3>2 + + <2>3. ASSUME NEW a \in S, NEW b \in S, H[a] = H[b] PROVE a = b + <3> H[a] = H[b] BY <2>3 + <3>1. CASE a \in C /\ b \in C BY <3>1, <1>1, <1>13 + <3>2. CASE a \in C /\ b \notin C BY <3>2, <2>1 (* impossible by lemma *) + <3>3. CASE a \notin C /\ b \in C BY <3>3, <2>1 (* impossible by lemma *) + <3>4. CASE a \notin C /\ b \notin C BY <3>4, <1>13 + <3> QED BY <3>1, <3>2, <3>3, <3>4 + + <2>4. ASSUME NEW t \in T PROVE \E s \in S : H[s] = t + <3>1. CASE t \in FC BY <3>1, <1>8, <1>9, <1>13 + <3>2. CASE t \notin FC BY <3>2, <1>12, <1>13 + <3> QED BY <3>1, <3>2 + + <2> QED BY <2>2, <2>3, <2>4, Fun_IsBij + + <1> QED BY <1>14, Fun_ExistsBij + + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If an injection exists from S to T and an injection exists from T to S, *) +(* then there is a bijection from S to T. *) +(* *) +(* This is the Cantor-Bernstein-Schroeder theorem. *) +(* *) +(* This proof is formalized from *) +(* `^\url{http://www.proofwiki.org/wiki/Cantor-Bernstein-Schroeder_Theorem/Proof_5}^' *) +(* retrieved April 29, 2013. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_CantorBernsteinSchroeder == + ASSUME NEW S, NEW T, + ExistsInjection(S,T), ExistsInjection(T,S) + PROVE ExistsBijection(S,T) + +<1>1. PICK F : F \in Injection(S,T) BY DEF ExistsInjection +<1>2. PICK G : G \in Injection(T,S) BY DEF ExistsInjection +<1>. DEFINE RngG == {G[t] : t \in T} + GF == [s \in S |-> G[F[s]]] +<1>3. RngG \subseteq S BY <1>2 DEF Injection +<1>4. GF \in Injection(S, RngG) BY <1>1, <1>2 DEF Injection +<1>5. ExistsBijection(S, RngG) BY <1>3, <1>4, Fun_CantorBernsteinSchroeder_Lemma DEF ExistsInjection +<1>6. ExistsBijection(T, RngG) BY <1>2, Fun_InjMeansBijImage DEF ExistsBijection +<1>. QED BY <1>5, <1>6, Fun_ExistsBijSymmetric, Fun_ExistsBijTransitive + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* Applications of the Cantor-Bernstein-Schroeder Theorem. *) +(* If there exists an injection f: A->B and a surjection g: A->B, then *) +(* there exists a bijection between A and B. *) +(* Also, if there are surjections between A and B, then there is a *) +(* bijection. *) +(* *) +(* `. .' 
*)
+(***************************************************************************)
+
+THEOREM Fun_ExistInjAndSurjThenBij ==
+ ASSUME NEW S, NEW T,
+ ExistsInjection(S,T), ExistsSurjection(S,T)
+ PROVE ExistsBijection(S,T)
+<1>. ExistsInjection(T,S)
+ BY Fun_SurjInverse DEF ExistsInjection, ExistsSurjection
+<1>. QED BY Fun_CantorBernsteinSchroeder
+
+
+
+THEOREM Fun_ExistSurjAndSurjThenBij ==
+ ASSUME NEW S, NEW T,
+ ExistsSurjection(S,T), ExistsSurjection(T,S)
+ PROVE ExistsBijection(S,T)
+<1>. ExistsInjection(S,T)
+ BY Fun_SurjInverse DEF ExistsInjection, ExistsSurjection
+<1>2. QED BY Fun_ExistInjAndSurjThenBij
+
+
+
+
+(***************************************************************************)
+(* `. .' *)
+(* *)
+(* Equivalences for ExistsBijection. *)
+(* *)
+(* `. .' *)
+(***************************************************************************)
+THEOREM Fun_ExistsBijEquiv ==
+ ASSUME NEW S, NEW T
+ PROVE /\ ExistsBijection(S,T) <=> ExistsBijection(T,S)
+ /\ ExistsBijection(S,T) <=> ExistsInjection(S,T) /\ ExistsInjection(T,S)
+ /\ ExistsBijection(S,T) <=> ExistsInjection(S,T) /\ ExistsSurjection(S,T)
+ /\ ExistsBijection(S,T) <=> ExistsInjection(T,S) /\ ExistsSurjection(T,S)
+ /\ ExistsBijection(S,T) <=> ExistsSurjection(S,T) /\ ExistsSurjection(T,S)
+
+<1>1. ExistsBijection(S,T) <=> ExistsBijection(T,S)
+ BY Fun_ExistsBijSymmetric
+<1>2. ExistsInjection(S,T) /\ ExistsInjection(T,S) => ExistsBijection(S,T)
+ BY Fun_CantorBernsteinSchroeder
+<1>3. \A S1, T1 : ExistsBijection(S1,T1) => ExistsSurjection(S1,T1)
+ BY DEF ExistsBijection, ExistsSurjection, Bijection
+<1>4. \A S1,T1 : ExistsSurjection(S1,T1) => ExistsInjection(T1,S1)
+ BY Fun_ExistsSurjMeansExistsRevInj
+<1> QED BY <1>1, <1>2, <1>3, <1>4
+
+
+-----------------------------------------------------------------------------
+(***************************************************************************)
+(* `^{\large\bf \vspace{12pt} *)
+(* Facts about jections involving 1..n. *)
+(* \vspace{12pt}}^' *)
+(***************************************************************************)
+
+
+
+(***************************************************************************)
+(* `. .' *)
+(* *)
+(* There is an injection from 1..n to 1..m iff n \leq m. *)
+(* *)
+(* `. .' *)
+(***************************************************************************)
+THEOREM Fun_NatInjLeq ==
+ ASSUME NEW n \in Nat, NEW m \in Nat
+ PROVE ExistsInjection(1..n,1..m) <=> n \leq m
+PROOF
+ (*************************************************************************)
+ (* n \leq m means Injection exists. This part is easy. *)
+ (*************************************************************************)
+ <1>1. ASSUME n \leq m PROVE [i \in 1..n |-> i] \in Injection(1..n, 1..m)
+ BY SMT, <1>1 DEF Injection
+
+ (*************************************************************************)
+ (* Injection exists means n \leq m. This part is harder. *)
+ (*************************************************************************)
+ <1>2. ASSUME ExistsInjection(1..n,1..m) PROVE n \leq m
+ <2>. DEFINE P(mm) == \A nn \in Nat : nn > mm => Injection(1..nn, 1..mm) = {}
+ <2>1. SUFFICES \A mm \in Nat : P(mm) BY SMT, <1>2 DEF ExistsInjection
+ <2>2. P(0) BY Z3 DEF Injection
+ <2>3. ASSUME NEW mm \in Nat, P(mm) PROVE P(mm+1)
+ <3>1. SUFFICES ASSUME NEW nn \in Nat, nn > mm+1,
+ NEW f \in Injection(1..nn, 1..mm+1)
+ PROVE FALSE
+ OBVIOUS
+ <3>2. ASSUME NEW i \in 1..nn, f[i] = mm+1 PROVE FALSE
+ <4>. DEFINE g == [j \in 1..nn-1 |-> IF j < i THEN f[j] ELSE f[j+1]]
+ <4>1. nn-1 \in Nat /\ nn-1 > mm BY SMT, <3>1
+ <4>2. g \in Injection(1..nn-1, 1..mm) BY SMT, <3>2 DEF Injection
+ <4>. QED BY <4>1, <4>2, P(mm) DEF Injection
+ <3>3. ASSUME ~\E i \in 1..nn : f[i] = mm+1 PROVE FALSE
+ <4>1. f \in Injection(1..nn, 1..mm) BY SMT, <3>3 DEF Injection
+ <4>. QED BY SMT, <4>1, <3>1, P(mm)
+ <3>. QED BY <3>2, <3>3
+ <2>. QED BY Isa, NatInduction, <2>2, <2>3
+
+ <1> QED BY <1>1, <1>2 DEF ExistsInjection
+
+
+
+
+
+
+(***************************************************************************)
+(* `. .' *)
+(* *)
+(* If a surjection from 1..n to S exists (for some n \in Nat) then a *)
+(* bijection from 1..m to S exists (for some m \in Nat) and m \leq n. *)
+(* *)
+(* `. .' *)
+(***************************************************************************)
+THEOREM Fun_NatSurjImpliesNatBij ==
+ ASSUME NEW S, NEW n \in Nat, ExistsSurjection(1..n,S)
+ PROVE \E m \in Nat : ExistsBijection(1..m,S) /\ m \leq n
+
+ (*************************************************************************)
+ (* Pick the smallest m \in Nat for which there is a surjection from *)
+ (* 1..m to S. *)
+ (*************************************************************************)
+<1>1. PICK m \in Nat :
+ /\ ExistsSurjection(1..m, S)
+ /\ \A k \in Nat : k < m => ~ExistsSurjection(1..k, S)
+ <2>. DEFINE NN == { m \in Nat : ExistsSurjection(1..m, S) }
+ <2>1. PICK m \in NN : \A k \in NN : <<k, m>> \notin OpToRel(<, Nat)
+ BY WFMin, NatLessThanWellFounded
+ <2>. QED
+ BY <2>1 DEF OpToRel
+
+<1>2. m <= n BY SMT, <1>1
+ (*************************************************************************)
+ (* Any surjection from 1..m to S is bijective. *)
+ (*************************************************************************)
+<1>3. PICK f \in Surjection(1..m, S) : TRUE BY <1>1 DEF ExistsSurjection
+<1>4. ASSUME f \notin Injection(1..m, S) PROVE FALSE
+ <2>1. f \in [1..m -> S] BY <1>3 DEF Surjection
+ <2>2. PICK i,j \in 1..m : i < j /\ f[i] = f[j]
+ <3>1. PICK ii,jj \in 1..m : ii # jj /\ f[ii] = f[jj]
+ BY <2>1, <1>4 DEF Injection
+ <3>2. CASE ii < jj BY <3>1, <3>2
+ <3>3. CASE jj < ii BY <3>1, <3>3
+ <3>. QED BY SMT, <3>1, <3>2, <3>3
+ <2>3. m-1 \in Nat BY SMT, <2>2
+ <2>. DEFINE g == [k \in 1..m-1 |-> IF k=j THEN f[m] ELSE f[k]]
+ <2>4. g \in Surjection(1..m-1, S)
+ <3>1. g \in [1..m-1 -> S] BY SMT, <2>1
+ <3>2. ASSUME NEW s \in S PROVE \E k \in 1..m-1 : g[k] = s
+ <4>. PICK l \in 1..m : f[l] = s BY <1>3 DEF Surjection
+ <4>. QED BY SMT, <2>2
+ <3>. QED BY <3>1, <3>2 DEF Surjection
+ <2>. QED BY SMT, <2>3, <2>4, <1>1 DEF ExistsSurjection
+
+<1>. QED BY <1>2, <1>3, <1>4 DEF ExistsBijection, Bijection
+
+
+
+
+(***************************************************************************)
+(* `. .' *)
+(* *)
+(* A surjection from some 1..n to S exists iff a bijection from some *)
+(* 1..m to S exists. *)
+(* *)
+(* `. .' *)
+(***************************************************************************)
+THEOREM Fun_NatSurjEquivNatBij ==
+ ASSUME NEW S
+ PROVE (\E n \in Nat : ExistsSurjection(1..n,S))
+ <=> (\E m \in Nat : ExistsBijection(1..m,S))
+BY Fun_NatSurjImpliesNatBij, Fun_ExistsBijEquiv
+
+
+
+(***************************************************************************)
+(* `. .' *)
+(* *)
+(* For any set S, given n, m \in Nat such that bijections exist from 1..n *)
+(* to S and from 1..m to S, then it must be the case that n = m. *)
+(* *)
+(* `. .' 
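+(* For instance, [i \in 1..2 |-> IF i = 1 THEN "a" ELSE "b"] is a *)
+(* bijection from 1..2 to {"a", "b"}, so no interval 1..k with k # 2 *)
+(* can also be in bijection with {"a", "b"}. *)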
*) +(***************************************************************************) +THEOREM Fun_NatBijSame == + ASSUME NEW S, + NEW n \in Nat, ExistsBijection(1..n,S), + NEW m \in Nat, ExistsBijection(1..m,S) + PROVE n = m +BY SMT, Fun_NatInjLeq, Fun_ExistsBijEquiv, Fun_ExistsBijTransitive + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* S is empty iff there exists a bijection from 1..0 to S. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_NatBijEmpty == + ASSUME NEW S + PROVE ExistsBijection(1..0,S) <=> S = {} +<1>1. ASSUME ExistsBijection(1..0, S), S # {} PROVE FALSE + <2>1. ExistsInjection(S, 1..0) BY <1>1, Fun_ExistsBijEquiv + <2>2. QED BY SMT, <1>1, <2>1 DEF ExistsInjection, Injection +<1>2. ASSUME S = {} PROVE ExistsBijection(1..0, S) + BY SMT, <1>2, Fun_ExistsBijReflexive +<1>3. QED BY <1>1, <1>2 + + +(***************************************************************************) +(* `. .' *) +(* *) +(* S is a singleton iff there exists a bijection from 1..1 to S. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_NatBijSingleton == + ASSUME NEW S + PROVE ExistsBijection(1..1,S) <=> \E s : S = {s} +<1>1. ASSUME NEW f \in Bijection(1..1, S) PROVE \E s : S = {s} + BY SMT DEF Bijection, Injection, Surjection +<1>2. ASSUME NEW s, S = {s} PROVE [i \in 1..1 |-> s] \in Bijection(1..1, S) + BY SMT, <1>2 DEF Bijection, Injection, Surjection +<1>. QED BY <1>1, <1>2 DEF ExistsBijection + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If there exists a bijection from 1..m to S (for some m \in Nat), then *) +(* there exists a bijection from 1..n to T (for some n \in Nat), where T *) +(* is a subset of S. Furthermore n \leq m. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_NatBijSubset == + ASSUME NEW S, NEW m \in Nat, ExistsBijection(1..m,S), + NEW T \in SUBSET S + PROVE \E n \in Nat : ExistsBijection(1..n,T) /\ n \leq m + +<1>1. CASE T = {} BY Force, <1>1, Fun_NatBijEmpty +<1>2. CASE T # {} + <2>0. ExistsSurjection(1..m, S) BY Fun_ExistsBijEquiv + <2>1. ExistsSurjection(S, T) BY <1>2, Fun_ExistsSurjSubset + <2>2. ExistsSurjection(1..m, T) BY <2>0, <2>1, Fun_ExistsSurjTransitive + <2>. QED BY <2>2, Fun_NatSurjImpliesNatBij +<1> QED BY <1>1, <1>2 + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If there exists a bijection from 1..m to S (for some m \in Nat), then *) +(* there exists a bijection from 1..(m+1) to S \cup {x}, where x \notin S. *) +(* *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_NatBijAddElem == + ASSUME NEW S, NEW m \in Nat, ExistsBijection(1..m,S), + NEW x, x \notin S + PROVE ExistsBijection(1..(m+1), S \cup {x}) + +<1>1. PICK F \in Bijection(1..m, S) : TRUE BY DEF ExistsBijection +<1>2. F \in [1..m -> S] BY <1>1 DEF Bijection, Injection +<1>3. \A s \in S : \E i \in 1..m : F[i] = s BY <1>1 DEF Bijection, Surjection +<1>4. \A i,j \in 1..m : F[i] = F[j] => i = j BY <1>1 DEF Bijection, Injection + +<1>. DEFINE G == [i \in 1..m+1 |-> IF i <= m THEN F[i] ELSE x] +<1>10. G \in [1..m+1 -> S \cup {x}] BY SMT, <1>2 +<1>20. ASSUME NEW t \in S \cup {x} PROVE \E i \in 1..m+1 : G[i] = t BY SMT, <1>3 +<1>30. 
ASSUME NEW i \in 1..m+1, NEW j \in 1..m+1, G[i] = G[j] PROVE i = j + BY SMT, <1>2, <1>4, <1>30 +<1>40. G \in Bijection(1..m+1, S \cup {x}) + BY <1>10, <1>20, <1>30 DEF Bijection, Injection, Surjection +<1>. QED BY <1>40 DEF ExistsBijection + + + + +(***************************************************************************) +(* `. .' *) +(* *) +(* If there exists a bijection from 1..m to S (for some m \in Nat), then *) +(* there exists a bijection from 1..(m-1) to S \ {x}, where x \in S. *) +(* *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_NatBijSubElem == + ASSUME NEW S, NEW m \in Nat, ExistsBijection(1..m,S), + NEW x, x \in S + PROVE ExistsBijection(1..(m-1), S \ {x}) + +<1>1. PICK n \in Nat : ExistsBijection(1..n, S \ {x}) BY Fun_NatBijSubset +<1>2. ExistsBijection(1..n+1, (S \ {x}) \cup {x}) BY <1>1, Fun_NatBijAddElem +<1>3. ExistsBijection(1..n+1, S) BY <1>2 +<1>4. n = m-1 BY SMT, <1>3, Fun_NatBijSame +<1>. QED BY <1>1, <1>4 + + + +(* doesn't seem to be used anywhere, and is superseded in practice by cardinality theorems + +(***************************************************************************) +(* `. .' *) +(* *) +(* If there exists a bijection from 1..m to S (for some m \in Nat) and *) +(* there exists a bijection from 1..n to T (for some n \in Nat), and S and *) +(* T are disjoint, then there exists a bijection from 1..(m+n) to S \cup *) +(* T. *) +(* *) +(* `. .' *) +(***************************************************************************) +THEOREM Fun_NatBijDisjointUnion == + ASSUME + NEW S, NEW m \in Nat, ExistsBijection(1..m,S), + NEW T, NEW n \in Nat, ExistsBijection(1..n,T), + S \cap T = {} + PROVE + ExistsBijection(1..(m+n),S \cup T) +PROOF + (*************************************************************************) + (* Restate the assumptions and then remove them from automatic use. It *) + (* seems these assumptions cause some of the SMT appeals to fail. *) + (*************************************************************************) + <1>1. ExistsBijection(1..m,S) OBVIOUS + <1>2. ExistsBijection(1..n,T) OBVIOUS + <1>3. S \cap T = {} OBVIOUS + <1> USE ONLY TRUE + + <1> USE DEF ExistsBijection + <1> USE DEF Fun_BijProp_Qed + + (*************************************************************************) + (* Proof by induction on n. *) + (*************************************************************************) + <1> DEFINE + Prop(i) == + \A T1 : + ExistsBijection(1..i,T1) /\ T1 \cap S = {} => + ExistsBijection(1..(m+i),S \cup T1) + + <1>4. \A i \in Nat : Prop(i) + <2>1. Prop(0) + (*********************************************************************) + (* Base case. *) + (*********************************************************************) + <3>1. SUFFICES ASSUME NEW T1, ExistsBijection(1..0,T1), T1 \cap S = {} + PROVE ExistsBijection(1..(m+0),S \cup T1) + OBVIOUS + <3>2. T1 = {} BY <3>1, Fun_NatBijEmpty + <3>3. m+0 = m BY SMT + <3>4. S \cup T1 = S BY <3>2 + <3> QED BY <3>3, <3>4, <1>1 + + <2>2. ASSUME NEW i \in Nat, Prop(i) PROVE Prop(i+1) + (*********************************************************************) + (* Inductive case. *) + (*********************************************************************) + <3>1. PICK j \in Nat : j = i+1 BY SMT + <3>2. SUFFICES ASSUME NEW T1, ExistsBijection(1..j,T1), T1 \cap S = {} + PROVE ExistsBijection(1..(m+j),S \cup T1) + BY <3>1 + + <3>3. j # 0 BY <3>1, SMT + <3>4. ~ExistsBijection(1..0,T1) BY <3>2, <3>3, Fun_NatBijSame + <3>5. 
T1 # {} BY <3>4, Fun_NatBijEmpty + + (*********************************************************************) + (* Construct T2 by removing element t from T1. *) + (*********************************************************************) + <3>6. PICK t : t \in T1 BY <3>5 + <3>7. t \notin S BY <3>2, <3>6 + <3>8. PICK T2 : T2 = T1 \ {t} OBVIOUS + <3>9. t \notin T2 BY <3>8 + <3>10. T2 \subseteq T1 BY <3>8 + <3>11. T1 = T2 \cup {t} BY <3>6, <3>8 + <3>12. T2 \cap S = {} BY <3>2, <3>8 + + (*********************************************************************) + (* Show that there exists a bijection from 1..i to T2. *) + (*********************************************************************) + <3>13. PICK j2 \in Nat : ExistsBijection(1..j2,T2) BY <3>2, <3>10, Fun_NatBijSubset + <3>14. ExistsBijection(1..(j2+1),T1) BY <3>9, <3>11, <3>13, Fun_NatBijAddElem + <3>15. j2+1 \in Nat BY SMT + <3>16. j = j2 + 1 BY <3>2, <3>14, <3>15, Fun_NatBijSame + <3>17. j2 = i BY <3>1, <3>16, SMT + <3>18. ExistsBijection(1..(m+i),S \cup T2) BY <3>12, <3>13, <3>17, <2>2 + + (*********************************************************************) + (* By the inductive hypothesis, there exists a bijection F from *) + (* 1..(m+i) to S \cup T2. *) + (*********************************************************************) + <3>19. PICK F : F \in Bijection(1..(m+i),S \cup T2) BY <3>18 + <3>20. Fun_BijProp_Qed(1..(m+i),S \cup T2,F) + <4> HIDE DEF Fun_BijProp_Qed + <4> QED BY <3>19, Fun_BijProp + <3>21. F \in [1..(m+i) -> S \cup T2] BY <3>20 + <3>22. \A s \in S \cup T2 : \E k \in 1..(m+i) : F[k] = s BY <3>20 + <3>23. \A a,b \in 1..(m+i) : F[a] = F[b] => a = b BY <3>20 + + (*********************************************************************) + (* Construct G by extending F to cover t. G is a bijection from *) + (* 1..(m+j) to S \cup T1. *) + (*********************************************************************) + <3>24. PICK G : G = [k \in 1..(m+j) |-> IF k \leq (m+i) THEN F[k] ELSE t] OBVIOUS + <3>25. G \in Bijection(1..(m+j),S \cup T1) + <4>1. \A a \in 1..(m+j) : a \leq m+i => a \in 1..(m+i) BY <3>1, SMT + <4>2. \A a,b \in 1..(m+j) : a \leq m+i /\ ~(b \leq m+i) => G[a] # G[b] + BY <4>1, <3>7, <3>9, <3>21, <3>24 + + <4>3. G \in [1..(m+j) -> S \cup T1] + (*****************************************************************) + (* Function. *) + (*****************************************************************) + <5>1. SUFFICES ASSUME NEW k \in 1..(m+j) PROVE G[k] \in S \cup T1 BY <3>24 + <5>2. CASE k \leq (m+i) + <6>1. G[k] = F[k] BY <5>2, <3>24 + <6>2. F[k] \in S \cup T2 BY <5>2, <4>1, <3>21 + <6> QED BY <6>1, <6>2, <3>10 + <5>3. CASE ~(k \leq (m+i)) + <6>1. G[k] = t BY <5>3, <3>24 + <6> QED BY <6>1, <3>6 + <5> QED BY <5>2, <5>3 + <4>4. ASSUME NEW s \in S \cup T1 PROVE \E k \in 1..(m+j) : G[k] = s + (*****************************************************************) + (* Injective. *) + (*****************************************************************) + <5>1. CASE s \in S \cup T2 + <6>1. PICK k \in 1..(m+i) : F[k] = s BY <5>1, <3>22 + <6>2. k \in 1..(m+j) BY <3>1, SMT + <6>3. k \leq m+i BY SMT + <6>4. G[k] = F[k] BY <6>2, <6>3, <3>24 + <6> QED BY <6>1, <6>2, <6>4 + <5>2. CASE s = t + <6>1. m+j \in 1..(m+j) BY <3>3, SMT + <6>2. ~(m+j \leq m+i) BY <3>1, SMT + <6>3. G[m+j] = t BY <6>1, <6>2, <3>24 + <6> QED BY <6>1, <6>3, <5>2 + <5> QED BY <5>1, <5>2, <3>11 + <4>5. ASSUME NEW a \in 1..(m+j), NEW b \in 1..(m+j), G[a] = G[b] PROVE a = b + (*****************************************************************) + (* Surjective. 
*) + (*****************************************************************) + <5> G[a] = G[b] BY <4>5 + <5>1. CASE (a \leq m+i) /\ (b \leq m+i) BY <5>1, <4>1, <3>23, <3>24 + <5>2. CASE (a \leq m+i) /\ ~(b \leq m+i) BY <5>2, <4>2 (* impossible *) + <5>3. CASE ~(a \leq m+i) /\ (b \leq m+i) BY <5>3, <4>2 (* impossible *) + <5>4. CASE ~(a \leq m+i) /\ ~(b \leq m+i) BY <5>4, <3>1, SMT + <5> QED BY <5>1, <5>2, <5>3, <5>4 + <4> QED BY <4>3, <4>4, <4>5, Fun_IsBij + <3> QED BY <3>1, <3>25 + <2> HIDE DEF Prop + <2> QED BY <2>1, <2>2, NatInduction + + <1> QED BY <1>1, <1>2, <1>3, <1>4 + +*) + + + +============================================================================= +\* Modification History +\* Last modified Tue Jul 09 19:00:04 CEST 2013 by merz +\* Last modified Tue Jun 11 12:30:05 CEST 2013 by bhargav +\* Last modified Fri May 31 15:27:41 CEST 2013 by bhargav +\* Last modified Fri May 03 12:55:32 PDT 2013 by tomr +\* Created Thu Apr 11 10:36:10 PDT 2013 by tomr diff --git a/x/ccv/provider/keeper/prototyping/model/library/Jections.tla b/x/ccv/provider/keeper/prototyping/model/library/Jections.tla new file mode 100644 index 0000000000..cb58eac0b4 --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/library/Jections.tla @@ -0,0 +1,48 @@ +------------------------------ MODULE Jections ------------------------------ +(***************************************************************************) +(* `^{\large\bf \vspace{12pt} *) +(* Definition of injection, surjection, and bijection. *) +(* \vspace{12pt}}^' *) +(***************************************************************************) + + +(***************************************************************************) +(* A map is an injection iff each element in the domain maps to a distinct *) +(* element in the range. *) +(***************************************************************************) +Injection(S,T) == { M \in [S -> T] : \A a,b \in S : M[a] = M[b] => a = b } + + +(***************************************************************************) +(* A map is a surjection iff for each element in the range there is some *) +(* element in the domain that maps to it. *) +(***************************************************************************) +Surjection(S,T) == { M \in [S -> T] : \A t \in T : \E s \in S : M[s] = t } + + +(***************************************************************************) +(* A map is a bijection iff it is both an injection and a surjection. *) +(***************************************************************************) +Bijection(S,T) == Injection(S,T) \cap Surjection(S,T) + + +(***************************************************************************) +(* An injection, surjection, or bijection exists if the corresponding set *) +(* is nonempty. *) +(***************************************************************************) +ExistsInjection(S,T) == Injection(S,T) # {} +ExistsSurjection(S,T) == Surjection(S,T) # {} +ExistsBijection(S,T) == Bijection(S,T) # {} + + +(***************************************************************************) +(* The inverse of a jection. 
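+(* For a bijection M the CHOOSE below picks the unique preimage; e.g. *)
+(* with M == [i \in 1..3 |-> 10*i] one gets *)
+(* JectionInverse(1..3, {10, 20, 30}, M)[20] = 2. *)
+(* For a mere surjection it still returns some preimage, which is why *)
+(* the inverse of a surjection is an injection (Fun_SurjInverse in *)
+(* JectionThm). *)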
*) +(***************************************************************************) +JectionInverse(S,T,M) == [t \in T |-> CHOOSE s \in S : M[s] = t] + +JectionInverseSets(S, T, M, B) == { s \in S : M[s] \in B } +============================================================================= +\* Modification History +\* Last modified Wed Jun 05 12:14:19 CEST 2013 by bhargav +\* Last modified Fri May 03 12:55:35 PDT 2013 by tomr +\* Created Thu Apr 11 10:30:48 PDT 2013 by tomr diff --git a/x/ccv/provider/keeper/prototyping/model/library/NaturalsInduction.tla b/x/ccv/provider/keeper/prototyping/model/library/NaturalsInduction.tla new file mode 100755 index 0000000000..219853474c --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/library/NaturalsInduction.tla @@ -0,0 +1,210 @@ +------------------------- MODULE NaturalsInduction ------------------------- +(***************************************************************************) +(* This module contains useful theorems for inductive proofs and recursive *) +(* definitions over the naturals. *) +(* *) +(* Some of the statements of the theorems are decomposed in terms of *) +(* definitions. This is done for two reasons: *) +(* *) +(* - It makes it easier for the backends to instantiate the theorems *) +(* when those definitions are not expanded. *) +(* *) +(* - It can be convenient when writing proofs to use those definitions *) +(* rather than having to write out their expansions. *) +(* *) +(* The proofs of these theorems appear in module NaturalsInduction\_proofs.*) +(***************************************************************************) +EXTENDS Integers, TLAPS + +(***************************************************************************) +(* The following is the simple statement of inductions over the naturals. *) +(* For predicates P defined by a moderately complex operator, it is often *) +(* useful to hide the operator definition before using this theorem. That *) +(* is, you first define a suitable operator P (not necessarily by that *) +(* name), prove the two hypotheses of the theorem, and then hide the *) +(* definition of P when using the theorem. *) +(***************************************************************************) +THEOREM NatInduction == + ASSUME NEW P(_), + P(0), + \A n \in Nat : P(n) => P(n+1) + PROVE \A n \in Nat : P(n) + +(***************************************************************************) +(* A useful corollary of NatInduction *) +(***************************************************************************) +THEOREM DownwardNatInduction == + ASSUME NEW P(_), NEW m \in Nat, P(m), + \A n \in 1 .. m : P(n) => P(n-1) + PROVE P(0) + +(***************************************************************************) +(* The following theorem expresses a stronger induction principle, *) +(* also known as course-of-values induction, where the induction *) +(* hypothesis is available for all strictly smaller natural numbers. *) +(***************************************************************************) +THEOREM GeneralNatInduction == + ASSUME NEW P(_), + \A n \in Nat : (\A m \in 0..(n-1) : P(m)) => P(n) + PROVE \A n \in Nat : P(n) + +(***************************************************************************) +(* The following theorem expresses the ``least-number principle'': *) +(* if P(n) is true for some natural number n then there is a *) +(* smallest natural number for which P is true. 
It could be derived in *) +(* module WellFoundedInduction as a corollary of the fact that the natural *) +(* numbers are well ordered, but we give a direct proof. *) +(***************************************************************************) +THEOREM SmallestNatural == + ASSUME NEW P(_), NEW n \in Nat, P(n) + PROVE \E m \in Nat : /\ P(m) + /\ \A k \in 0 .. m-1 : ~ P(k) + +(***************************************************************************) +(* The following theorem says that a recursively defined function f over *) +(* the natural numbers is well-defined if for every n \in Nat the *) +(* definition of f[n] depends only on arguments smaller than n. *) +(***************************************************************************) +THEOREM RecursiveFcnOfNat == + ASSUME NEW Def(_,_), + ASSUME NEW n \in Nat, NEW g, NEW h, + \A i \in 0..(n-1) : g[i] = h[i] + PROVE Def(g, n) = Def(h, n) + PROVE LET f[n \in Nat] == Def(f, n) + IN f = [n \in Nat |-> Def(f, n)] + + +(***************************************************************************) +(* The following theorem NatInductiveDef is what you use to justify a *) +(* function defined by primitive recursion over the naturals. *) +(***************************************************************************) +NatInductiveDefHypothesis(f, f0, Def(_,_)) == + (f = CHOOSE g : g = [i \in Nat |-> IF i = 0 THEN f0 ELSE Def(g[i-1], i)]) +NatInductiveDefConclusion(f, f0, Def(_,_)) == + f = [i \in Nat |-> IF i = 0 THEN f0 ELSE Def(f[i-1], i)] + +THEOREM NatInductiveDef == + ASSUME NEW Def(_,_), NEW f, NEW f0, + NatInductiveDefHypothesis(f, f0, Def) + PROVE NatInductiveDefConclusion(f, f0, Def) + + +(***************************************************************************) +(* The following two theorems allow you to prove the type of a recursively *) +(* defined function over the natural numbers. *) +(***************************************************************************) +THEOREM RecursiveFcnOfNatType == + ASSUME NEW f, NEW S, NEW Def(_,_), f = [n \in Nat |-> Def(f,n)], + ASSUME NEW n \in Nat, NEW g, \A i \in 0 .. n-1 : g[i] \in S + PROVE Def(g,n) \in S + PROVE f \in [Nat -> S] + +THEOREM NatInductiveDefType == + ASSUME NEW Def(_,_), NEW S, NEW f, NEW f0 \in S, + NatInductiveDefConclusion(f, f0, Def), + f0 \in S, + \A v \in S, n \in Nat \ {0} : Def(v, n) \in S + PROVE f \in [Nat -> S] + +(***************************************************************************) +(* The following theorems show uniqueness of functions recursively defined *) +(* over Nat. *) +(***************************************************************************) +THEOREM RecursiveFcnOfNatUnique == + ASSUME NEW Def(_,_), NEW f, NEW g, + f = [n \in Nat |-> Def(f,n)], + g = [n \in Nat |-> Def(g,n)], + ASSUME NEW n \in Nat, NEW ff, NEW gg, + \A i \in 0..(n-1) : ff[i] = gg[i] + PROVE Def(ff, n) = Def(gg, n) + PROVE f = g + +THEOREM NatInductiveUnique == + ASSUME NEW Def(_,_), NEW f, NEW g, NEW f0, + NatInductiveDefConclusion(f, f0, Def), + NatInductiveDefConclusion(g, f0, Def) + PROVE f = g + +(***************************************************************************) +(* The following theorems are analogous to the preceding ones but for *) +(* functions defined over intervals of natural numbers. 
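   For instance, for m, n \in Nat, a partial-products function over the
   interval m..n,

      prods[i \in m..n] == IF i = m THEN 1 ELSE i * prods[i-1]

   (a hypothetical example, not defined in this module), is justified by
   FiniteNatInductiveDef below with c = 1 and Def(v, i) = i * v, and
   FiniteNatInductiveDefType then yields prods \in [m..n -> Nat].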
*) +(***************************************************************************) + +FiniteNatInductiveDefHypothesis(f, c, Def(_,_), m, n) == + (f = CHOOSE g : g = [i \in m..n |-> IF i = m THEN c ELSE Def(g[i-1], i)]) +FiniteNatInductiveDefConclusion(f, c, Def(_,_), m, n) == + f = [i \in m..n |-> IF i = m THEN c ELSE Def(f[i-1], i)] + +THEOREM FiniteNatInductiveDef == + ASSUME NEW Def(_,_), NEW f, NEW c, NEW m \in Nat, NEW n \in Nat, + FiniteNatInductiveDefHypothesis(f, c, Def, m, n) + PROVE FiniteNatInductiveDefConclusion(f, c, Def, m, n) + +THEOREM FiniteNatInductiveDefType == + ASSUME NEW S, NEW Def(_,_), NEW f, NEW c \in S, NEW m \in Nat, NEW n \in Nat, + FiniteNatInductiveDefConclusion(f, c, Def, m, n), + \A v \in S, i \in (m+1) .. n : Def(v,i) \in S + PROVE f \in [m..n -> S] + +THEOREM FiniteNatInductiveUnique == + ASSUME NEW Def(_,_), NEW f, NEW g, NEW c, NEW m \in Nat, NEW n \in Nat, + FiniteNatInductiveDefConclusion(f, c, Def, m, n), + FiniteNatInductiveDefConclusion(g, c, Def, m, n) + PROVE f = g + +============================================================================= +(***************************************************************************) +(* The following theorems are analogous to the preceding ones but for *) +(* functions defined over intervals of natural numbers. *) +(***************************************************************************) + +FiniteNatInductiveDefHypothesis(f, c, Def(_,_), m, n) == + (f = CHOOSE g : g = [i \in m..n |-> IF i = m THEN c ELSE Def(g[i-1], i)]) +FiniteNatInductiveDefConclusion(f, c, Def(_,_), m, n) == + f = [i \in m..n |-> IF i = m THEN c ELSE Def(f[i-1], i)] + +THEOREM FiniteNatInductiveDef == + ASSUME NEW Def(_,_), NEW f, NEW c, NEW m \in Nat, NEW n \in Nat, + FiniteNatInductiveDefHypothesis(f, c, Def, m, n) + PROVE FiniteNatInductiveDefConclusion(f, c, Def, m, n) + +THEOREM FiniteNatInductiveDefType == + ASSUME NEW S, NEW Def(_,_), NEW f, NEW c \in S, NEW m \in Nat, NEW n \in Nat, + FiniteNatInductiveDefConclusion(f, c, Def, m, n), + \A v \in S, i \in (m+1) .. n : Def(v,i) \in S + PROVE f \in [m..n -> S] + +THEOREM FiniteNatInductiveUnique == + ASSUME NEW Def(_,_), NEW f, NEW g, NEW c, NEW m \in Nat, NEW n \in Nat, + FiniteNatInductiveDefConclusion(f, c, Def, m, n), + FiniteNatInductiveDefConclusion(g, c, Def, m, n) + PROVE f = g + +(***************************************************************************) +(* The following example shows how this module is used. *) +(***************************************************************************) + +factorial[n \in Nat] == IF n = 0 THEN 1 ELSE n * factorial[n-1] + +THEOREM FactorialDefConclusion == NatInductiveDefConclusion(factorial, 1, LAMBDA v,n : n*v) +<1>1. NatInductiveDefHypothesis(factorial, 1, LAMBDA v,n : n*v) + BY DEF NatInductiveDefHypothesis, factorial +<1>2. QED + BY <1>1, NatInductiveDef + +THEOREM FactorialDef == \A n \in Nat : factorial[n] = IF n = 0 THEN 1 ELSE n * factorial[n-1] +BY FactorialDefConclusion DEFS NatInductiveDefConclusion + +THEOREM FactorialType == factorial \in [Nat -> Nat] +<1>1. \A v \in Nat, n \in Nat \ {0} : n * v \in Nat + OBVIOUS +<1>2. 
QED + BY <1>1, 1 \in Nat, NatInductiveDefType, FactorialDefConclusion, Isa + +============================================================================= +\* Modification History +\* Last modified Thu May 08 12:29:46 CEST 2014 by merz +\* Last modified Tue Oct 15 12:06:48 CEST 2013 by shaolin +\* Last modified Sat Nov 26 08:49:59 CET 2011 by merz +\* Last modified Mon Nov 07 08:58:05 PST 2011 by lamport +\* Created Mon Oct 31 02:52:05 PDT 2011 by lamport diff --git a/x/ccv/provider/keeper/prototyping/model/library/NaturalsInduction_proofs.tla b/x/ccv/provider/keeper/prototyping/model/library/NaturalsInduction_proofs.tla new file mode 100644 index 0000000000..fa2cb70bef --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/library/NaturalsInduction_proofs.tla @@ -0,0 +1,454 @@ +---------------------- MODULE NaturalsInduction_proofs ---------------------- +(***************************************************************************) +(* This module contains useful theorems for inductive proofs and recursive *) +(* definitions over the naturals. *) +(* *) +(* Some of the statements of the theorems are decomposed in terms of *) +(* definitions. This is done for two reasons: *) +(* *) +(* - It makes it easier for the backends to instantiate the theorems *) +(* when those definitions are not expanded. *) +(* *) +(* - It can be convenient when writing proofs to use those definitions *) +(* rather than having to write out their expansions. *) +(***************************************************************************) +EXTENDS Integers, TLAPS + +(***************************************************************************) +(* The following is the simple statement of inductions over the naturals. *) +(* For predicates P defined by a moderately complex operator, it is often *) +(* useful to hide the operator definition before using this theorem. That *) +(* is, you first define a suitable operator P (not necessarily by that *) +(* name), prove the two hypotheses of the theorem, and then hide the *) +(* definition of P when using the theorem. *) +(***************************************************************************) +THEOREM NatInduction == + ASSUME NEW P(_), + P(0), + \A n \in Nat : P(n) => P(n+1) + PROVE \A n \in Nat : P(n) +BY IsaM("(intro natInduct, auto)") + +(***************************************************************************) +(* A useful corollary of NatInduction *) +(***************************************************************************) +THEOREM DownwardNatInduction == + ASSUME NEW P(_), NEW m \in Nat, P(m), + \A n \in 1 .. m : P(n) => P(n-1) + PROVE P(0) +<1>. DEFINE Q(i) == i \leq m => P(m-i) +<1>1. Q(0) OBVIOUS +<1>2. ASSUME NEW n \in Nat, Q(n) + PROVE Q(n+1) + BY <1>2 +<1>3. \A n \in Nat : Q(n) BY <1>1, <1>2, NatInduction, Isa +<1>. QED BY <1>3, Isa + +(***************************************************************************) +(* The following theorem expresses a stronger induction principle, *) +(* also known as course-of-values induction, where the induction *) +(* hypothesis is available for all strictly smaller natural numbers. *) +(***************************************************************************) +THEOREM GeneralNatInduction == + ASSUME NEW P(_), + \A n \in Nat : (\A m \in 0..(n-1) : P(m)) => P(n) + PROVE \A n \in Nat : P(n) +<1> DEFINE Q(n) == \A m \in 0..n : P(m) +<1>1. Q(0) BY SMT +<1>2. \A n \in Nat : Q(n) => Q(n+1) BY SMT +<1>3. \A n \in Nat : Q(n) BY <1>1, <1>2, NatInduction, Isa +<1>4. 
QED BY ONLY <1>3, SMT + +(***************************************************************************) +(* The following theorem expresses the ``least-number principle'': *) +(* if P(n) is true for some natural number n then there is a *) +(* smallest natural number for which P is true. It could be derived in *) +(* module WellFoundedInduction as a corollary of the fact that the natural *) +(* numbers are well ordered, but we give a direct proof. *) +(***************************************************************************) +THEOREM SmallestNatural == + ASSUME NEW P(_), NEW n \in Nat, P(n) + PROVE \E m \in Nat : /\ P(m) + /\ \A k \in 0 .. m-1 : ~ P(k) +<1>. DEFINE Q(k) == ~ P(k) +<1>. SUFFICES ASSUME \A m \in Nat : P(m) => \E k \in 0 .. m-1 : P(k) + PROVE \A m \in Nat : Q(m) + OBVIOUS +<1>1. ASSUME NEW l \in Nat, \A k \in 0 .. l-1 : Q(k) + PROVE Q(l) + BY <1>1 +<1>. HIDE DEF Q +<1>. QED BY ONLY <1>1, GeneralNatInduction, Isa + +(***************************************************************************) +(* The following theorem says that a recursively defined function f over *) +(* the natural numbers is well-defined if for every n \in Nat the *) +(* definition of f[n] depends only on arguments smaller than n. *) +(***************************************************************************) +THEOREM RecursiveFcnOfNat == + ASSUME NEW Def(_,_), + ASSUME NEW n \in Nat, NEW g, NEW h, + \A i \in 0..(n-1) : g[i] = h[i] + PROVE Def(g, n) = Def(h, n) + PROVE LET f[n \in Nat] == Def(f, n) + IN f = [n \in Nat |-> Def(f, n)] +<1>. SUFFICES \E ff : ff = [n \in Nat |-> Def(ff, n)] + OBVIOUS + (*************************************************************************) + (* The strategy of the proof is to define a sequence F of approximations *) + (* such that F[n] is a function with domain 0 .. n-1 that computes *) + (* F[n][i] by applying the definition to the preceding approximation *) + (* function F[n-1]. *) + (*************************************************************************) +<1>. DEFINE F[n \in Nat] == [i \in 0 .. n-1 |-> Def(F[n-1], i)] + f[n \in Nat] == F[n+1][n] + + (*************************************************************************) + (* We first show that F itself is well-defined by diagonalization *) + (* over functions that are defined over finite intervals of integers. *) + (*************************************************************************) +<1>1. F = [n \in Nat |-> [i \in 0 .. n-1 |-> Def(F[n-1], i)]] + <2>. SUFFICES \E FF : FF = [n \in Nat |-> [i \in 0 .. n-1 |-> Def(FF[n-1], i)]] + OBVIOUS + <2>. DEFINE P(g,k) == g = [n \in 0 .. k |-> [i \in 0 .. n-1 |-> Def(g[n-1], i)]] + G(k) == CHOOSE g : P(g,k) + FF == [n \in Nat |-> [i \in 0 .. n-1 |-> G(n)[n][i] ]] + <2>0. ASSUME NEW g, NEW k \in Nat, P(g,k), + NEW n \in 0 .. k, NEW i \in 0 .. n-1 + PROVE g[n][i] = Def(g[n-1], i) + <3>. DEFINE gg == [m \in 0 .. k |-> [j \in 0 .. m-1 |-> Def(g[m-1], j)]] + <3>1. gg[n][i] = Def(g[n-1],i) OBVIOUS + <3>2. g = gg BY <2>0, Zenon + <3>. QED BY <3>1, <3>2, Zenon + <2>1. \A k \in Nat : \E g : P(g,k) + <3>. DEFINE Q(k) == \E g : P(g,k) + <3>. SUFFICES \A k \in Nat : Q(k) OBVIOUS + <3>1. Q(0) + <4>. DEFINE g0 == [n \in {0} |-> [i \in {} |-> {}]] + <4>1. P(g0, 0) OBVIOUS + <4>. QED BY <4>1 + <3>2. ASSUME NEW k \in Nat, Q(k) + PROVE Q(k+1) + <4>1. PICK g : P(g,k) BY <3>2 + <4>1a. ASSUME NEW n \in 0 .. k, NEW i \in 0 .. n-1 + PROVE g[n][i] = Def(g[n-1], i) + BY <4>1, <2>0 + <4>. DEFINE h == [n \in 0 .. k+1 |-> [i \in 0 .. n-1 |-> Def(g[n-1], i) ]] + <4>2. h = [n \in 0 .. 
k+1 |-> [i \in 0 .. n-1 |-> Def(h[n-1], i)]] + <5>. SUFFICES ASSUME NEW n \in 0 .. k+1, NEW i \in 0 .. n-1 + PROVE h[n][i] = Def(h[n-1], i) + BY Zenon + <5>1. h[n][i] = Def(g[n-1], i) OBVIOUS + <5>2. ASSUME NEW j \in 0 .. i-1 + PROVE g[n-1][j] = h[n-1][j] + BY <4>1a + <5>. HIDE DEF h + <5>3. Def(g[n-1],i) = Def(h[n-1],i) BY <5>2 + <5>. QED BY <5>1, <5>3 + <4>. HIDE DEF h + <4>. QED BY <4>2 + <3>. HIDE DEF Q + <3>. QED BY <3>1, <3>2, NatInduction, Blast + <2>2. \A k \in Nat : P(G(k), k) BY <2>1 + <2>3. \A k \in Nat : \A l \in 0 .. k : \A i \in 0 .. l-1 : \A g,h : + P(g,k) /\ P(h,l) => g[l][i] = h[l][i] + <3>. DEFINE Q(k) == \A l \in 0 .. k : \A i \in 0 .. l-1 : \A g,h : + P(g,k) /\ P(h,l) => g[l][i] = h[l][i] + <3>. SUFFICES \A k \in Nat : Q(k) OBVIOUS + <3>0. Q(0) OBVIOUS + <3>1. ASSUME NEW k \in Nat, Q(k) + PROVE Q(k+1) + <4>. HIDE DEF P + <4>. SUFFICES ASSUME NEW l \in 0 .. k+1, NEW i \in 0 .. l-1, NEW g, NEW h, + P(g,k+1), P(h,l) + PROVE g[l][i] = h[l][i] + OBVIOUS + <4>1. /\ g[l][i] = Def(g[l-1],i) + /\ h[l][i] = Def(h[l-1],i) + BY <2>0 + <4>. DEFINE gg == [nn \in 0 .. k |-> [ii \in 0 .. nn-1 |-> Def(g[nn-1],ii)]] + hh == [nn \in 0 .. l-1 |-> [ii \in 0 .. nn-1 |-> Def(h[nn-1],ii)]] + <4>2. P(gg,k) + <5>1. ASSUME NEW nn \in 0 .. k, NEW j \in 0 .. nn-1 + PROVE gg[nn-1] = g[nn-1] + <6>. /\ nn-1 \in 0 .. k + /\ nn-1 \in 0 .. k+1 + OBVIOUS + <6>1. gg[nn-1] = [ii \in 0 .. nn-2 |-> Def(g[nn-2],ii)] OBVIOUS + <6>2. g[nn-1] = [ii \in 0 .. (nn-1)-1 |-> Def(g[(nn-1)-1],ii)] BY DEF P + <6>. QED BY <6>1, <6>2 + <5>. QED BY <5>1 DEF P + <4>3. P(hh,l-1) + <5>1. ASSUME NEW nn \in 0 .. l-1, NEW j \in 0 .. nn-1 + PROVE hh[nn-1] = h[nn-1] + <6>. /\ nn-1 \in 0 .. l-1 + /\ nn-1 \in 0 .. l + OBVIOUS + <6>1. hh[nn-1] = [ii \in 0 .. nn-2 |-> Def(h[nn-2],ii)] OBVIOUS + <6>2. h[nn-1] = [ii \in 0 .. (nn-1)-1 |-> Def(h[(nn-1)-1],ii)] BY DEF P + <6>. QED BY <6>1, <6>2 + <5>. QED BY <5>1 DEF P + <4>4. \A m \in 0 .. i-1 : gg[l-1][m] = hh[l-1][m] BY <3>1, <4>2, <4>3 + <4>5. \A m \in 0 .. i-1 : g[l-1][m] = gg[l-1][m] BY <2>0 + <4>6. \A m \in 0 .. i-1 : h[l-1][m] = hh[l-1][m] BY <2>0 + <4>7. \A m \in 0 .. i-1 : g[l-1][m] = h[l-1][m] BY <4>4, <4>5, <4>6 + <4>8. Def(g[l-1],i) = Def(h[l-1],i) BY <4>7 + <4>. QED BY <4>8, <2>0 + <3>. HIDE DEF Q + <3>. QED BY <3>0, <3>1, NatInduction, Blast + <2>4. FF = [n \in Nat |-> [i \in 0 .. n-1 |-> Def(FF[n-1], i)]] + <3>. HIDE DEF G + <3>. SUFFICES ASSUME NEW k \in Nat, NEW i \in 0 .. k-1 + PROVE FF[k][i] = Def(FF[k-1], i) + OBVIOUS + <3>1. FF[k][i] = G(k)[k][i] OBVIOUS + <3>2. G(k)[k][i] = Def(G(k)[k-1], i) BY <2>2 + <3>. HIDE DEF P + <3>3. \A j \in 0 .. i-1 : G(k)[k-1][j] = FF[k-1][j] BY <2>2, <2>3 + <3>. HIDE DEF FF + <3>4. Def(G(k)[k-1], i) = Def(FF[k-1], i) BY <3>3 + <3>. QED BY <3>1, <3>2, <3>4 + <2>. QED BY <2>4 + +<1>. HIDE DEF F \* from now on, use step <1>1 rather than the definition + + (*************************************************************************) + (* The following step is a trivial consequence of <1>1 but the backend *) + (* provers are currently unable to prove it directly. *) + (*************************************************************************) +<1>2. ASSUME NEW n \in Nat, NEW i \in 0 .. n-1 + PROVE F[n][i] = Def(F[n-1], i) + <2>. DEFINE G == [m \in Nat |-> [j \in 0 .. m-1 |-> Def(F[m-1],j)]] + <2>1. G[n][i] = Def(F[n-1],i) OBVIOUS + <2>2. F = G BY <1>1, Zenon + <2>. 
QED BY <2>1, <2>2, Zenon + + (*************************************************************************) + (* Any two approximations F[n] and F[m] agree for arguments where they *) + (* are both defined. *) + (*************************************************************************) +<1>. DEFINE P(n) == \A m \in 0 .. n : \A i \in 0 .. m-1 : F[n][i] = F[m][i] +<1>3. \A n \in Nat : P(n) + <2>1. ASSUME NEW n \in Nat, \A k \in 0 .. n-1 : P(k) + PROVE P(n) + <3>. SUFFICES ASSUME NEW m \in 0 .. n, NEW i \in 0 .. m-1 + PROVE F[n][i] = F[m][i] + OBVIOUS + <3>2. CASE m = n BY <3>2 + <3>3. CASE n = 0 BY <3>3, SMT + <3>4. CASE 0 < n /\ m \in 0 .. n-1 + <4>1. F[n][i] = Def(F[n-1],i) BY <1>2 + <4>2. \A j \in 0 .. i-1 : F[n-1][j] = F[m-1][j] BY <2>1, <3>4 + <4>3. Def(F[n-1],i) = Def(F[m-1],i) BY <4>2 + <4>4. Def(F[m-1],i) = F[m][i] BY <1>2 + <4>. QED BY <4>1, <4>3, <4>4 + <3>. QED BY <3>2, <3>3, <3>4, SMT + <2>. HIDE DEF P + <2>. QED BY <2>1, GeneralNatInduction, Blast + + (*************************************************************************) + (* The assertion follows immediately from the two preceding steps. *) + (*************************************************************************) +<1>4. f = [n \in Nat |-> Def(f,n)] + <2>. SUFFICES ASSUME NEW n \in Nat + PROVE f[n] = Def(f,n) + OBVIOUS + <2>1. f[n] = Def(F[n], n) BY <1>2 + <2>2. \A i \in 0 .. n-1 : F[n][i] = f[i] BY <1>3 + <2>3. Def(F[n],n) = Def(f,n) BY <2>2 + <2>. QED BY <2>1, <2>3 + +<1>. QED BY <1>4 + + +(***************************************************************************) +(* The following theorem NatInductiveDef is what you use to justify a *) +(* function defined by primitive recursion over the naturals. *) +(***************************************************************************) +NatInductiveDefHypothesis(f, f0, Def(_,_)) == + (f = CHOOSE g : g = [i \in Nat |-> IF i = 0 THEN f0 ELSE Def(g[i-1], i)]) +NatInductiveDefConclusion(f, f0, Def(_,_)) == + f = [i \in Nat |-> IF i = 0 THEN f0 ELSE Def(f[i-1], i)] + +THEOREM NatInductiveDef == + ASSUME NEW Def(_,_), NEW f, NEW f0, + NatInductiveDefHypothesis(f, f0, Def) + PROVE NatInductiveDefConclusion(f, f0, Def) +<1>. DEFINE PRDef(g,n) == IF n = 0 THEN f0 ELSE Def(g[n-1], n) + ff[n \in Nat] == PRDef(ff,n) +<1>1. ASSUME NEW n \in Nat, NEW g, NEW h, + \A i \in 0 .. n-1 : g[i] = h[i] + PROVE PRDef(g,n) = PRDef(h,n) + BY <1>1, Z3 +<1>. HIDE DEF PRDef +<1>2. ff = [n \in Nat |-> PRDef(ff,n)] BY <1>1, RecursiveFcnOfNat, Isa +<1>. USE DEF PRDef +<1>3. ff = f BY DEF NatInductiveDefHypothesis +<1>. HIDE DEF ff +<1>. QED BY <1>2, <1>3 DEF NatInductiveDefConclusion + +(***************************************************************************) +(* The following two theorems allow you to prove the type of a recursively *) +(* defined function over the natural numbers. *) +(***************************************************************************) +THEOREM RecursiveFcnOfNatType == + ASSUME NEW f, NEW S, NEW Def(_,_), f = [n \in Nat |-> Def(f,n)], + ASSUME NEW n \in Nat, NEW g, \A i \in 0 .. n-1 : g[i] \in S + PROVE Def(g,n) \in S + PROVE f \in [Nat -> S] +<1>1. SUFFICES \A n \in Nat : f[n] \in S + OBVIOUS +<1>2. ASSUME NEW n \in Nat, \A i \in 0 .. n-1 : f[i] \in S + PROVE f[n] \in S + BY <1>2, Zenon +<1>. QED BY <1>2, GeneralNatInduction, Isa + +THEOREM NatInductiveDefType == + ASSUME NEW Def(_,_), NEW S, NEW f, NEW f0 \in S, + NatInductiveDefConclusion(f, f0, Def), + f0 \in S, + \A v \in S, n \in Nat \ {0} : Def(v, n) \in S + PROVE f \in [Nat -> S] +<1>. 
USE DEF NatInductiveDefConclusion +<1> SUFFICES \A n \in Nat : f[n] \in S + OBVIOUS +<1>1. f[0] \in S OBVIOUS +<1>2. ASSUME NEW n \in Nat, f[n] \in S + PROVE f[n+1] \in S + <2>1. /\ n+1 \in Nat \ {0} + /\ (n+1)-1 = n + OBVIOUS + <2>. QED BY <2>1, <1>2 +<1>. QED BY <1>1, <1>2, NatInduction, Isa + +(***************************************************************************) +(* The following theorems show uniqueness of functions recursively defined *) +(* over Nat. *) +(***************************************************************************) +THEOREM RecursiveFcnOfNatUnique == + ASSUME NEW Def(_,_), NEW f, NEW g, + f = [n \in Nat |-> Def(f,n)], + g = [n \in Nat |-> Def(g,n)], + ASSUME NEW n \in Nat, NEW ff, NEW gg, + \A i \in 0..(n-1) : ff[i] = gg[i] + PROVE Def(ff, n) = Def(gg, n) + PROVE f = g +<1>1. SUFFICES \A n \in Nat : f[n] = g[n] + OBVIOUS +<1>2. ASSUME NEW n \in Nat, \A i \in 0 .. n-1 : f[i] = g[i] + PROVE f[n] = g[n] + <2>1. Def(f,n) = Def(g,n) BY <1>2 + <2>. QED BY <2>1, Zenon +<1>. QED + BY <1>2, GeneralNatInduction, Isa + +THEOREM NatInductiveUnique == + ASSUME NEW Def(_,_), NEW f, NEW g, NEW f0, + NatInductiveDefConclusion(f, f0, Def), + NatInductiveDefConclusion(g, f0, Def) + PROVE f = g +<1>. USE DEF NatInductiveDefConclusion +<1>1. SUFFICES \A n \in Nat : f[n] = g[n] + OBVIOUS +<1>2. f[0] = g[0] OBVIOUS +<1>3. ASSUME NEW n \in Nat, f[n] = g[n] + PROVE f[n+1] = g[n+1] + BY <1>3 +<1>. QED + BY <1>2, <1>3, NatInduction, Isa + +(***************************************************************************) +(* The following theorems are analogous to the preceding ones but for *) +(* functions defined over intervals of natural numbers. *) +(***************************************************************************) + +FiniteNatInductiveDefHypothesis(f, c, Def(_,_), m, n) == + (f = CHOOSE g : g = [i \in m..n |-> IF i = m THEN c ELSE Def(g[i-1], i)]) +FiniteNatInductiveDefConclusion(f, c, Def(_,_), m, n) == + f = [i \in m..n |-> IF i = m THEN c ELSE Def(f[i-1], i)] + +THEOREM FiniteNatInductiveDef == + ASSUME NEW Def(_,_), NEW f, NEW c, NEW m \in Nat, NEW n \in Nat, + FiniteNatInductiveDefHypothesis(f, c, Def, m, n) + PROVE FiniteNatInductiveDefConclusion(f, c, Def, m, n) +<1>. DEFINE PRDef(g,i) == IF i <= m THEN c ELSE Def(g[i-1], i) + ff[i \in Nat] == PRDef(ff,i) + gg == [i \in m..n |-> ff[i]] +<1>1. ASSUME NEW i \in Nat, NEW g, NEW h, + \A j \in 0 .. i-1 : g[j] = h[j] + PROVE PRDef(g,i) = PRDef(h,i) + BY <1>1, Z3 +<1>. HIDE DEF PRDef +<1>2. ff = [i \in Nat |-> PRDef(ff,i)] + BY <1>1, RecursiveFcnOfNat, Isa +<1>. HIDE DEF ff +<1>. USE DEF PRDef +<1>3. gg = [i \in m..n |-> IF i=m THEN c ELSE Def(gg[i-1],i)] + BY <1>2, Z3 +<1>. HIDE DEF gg +<1>. QED + BY <1>3 DEF FiniteNatInductiveDefHypothesis, FiniteNatInductiveDefConclusion + +THEOREM FiniteNatInductiveDefType == + ASSUME NEW S, NEW Def(_,_), NEW f, NEW c \in S, NEW m \in Nat, NEW n \in Nat, + FiniteNatInductiveDefConclusion(f, c, Def, m, n), + \A v \in S, i \in (m+1) .. n : Def(v,i) \in S + PROVE f \in [m..n -> S] +<1>. USE DEF FiniteNatInductiveDefConclusion +<1>. DEFINE P(i) == i \in m..n => f[i] \in S +<1>1. SUFFICES \A i \in Nat : P(i) + OBVIOUS +<1>2. P(0) + OBVIOUS +<1>3. ASSUME NEW i \in Nat, P(i) + PROVE P(i+1) + BY <1>3 +<1>. QED + BY <1>2, <1>3, NatInduction, Isa + +THEOREM FiniteNatInductiveUnique == + ASSUME NEW Def(_,_), NEW f, NEW g, NEW c, NEW m \in Nat, NEW n \in Nat, + FiniteNatInductiveDefConclusion(f, c, Def, m, n), + FiniteNatInductiveDefConclusion(g, c, Def, m, n) + PROVE f = g +<1>. 
USE DEF FiniteNatInductiveDefConclusion +<1>. DEFINE P(i) == i \in m..n => f[i] = g[i] +<1>1. SUFFICES \A i \in Nat : P(i) + BY m..n \subseteq Nat +<1>2. P(0) + OBVIOUS +<1>3. ASSUME NEW i \in Nat, P(i) + PROVE P(i+1) + BY <1>3 +<1>. QED + BY <1>2, <1>3, NatInduction, Isa + +============================================================================= + +(***************************************************************************) +(* The following example shows how this module is used. *) +(***************************************************************************) + +factorial[n \in Nat] == IF n = 0 THEN 1 ELSE n * factorial[n-1] + +THEOREM FactorialDefConclusion == NatInductiveDefConclusion(factorial, 1, LAMBDA v,n : n*v) +<1>1. NatInductiveDefHypothesis(factorial, 1, LAMBDA v,n : n*v) + BY DEF NatInductiveDefHypothesis, factorial +<1>2. QED + BY <1>1, NatInductiveDef + +THEOREM FactorialDef == \A n \in Nat : factorial[n] = IF n = 0 THEN 1 ELSE n * factorial[n-1] +BY FactorialDefConclusion DEFS NatInductiveDefConclusion + +THEOREM FactorialType == factorial \in [Nat -> Nat] +<1>1. \A v \in Nat, n \in Nat \ {0} : n * v \in Nat + BY SMT +<1>2. QED + BY <1>1, 1 \in Nat, NatInductiveDefType, FactorialDefConclusion, Auto + +\* Modification History +\* Last modified Mon Oct 20 09:16:03 CEST 2014 by merz +\* Last modified Tue Oct 15 12:06:48 CEST 2013 by shaolin +\* Last modified Sat Nov 26 08:49:59 CET 2011 by merz +\* Last modified Mon Nov 07 08:58:05 PST 2011 by lamport +\* Created Mon Oct 31 02:52:05 PDT 2011 by lamport diff --git a/x/ccv/provider/keeper/prototyping/model/library/RealTime.tla b/x/ccv/provider/keeper/prototyping/model/library/RealTime.tla new file mode 100644 index 0000000000..1026c66a4b --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/library/RealTime.tla @@ -0,0 +1,22 @@ +----------------------------- MODULE RealTime ------------------------------- +EXTENDS Reals +VARIABLE now + +RTBound(A, v, D, E) == + LET TNext(t) == t' = IF <>_v \/ ~(ENABLED <>_v)' + THEN 0 + ELSE t + (now'-now) + + Timer(t) == (t=0) /\ [][TNext(t)]_<> + + MaxTime(t) == [](t \leq E) + + MinTime(t) == [][A => t \geq D]_v + IN \EE t : Timer(t) /\ MaxTime(t) /\ MinTime(t) +----------------------------------------------------------------------------- +RTnow(v) == LET NowNext == /\ now' \in {r \in Real : r > now} + /\ UNCHANGED v + IN /\ now \in Real + /\ [][NowNext]_now + /\ \A r \in Real : WF_now(NowNext /\ (now'>r)) +============================================================================= diff --git a/x/ccv/provider/keeper/prototyping/model/library/SequenceTheorems.tla b/x/ccv/provider/keeper/prototyping/model/library/SequenceTheorems.tla new file mode 100644 index 0000000000..790578210f --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/library/SequenceTheorems.tla @@ -0,0 +1,636 @@ +----------------------- MODULE SequenceTheorems ----------------------------- +(***************************************************************************) +(* This module contains a library of theorems about sequences and the *) +(* corresponding operations. 
*) +(***************************************************************************) +EXTENDS Sequences, Integers, WellFoundedInduction, Functions, TLAPS + + +(***************************************************************************) +(* Elementary properties about Seq(S) *) +(***************************************************************************) + +LEMMA SeqDef == \A S : Seq(S) = UNION {[1..n -> S] : n \in Nat} + +THEOREM ElementOfSeq == + ASSUME NEW S, NEW seq \in Seq(S), + NEW n \in 1..Len(seq) + PROVE seq[n] \in S + +THEOREM EmptySeq == + ASSUME NEW S + PROVE /\ << >> \in Seq(S) + /\ \A seq \in Seq(S) : (seq = << >>) <=> (Len(seq) = 0) + +THEOREM LenProperties == + ASSUME NEW S, NEW seq \in Seq(S) + PROVE /\ Len(seq) \in Nat + /\ seq \in [1..Len(seq) -> S] + /\ DOMAIN seq = 1 .. Len(seq) + +THEOREM ExceptSeq == + ASSUME NEW S, NEW seq \in Seq(S), NEW i \in 1 .. Len(seq), NEW e \in S + PROVE /\ [seq EXCEPT ![i] = e] \in Seq(S) + /\ Len([seq EXCEPT ![i] = e]) = Len(seq) + /\ \A j \in 1 .. Len(seq) : [seq EXCEPT ![i] = e][j] = IF j=i THEN e ELSE seq[j] + +THEOREM IsASeq == + ASSUME NEW n \in Nat, NEW e(_), NEW S, + \A i \in 1..n : e(i) \in S + PROVE [i \in 1..n |-> e(i)] \in Seq(S) + +THEOREM SeqEqual == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), + Len(s) = Len(t), \A i \in 1 .. Len(s) : s[i] = t[i] + PROVE s = t + +(*************************************************************************** + Concatenation (\o) And Properties +***************************************************************************) + +THEOREM ConcatProperties == + ASSUME NEW S, NEW s1 \in Seq(S), NEW s2 \in Seq(S) + PROVE /\ s1 \o s2 \in Seq(S) + /\ Len(s1 \o s2) = Len(s1) + Len(s2) + /\ \A i \in 1 .. Len(s1) + Len(s2) : (s1 \o s2)[i] = + IF i <= Len(s1) THEN s1[i] ELSE s2[i - Len(s1)] + +THEOREM ConcatEmptySeq == + ASSUME NEW S, NEW seq \in Seq(S) + PROVE /\ seq \o << >> = seq + /\ << >> \o seq = seq + +THEOREM ConcatAssociative == + ASSUME NEW S, NEW s1 \in Seq(S), NEW s2 \in Seq(S), NEW s3 \in Seq(S) + PROVE (s1 \o s2) \o s3 = s1 \o (s2 \o s3) + +THEOREM ConcatSimplifications == + ASSUME NEW S + PROVE /\ \A s,t \in Seq(S) : s \o t = s <=> t = <<>> + /\ \A s,t \in Seq(S) : s \o t = t <=> s = <<>> + /\ \A s,t \in Seq(S) : s \o t = <<>> <=> s = <<>> /\ t = <<>> + /\ \A s,t,u \in Seq(S) : s \o t = s \o u <=> t = u + /\ \A s,t,u \in Seq(S) : s \o u = t \o u <=> s = t + +(***************************************************************************) +(* SubSeq, Head and Tail *) +(***************************************************************************) + +THEOREM SubSeqProperties == + ASSUME NEW S, + NEW s \in Seq(S), + NEW m \in 1 .. Len(s)+1, + NEW n \in m-1 .. Len(s) + PROVE /\ SubSeq(s,m,n) \in Seq(S) + /\ Len(SubSeq(s, m, n)) = n-m+1 + /\ \A i \in 1 .. n-m+1 : SubSeq(s,m,n)[i] = s[m+i-1] + +THEOREM SubSeqEmpty == + ASSUME NEW s, NEW m \in Int, NEW n \in Int, n < m + PROVE SubSeq(s,m,n) = << >> + +THEOREM HeadTailProperties == + ASSUME NEW S, + NEW seq \in Seq(S), seq # << >> + PROVE /\ Head(seq) \in S + /\ Tail(seq) \in Seq(S) + /\ Len(Tail(seq)) = Len(seq)-1 + /\ \A i \in 1 .. Len(Tail(seq)) : Tail(seq)[i] = seq[i+1] + +THEOREM TailIsSubSeq == + ASSUME NEW S, + NEW seq \in Seq(S), seq # << >> + PROVE Tail(seq) = SubSeq(seq, 2, Len(seq)) + +THEOREM SubSeqRestrict == + ASSUME NEW S, NEW seq \in Seq(S), NEW n \in 0 .. Len(seq) + PROVE SubSeq(seq, 1, n) = Restrict(seq, 1 .. n) + +THEOREM HeadTailOfSubSeq == + ASSUME NEW S, NEW seq \in Seq(S), + NEW m \in 1 .. Len(seq), NEW n \in m .. 
Len(seq) + PROVE /\ Head(SubSeq(seq,m,n)) = seq[m] + /\ Tail(SubSeq(seq,m,n)) = SubSeq(seq, m+1, n) + +THEOREM SubSeqRecursiveFirst == + ASSUME NEW S, NEW seq \in Seq(S), + NEW m \in 1 .. Len(seq), NEW n \in m .. Len(seq) + PROVE SubSeq(seq, m, n) = << seq[m] >> \o SubSeq(seq, m+1, n) + +THEOREM SubSeqRecursiveSecond == + ASSUME NEW S, NEW seq \in Seq(S), + NEW m \in 1 .. Len(seq), NEW n \in m .. Len(seq) + PROVE SubSeq(seq, m, n) = SubSeq(seq, m, n-1) \o << seq[n] >> + +THEOREM SubSeqFull == + ASSUME NEW S, NEW seq \in Seq(S) + PROVE SubSeq(seq, 1, Len(seq)) = seq + +(*****************************************************************************) +(* Adjacent subsequences can be concatenated to obtain a longer subsequence. *) +(*****************************************************************************) +THEOREM ConcatAdjacentSubSeq == + ASSUME NEW S, NEW seq \in Seq(S), + NEW m \in 1 .. Len(seq)+1, + NEW k \in m-1 .. Len(seq), + NEW n \in k .. Len(seq) + PROVE SubSeq(seq, m, k) \o SubSeq(seq, k+1, n) = SubSeq(seq, m, n) + +(***************************************************************************) +(* Append, InsertAt, Cons & RemoveAt *) +(* Append(seq, elt) appends element elt at the end of sequence seq *) +(* Cons(elt, seq) prepends element elt at the beginning of sequence seq *) +(* InsertAt(seq, i, elt) inserts element elt in the position i and pushes *) +(* the *) +(* original element at i to i+1 and so on *) +(* RemoveAt(seq, i) removes the element at position i *) +(***************************************************************************) + +THEOREM AppendProperties == + ASSUME NEW S, NEW seq \in Seq(S), NEW elt \in S + PROVE /\ Append(seq, elt) \in Seq(S) + /\ Append(seq, elt) # << >> + /\ Len(Append(seq, elt)) = Len(seq)+1 + /\ \A i \in 1.. Len(seq) : Append(seq, elt)[i] = seq[i] + /\ Append(seq, elt)[Len(seq)+1] = elt + +THEOREM AppendIsConcat == + ASSUME NEW S, NEW seq \in Seq(S), NEW elt \in S + PROVE Append(seq, elt) = seq \o <> + +THEOREM HeadTailAppend == + ASSUME NEW S, NEW seq \in Seq(S), NEW elt + PROVE /\ Head(Append(seq, elt)) = IF seq = <<>> THEN elt ELSE Head(seq) + /\ Tail(Append(seq, elt)) = IF seq = <<>> THEN <<>> ELSE Append(Tail(seq), elt) + +Cons(elt, seq) == <> \o seq + +THEOREM ConsProperties == + ASSUME NEW S, NEW seq \in Seq(S), NEW elt \in S + PROVE /\ Cons(elt, seq) \in Seq(S) + /\ Cons(elt, seq) # <<>> + /\ Len(Cons(elt, seq)) = Len(seq)+1 + /\ Head(Cons(elt, seq)) = elt + /\ Tail(Cons(elt, seq)) = seq + /\ Cons(elt, seq)[1] = elt + /\ \A i \in 1 .. Len(seq) : Cons(elt, seq)[i+1] = seq[i] + +THEOREM ConsEmpty == + \A x : Cons(x, << >>) = << x >> + +THEOREM ConsHeadTail == + ASSUME NEW S, NEW seq \in Seq(S), seq # << >> + PROVE Cons(Head(seq), Tail(seq)) = seq + +THEOREM ConsAppend == + ASSUME NEW S, NEW seq \in Seq(S), NEW x \in S, NEW y \in S + PROVE Cons(x, Append(seq, y)) = Append(Cons(x,seq), y) + +THEOREM ConsInjective == + ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW f \in S, NEW t \in Seq(S) + PROVE Cons(e,s) = Cons(f,t) <=> e = f /\ s = t + +InsertAt(seq,i,elt) == SubSeq(seq, 1, i-1) \o <> \o SubSeq(seq, i, Len(seq)) + +THEOREM InsertAtProperties == + ASSUME NEW S, NEW seq \in Seq(S), NEW i \in 1 .. Len(seq)+1, NEW elt \in S + PROVE /\ InsertAt(seq,i,elt) \in Seq(S) + /\ Len(InsertAt(seq,i,elt)) = Len(seq)+1 + /\ \A j \in 1 .. Len(seq)+1 : InsertAt(seq,i,elt)[j] = + IF j> THEN 0 ELSE Len(seq)-1 + /\ \A i \in 1 .. 
Len(seq)-1 : Front(seq)[i] = seq[i] + +THEOREM FrontOfEmpty == Front(<< >>) = << >> + +THEOREM LastProperties == + ASSUME NEW S, NEW seq \in Seq(S), seq # << >> + PROVE /\ Last(seq) \in S + /\ Append(Front(seq), Last(seq)) = seq + +THEOREM FrontLastOfSubSeq == + ASSUME NEW S, NEW seq \in Seq(S), + NEW m \in 1 .. Len(seq), NEW n \in m .. Len(seq) + PROVE /\ Front(SubSeq(seq,m,n)) = SubSeq(seq, m, n-1) + /\ Last(SubSeq(seq,m,n)) = seq[n] + +THEOREM FrontLastAppend == + ASSUME NEW S, NEW seq \in Seq(S), NEW e \in S + PROVE /\ Front(Append(seq, e)) = seq + /\ Last(Append(seq, e)) = e + +THEOREM AppendInjective == + ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW f \in S, NEW t \in Seq(S) + PROVE Append(s,e) = Append(t,f) <=> s = t /\ e = f + +(***************************************************************************) +(* As a corollary of the previous theorems it follows that a sequence is *) +(* either empty or can be obtained by appending an element to a sequence. *) +(***************************************************************************) +THEOREM SequenceEmptyOrAppend == + ASSUME NEW S, NEW seq \in Seq(S), seq # << >> + PROVE \E s \in Seq(S), elt \in S : seq = Append(s, elt) + +(***************************************************************************) +(* REVERSE SEQUENCE And Properties *) +(* Reverse(seq) --> Reverses the sequence seq *) +(***************************************************************************) + +Reverse(seq) == [j \in 1 .. Len(seq) |-> seq[Len(seq)-j+1] ] + +THEOREM ReverseProperties == + ASSUME NEW S, NEW seq \in Seq(S) + PROVE /\ Reverse(seq) \in Seq(S) + /\ Len(Reverse(seq)) = Len(seq) + /\ Reverse(Reverse(seq)) = seq + +THEOREM ReverseEmpty == Reverse(<< >>) = << >> + +THEOREM ReverseEqual == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), Reverse(s) = Reverse(t) + PROVE s = t + +THEOREM ReverseEmptyIffEmpty == + ASSUME NEW S, NEW seq \in Seq(S), Reverse(seq) = <<>> + PROVE seq = <<>> + +THEOREM ReverseConcat == + ASSUME NEW S, NEW s1 \in Seq(S), NEW s2 \in Seq(S) + PROVE Reverse(s1 \o s2) = Reverse(s2) \o Reverse(s1) + +THEOREM ReverseAppend == + ASSUME NEW S, NEW seq \in Seq(S), NEW e \in S + PROVE Reverse(Append(seq,e)) = Cons(e, Reverse(seq)) + +THEOREM ReverseCons == + ASSUME NEW S, NEW seq \in Seq(S), NEW e \in S + PROVE Reverse(Cons(e,seq)) = Append(Reverse(seq), e) + +THEOREM ReverseSingleton == \A x : Reverse(<< x >>) = << x >> + +THEOREM ReverseSubSeq == + ASSUME NEW S, NEW seq \in Seq(S), + NEW m \in 1..Len(seq), NEW n \in 1..Len(seq) + PROVE Reverse(SubSeq(seq, m , n)) = SubSeq(Reverse(seq), Len(seq)-n+1, Len(seq)-m+1) + +THEOREM ReversePalindrome == + ASSUME NEW S, NEW seq \in Seq(S), + Reverse(seq) = seq + PROVE Reverse(seq \o seq) = seq \o seq + +THEOREM LastEqualsHeadReverse == + ASSUME NEW S, NEW seq \in Seq(S), seq # << >> + PROVE Last(seq) = Head(Reverse(seq)) + +THEOREM ReverseFrontEqualsTailReverse == + ASSUME NEW S, NEW seq \in Seq(S), seq # << >> + PROVE Reverse(Front(seq)) = Tail(Reverse(seq)) + +(***************************************************************************) +(* Induction principles for sequences *) +(***************************************************************************) + +THEOREM SequencesInductionAppend == + ASSUME NEW P(_), NEW S, + P(<< >>), + \A s \in Seq(S), e \in S : P(s) => P(Append(s,e)) + PROVE \A seq \in Seq(S) : P(seq) + +THEOREM SequencesInductionCons == + ASSUME NEW P(_), NEW S, + P(<< >>), + \A s \in Seq(S), e \in S : P(s) => P(Cons(e,s)) + PROVE \A seq \in Seq(S) : P(seq) + 
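(* As an illustration of how these induction principles are typically used,
   the following expository sketch (the lemma name is invented and the BY
   clauses may need adjustment before TLAPS accepts them) re-derives a
   variant of SequenceEmptyOrAppend, stated above, by induction on Append:

     LEMMA EmptyOrAppendSketch ==
       ASSUME NEW S
       PROVE  \A seq \in Seq(S) :
                 seq # << >> => \E s \in Seq(S), e \in S : seq = Append(s, e)
     <1>. DEFINE P(t) == t # << >> => \E s \in Seq(S), e \in S : t = Append(s, e)
     <1>1. P(<< >>)
       OBVIOUS
     <1>2. ASSUME NEW s \in Seq(S), NEW e \in S, P(s)
           PROVE  P(Append(s, e))
       BY Zenon
     <1>. HIDE DEF P
     <1>. QED  BY <1>1, <1>2, SequencesInductionAppend, Isa
*)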
+(***************************************************************************) +(* RANGE OF SEQUENCE *) +(***************************************************************************) + +THEOREM RangeOfSeq == + ASSUME NEW S, NEW seq \in Seq(S) + PROVE Range(seq) \in SUBSET S + +THEOREM RangeEquality == + ASSUME NEW S, NEW seq \in Seq(S) + PROVE Range(seq) = { seq[i] : i \in 1 .. Len(seq) } + +(* The range of the reverse sequence equals that of the original one. *) +THEOREM RangeReverse == + ASSUME NEW S, NEW seq \in Seq(S) + PROVE Range(Reverse(seq)) = Range(seq) + +(* Range of concatenation of sequences is the union of the ranges *) +THEOREM RangeConcatenation == + ASSUME NEW S, NEW s1 \in Seq(S), NEW s2 \in Seq(S) + PROVE Range(s1 \o s2) = Range(s1) \cup Range(s2) + +(***************************************************************************) +(* Prefixes and suffixes of sequences. *) +(***************************************************************************) + +IsPrefix(s,t) == \E u \in Seq(Range(t)) : t = s \o u +IsStrictPrefix(s,t) == IsPrefix(s,t) /\ s # t + +IsSuffix(s,t) == \E u \in Seq(Range(t)) : t = u \o s +IsStrictSuffix(s,t) == IsSuffix(s,t) /\ s # t + +(***************************************************************************) +(* The following theorem gives three alternative characterizations of *) +(* prefixes. It also implies that any prefix of a sequence t is at most *) +(* as long as t. *) +(***************************************************************************) +THEOREM IsPrefixProperties == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) + PROVE /\ IsPrefix(s,t) <=> \E u \in Seq(S) : t = s \o u + /\ IsPrefix(s,t) <=> Len(s) <= Len(t) /\ s = SubSeq(t, 1, Len(s)) + /\ IsPrefix(s,t) <=> Len(s) <= Len(t) /\ s = Restrict(t, DOMAIN s) + +THEOREM IsStrictPrefixProperties == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) + PROVE /\ IsStrictPrefix(s,t) <=> \E u \in Seq(S) : u # << >> /\ t = s \o u + /\ IsStrictPrefix(s,t) <=> Len(s) < Len(t) /\ s = SubSeq(t, 1, Len(s)) + /\ IsStrictPrefix(s,t) <=> Len(s) < Len(t) /\ s = Restrict(t, DOMAIN s) + /\ IsStrictPrefix(s,t) <=> IsPrefix(s,t) /\ Len(s) < Len(t) + +THEOREM IsPrefixElts == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW i \in 1 .. Len(s), + IsPrefix(s,t) + PROVE s[i] = t[i] + +THEOREM EmptyIsPrefix == + ASSUME NEW S, NEW s \in Seq(S) + PROVE /\ IsPrefix(<<>>, s) + /\ IsPrefix(s, <<>>) <=> s = <<>> + /\ IsStrictPrefix(<<>>, s) <=> s # <<>> + /\ ~ IsStrictPrefix(s, <<>>) + +THEOREM IsPrefixConcat == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) + PROVE IsPrefix(s, s \o t) + +THEOREM IsPrefixAppend == + ASSUME NEW S, NEW s \in Seq(S), NEW e \in S + PROVE IsPrefix(s, Append(s,e)) + +THEOREM FrontIsPrefix == + ASSUME NEW S, NEW s \in Seq(S) + PROVE /\ IsPrefix(Front(s), s) + /\ s # <<>> => IsStrictPrefix(Front(s), s) + +(***************************************************************************) +(* (Strict) prefixes on sequences form a (strict) partial order, and *) +(* the strict ordering is well-founded. 
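   For example, IsStrictPrefix(<<1>>, <<1, 2>>) holds, while neither
   IsStrictPrefix(<<1, 2>>, <<1>>) nor IsStrictPrefix(s, s) does; the
   well-foundedness of IsStrictPrefix is what licenses the recursive
   definitions over sequences given further below.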
*) +(***************************************************************************) +THEOREM IsPrefixPartialOrder == + ASSUME NEW S + PROVE /\ \A s \in Seq(S) : IsPrefix(s,s) + /\ \A s,t \in Seq(S) : IsPrefix(s,t) /\ IsPrefix(t,s) => s = t + /\ \A s,t,u \in Seq(S) : IsPrefix(s,t) /\ IsPrefix(t,u) => IsPrefix(s,u) + +THEOREM ConcatIsPrefix == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S), + IsPrefix(s \o t, u) + PROVE IsPrefix(s, u) + +THEOREM ConcatIsPrefixCancel == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S) + PROVE IsPrefix(s \o t, s \o u) <=> IsPrefix(t, u) + +THEOREM ConsIsPrefixCancel == + ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW t \in Seq(S) + PROVE IsPrefix(Cons(e,s), Cons(e,t)) <=> IsPrefix(s,t) + +THEOREM ConsIsPrefix == + ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW u \in Seq(S), + IsPrefix(Cons(e,s), u) + PROVE /\ e = Head(u) + /\ IsPrefix(s, Tail(u)) + +THEOREM IsStrictPrefixStrictPartialOrder == + ASSUME NEW S + PROVE /\ \A s \in Seq(S) : ~ IsStrictPrefix(s,s) + /\ \A s,t \in Seq(S) : IsStrictPrefix(s,t) => ~ IsStrictPrefix(t,s) + /\ \A s,t,u \in Seq(S) : IsStrictPrefix(s,t) /\ IsStrictPrefix(t,u) => IsStrictPrefix(s,u) + +THEOREM IsStrictPrefixWellFounded == + ASSUME NEW S + PROVE IsWellFoundedOn(OpToRel(IsStrictPrefix, Seq(S)), Seq(S)) + +THEOREM SeqStrictPrefixInduction == + ASSUME NEW P(_), NEW S, + \A t \in Seq(S) : (\A s \in Seq(S) : IsStrictPrefix(s,t) => P(s)) => P(t) + PROVE \A s \in Seq(S) : P(s) + +(***************************************************************************) +(* Similar theorems about suffixes. *) +(***************************************************************************) + +THEOREM IsSuffixProperties == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) + PROVE /\ IsSuffix(s,t) <=> \E u \in Seq(S) : t = u \o s + /\ IsSuffix(s,t) <=> Len(s) <= Len(t) /\ s = SubSeq(t, Len(t)-Len(s)+1, Len(t)) + /\ IsSuffix(s,t) <=> IsPrefix(Reverse(s), Reverse(t)) + +THEOREM IsStrictSuffixProperties == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) + PROVE /\ IsStrictSuffix(s,t) <=> \E u \in Seq(S) : u # << >> /\ t = u \o s + /\ IsStrictSuffix(s,t) <=> Len(s) < Len(t) /\ IsSuffix(s,t) + /\ IsStrictSuffix(s,t) <=> Len(s) < Len(t) /\ s = SubSeq(t, Len(t)-Len(s)+1, Len(t)) + /\ IsStrictSuffix(s,t) <=> IsStrictPrefix(Reverse(s), Reverse(t)) + +THEOREM IsSuffixElts == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW i \in 1 .. 
Len(s), + IsSuffix(s,t) + PROVE s[i] = t[Len(t) - Len(s) + i] + +THEOREM EmptyIsSuffix == + ASSUME NEW S, NEW s \in Seq(S) + PROVE /\ IsSuffix(<<>>, s) + /\ IsSuffix(s, <<>>) <=> s = <<>> + /\ IsStrictSuffix(<<>>, s) <=> s # <<>> + /\ ~ IsStrictSuffix(s, <<>>) + +THEOREM IsSuffixConcat == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) + PROVE IsSuffix(s, t \o s) + +THEOREM IsStrictSuffixCons == + ASSUME NEW S, NEW s \in Seq(S), NEW e \in S + PROVE IsStrictSuffix(s, Cons(e,s)) + +THEOREM TailIsSuffix == + ASSUME NEW S, NEW s \in Seq(S) + PROVE /\ IsSuffix(Tail(s), s) + /\ s # <<>> => IsStrictSuffix(Tail(s), s) + +THEOREM IsSuffixPartialOrder == + ASSUME NEW S + PROVE /\ \A s \in Seq(S) : IsSuffix(s,s) + /\ \A s,t \in Seq(S) : IsSuffix(s,t) /\ IsSuffix(t,s) => s = t + /\ \A s,t,u \in Seq(S) : IsSuffix(s,t) /\ IsSuffix(t,u) => IsSuffix(s,u) + +THEOREM ConcatIsSuffix == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S), + IsSuffix(s \o t, u) + PROVE IsSuffix(t, u) + +THEOREM ConcatIsSuffixCancel == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S) + PROVE IsSuffix(s \o t, u \o t) <=> IsSuffix(s, u) + +THEOREM AppendIsSuffixCancel == + ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW t \in Seq(S) + PROVE IsSuffix(Append(s,e), Append(t,e)) <=> IsSuffix(s,t) + +THEOREM AppendIsSuffix == + ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW u \in Seq(S), + IsSuffix(Append(s,e), u) + PROVE /\ e = Last(u) + /\ IsSuffix(s, Front(u)) + +THEOREM IsStrictSuffixStrictPartialOrder == + ASSUME NEW S + PROVE /\ \A s \in Seq(S) : ~ IsStrictSuffix(s,s) + /\ \A s,t \in Seq(S) : IsStrictSuffix(s,t) => ~ IsStrictSuffix(t,s) + /\ \A s,t,u \in Seq(S) : IsStrictSuffix(s,t) /\ IsStrictSuffix(t,u) => IsStrictSuffix(s,u) + +THEOREM IsStrictSuffixWellFounded == + ASSUME NEW S + PROVE IsWellFoundedOn(OpToRel(IsStrictSuffix, Seq(S)), Seq(S)) + +THEOREM SeqStrictSuffixInduction == + ASSUME NEW P(_), NEW S, + \A t \in Seq(S) : (\A s \in Seq(S) : IsStrictSuffix(s,t) => P(s)) => P(t) + PROVE \A s \in Seq(S) : P(s) + +(***************************************************************************) +(* Since the (strict) prefix and suffix orderings on sequences are *) +(* well-founded, they can be used for defining recursive functions. *) +(* The operators OpDefinesFcn, WFInductiveDefines, and WFInductiveUnique *) +(* are defined in module WellFoundedInduction. 
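   Roughly, OpDefinesFcn(f, Seq(S), Def) says that f is defined by choosing
   some function satisfying the recursive equation given by Def,
   WFInductiveDefines(f, Seq(S), Def) says that f itself satisfies that
   equation, and WFInductiveUnique(Seq(S), Def) says that at most one
   function over Seq(S) does; see WellFoundedInduction for the precise
   definitions.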
*) +(***************************************************************************) + +StrictPrefixesDetermineDef(S, Def(_,_)) == + \A g,h : \A seq \in Seq(S) : + (\A pre \in Seq(S) : IsStrictPrefix(pre,seq) => g[pre] = h[pre]) + => Def(g, seq) = Def(h, seq) + +LEMMA StrictPrefixesDetermineDef_WFDefOn == + ASSUME NEW S, NEW Def(_,_), StrictPrefixesDetermineDef(S, Def) + PROVE WFDefOn(OpToRel(IsStrictPrefix, Seq(S)), Seq(S), Def) + +THEOREM PrefixRecursiveSequenceFunctionUnique == + ASSUME NEW S, NEW Def(_,_), StrictPrefixesDetermineDef(S, Def) + PROVE WFInductiveUnique(Seq(S), Def) + +THEOREM PrefixRecursiveSequenceFunctionDef == + ASSUME NEW S, NEW Def(_,_), NEW f, + StrictPrefixesDetermineDef(S, Def), + OpDefinesFcn(f, Seq(S), Def) + PROVE WFInductiveDefines(f, Seq(S), Def) + +THEOREM PrefixRecursiveSequenceFunctionType == + ASSUME NEW S, NEW T, NEW Def(_,_), NEW f, + T # {}, + StrictPrefixesDetermineDef(S, Def), + WFInductiveDefines(f, Seq(S), Def), + \A g \in [Seq(S) -> T], s \in Seq(S) : Def(g,s) \in T + PROVE f \in [Seq(S) -> T] + +StrictSuffixesDetermineDef(S, Def(_,_)) == + \A g,h : \A seq \in Seq(S) : + (\A suf \in Seq(S) : IsStrictSuffix(suf,seq) => g[suf] = h[suf]) + => Def(g, seq) = Def(h, seq) + +LEMMA StrictSuffixesDetermineDef_WFDefOn == + ASSUME NEW S, NEW Def(_,_), StrictSuffixesDetermineDef(S, Def) + PROVE WFDefOn(OpToRel(IsStrictSuffix, Seq(S)), Seq(S), Def) + +THEOREM SuffixRecursiveSequenceFunctionUnique == + ASSUME NEW S, NEW Def(_,_), StrictSuffixesDetermineDef(S, Def) + PROVE WFInductiveUnique(Seq(S), Def) + +THEOREM SuffixRecursiveSequenceFunctionDef == + ASSUME NEW S, NEW Def(_,_), NEW f, + StrictSuffixesDetermineDef(S, Def), + OpDefinesFcn(f, Seq(S), Def) + PROVE WFInductiveDefines(f, Seq(S), Def) + +THEOREM SuffixRecursiveSequenceFunctionType == + ASSUME NEW S, NEW T, NEW Def(_,_), NEW f, + T # {}, + StrictSuffixesDetermineDef(S, Def), + WFInductiveDefines(f, Seq(S), Def), + \A g \in [Seq(S) -> T], s \in Seq(S) : Def(g,s) \in T + PROVE f \in [Seq(S) -> T] + +(***************************************************************************) +(* The following theorems justify ``primitive recursive'' functions over *) +(* sequences, with a base case for the empty sequence and recursion along *) +(* either the Tail or the Front of a non-empty sequence. 
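   For instance, a hypothetical summation function over sequences of
   naturals,

      SeqSum[s \in Seq(Nat)] == IF s = << >> THEN 0 ELSE Head(s) + SeqSum[Tail(s)]

   (SeqSum is not part of this module), fits the Tail-recursive scheme with
   f0 = 0 and Def(v, s) = Head(s) + v, so TailInductiveDef justifies the
   definition and TailInductiveDefType yields SeqSum \in [Seq(Nat) -> Nat],
   much as the factorial example in NaturalsInduction uses NatInductiveDef.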
*) +(***************************************************************************) + +TailInductiveDefHypothesis(f, S, f0, Def(_,_)) == + f = CHOOSE g : g = [s \in Seq(S) |-> IF s = <<>> THEN f0 ELSE Def(g[Tail(s)], s)] + +TailInductiveDefConclusion(f, S, f0, Def(_,_)) == + f = [s \in Seq(S) |-> IF s = <<>> THEN f0 ELSE Def(f[Tail(s)], s)] + +THEOREM TailInductiveDef == + ASSUME NEW S, NEW Def(_,_), NEW f, NEW f0, + TailInductiveDefHypothesis(f, S, f0, Def) + PROVE TailInductiveDefConclusion(f, S, f0, Def) + +THEOREM TailInductiveDefType == + ASSUME NEW S, NEW Def(_,_), NEW f, NEW f0, NEW T, + TailInductiveDefConclusion(f, S, f0, Def), + f0 \in T, + \A v \in T, s \in Seq(S) : s # <<>> => Def(v,s) \in T + PROVE f \in [Seq(S) -> T] + +FrontInductiveDefHypothesis(f, S, f0, Def(_,_)) == + f = CHOOSE g : g = [s \in Seq(S) |-> IF s = <<>> THEN f0 ELSE Def(g[Front(s)], s)] + +FrontInductiveDefConclusion(f, S, f0, Def(_,_)) == + f = [s \in Seq(S) |-> IF s = <<>> THEN f0 ELSE Def(f[Front(s)], s)] + +THEOREM FrontInductiveDef == + ASSUME NEW S, NEW Def(_,_), NEW f, NEW f0, + FrontInductiveDefHypothesis(f, S, f0, Def) + PROVE FrontInductiveDefConclusion(f, S, f0, Def) + +THEOREM FrontInductiveDefType == + ASSUME NEW S, NEW Def(_,_), NEW f, NEW f0, NEW T, + FrontInductiveDefConclusion(f, S, f0, Def), + f0 \in T, + \A v \in T, s \in Seq(S) : s # <<>> => Def(v,s) \in T + PROVE f \in [Seq(S) -> T] + +============================================================================= diff --git a/x/ccv/provider/keeper/prototyping/model/library/SequenceTheorems_proofs.tla b/x/ccv/provider/keeper/prototyping/model/library/SequenceTheorems_proofs.tla new file mode 100644 index 0000000000..f639a4c6d1 --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/library/SequenceTheorems_proofs.tla @@ -0,0 +1,1446 @@ +----------------------- MODULE SequenceTheorems_proofs ---------------------- +(***************************************************************************) +(* This module contains the proofs for theorems about sequences and the *) +(* corresponding operations. *) +(***************************************************************************) +EXTENDS Sequences, Integers, WellFoundedInduction, Functions, TLAPS + + +(***************************************************************************) +(* Elementary properties about Seq(S) *) +(***************************************************************************) + +LEMMA SeqDef == \A S : Seq(S) = UNION {[1..n -> S] : n \in Nat} +OBVIOUS + +THEOREM ElementOfSeq == + ASSUME NEW S, NEW seq \in Seq(S), + NEW n \in 1..Len(seq) + PROVE seq[n] \in S +OBVIOUS + +THEOREM EmptySeq == + ASSUME NEW S + PROVE /\ << >> \in Seq(S) + /\ \A seq \in Seq(S) : (seq = << >>) <=> (Len(seq) = 0) +OBVIOUS + +THEOREM LenProperties == + ASSUME NEW S, NEW seq \in Seq(S) + PROVE /\ Len(seq) \in Nat + /\ seq \in [1..Len(seq) -> S] + /\ DOMAIN seq = 1 .. Len(seq) +OBVIOUS + +THEOREM ExceptSeq == + ASSUME NEW S, NEW seq \in Seq(S), NEW i \in 1 .. Len(seq), NEW e \in S + PROVE /\ [seq EXCEPT ![i] = e] \in Seq(S) + /\ Len([seq EXCEPT ![i] = e]) = Len(seq) + /\ \A j \in 1 .. Len(seq) : [seq EXCEPT ![i] = e][j] = IF j=i THEN e ELSE seq[j] +<1>. DEFINE exc == [seq EXCEPT ![i] = e] +<1>1. \A j \in 1 .. Len(seq) : exc[j] = IF j=i THEN e ELSE seq[j] + BY DOMAIN exc = 1 .. Len(seq), Zenon +<1>. 
QED + BY <1>1 + +THEOREM IsASeq == + ASSUME NEW n \in Nat, NEW e(_), NEW S, + \A i \in 1..n : e(i) \in S + PROVE [i \in 1..n |-> e(i)] \in Seq(S) +OBVIOUS + +THEOREM SeqEqual == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), + Len(s) = Len(t), \A i \in 1 .. Len(s) : s[i] = t[i] + PROVE s = t +<1>1. /\ DOMAIN s = 1 .. Len(s) + /\ DOMAIN t = 1 .. Len(s) + /\ s = [i \in DOMAIN s |-> s[i]] + /\ t = [i \in DOMAIN t |-> t[i]] + OBVIOUS +<1>. QED + BY <1>1, Zenon + +(*************************************************************************** + Concatenation (\o) And Properties +***************************************************************************) + +THEOREM ConcatProperties == + ASSUME NEW S, NEW s1 \in Seq(S), NEW s2 \in Seq(S) + PROVE /\ s1 \o s2 \in Seq(S) + /\ Len(s1 \o s2) = Len(s1) + Len(s2) + /\ \A i \in 1 .. Len(s1) + Len(s2) : (s1 \o s2)[i] = + IF i <= Len(s1) THEN s1[i] ELSE s2[i - Len(s1)] +OBVIOUS + +THEOREM ConcatEmptySeq == + ASSUME NEW S, NEW seq \in Seq(S) + PROVE /\ seq \o << >> = seq + /\ << >> \o seq = seq +OBVIOUS + +THEOREM ConcatAssociative == + ASSUME NEW S, NEW s1 \in Seq(S), NEW s2 \in Seq(S), NEW s3 \in Seq(S) + PROVE (s1 \o s2) \o s3 = s1 \o (s2 \o s3) +OBVIOUS + +THEOREM ConcatSimplifications == + ASSUME NEW S + PROVE /\ \A s,t \in Seq(S) : s \o t = s <=> t = <<>> + /\ \A s,t \in Seq(S) : s \o t = t <=> s = <<>> + /\ \A s,t \in Seq(S) : s \o t = <<>> <=> s = <<>> /\ t = <<>> + /\ \A s,t,u \in Seq(S) : s \o t = s \o u <=> t = u + /\ \A s,t,u \in Seq(S) : s \o u = t \o u <=> s = t +<1>1. /\ \A s,t \in Seq(S) : s \o t = s <=> t = <<>> + /\ \A s,t \in Seq(S) : s \o t = t <=> s = <<>> + /\ \A s,t \in Seq(S) : s \o t = <<>> <=> s = <<>> /\ t = <<>> + OBVIOUS +<1>2. \A s,t,u \in Seq(S) : s \o t = s \o u <=> t = u + <2>. SUFFICES ASSUME NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S), + s \o t = s \o u + PROVE t = u + BY Zenon + <2>1. Len(t) = Len(u) OBVIOUS + <2>2. \A i \in 1 .. Len(t) : t[i] = (s \o t)[i + Len(s)] OBVIOUS + <2>3. \A i \in 1 .. Len(u) : u[i] = (s \o u)[i + Len(s)] OBVIOUS + <2>. QED BY <2>1, <2>2, <2>3, SeqEqual +<1>3. \A s,t,u \in Seq(S) : s \o u = t \o u <=> s = t + <2>. SUFFICES ASSUME NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S), + s \o u = t \o u + PROVE s = t + BY Zenon + <2>1. Len(s) = Len(t) OBVIOUS + <2>2. \A i \in 1 .. Len(s) : s[i] = (s \o u)[i] OBVIOUS + <2>3. \A i \in 1 .. Len(t) : t[i] = (t \o u)[i] OBVIOUS + <2>. QED BY <2>1, <2>2, <2>3, SeqEqual +<1>. QED BY <1>1, <1>2, <1>3, Zenon + +(***************************************************************************) +(* SubSeq, Head and Tail *) +(***************************************************************************) + +THEOREM SubSeqProperties == + ASSUME NEW S, + NEW s \in Seq(S), + NEW m \in 1 .. Len(s)+1, + NEW n \in m-1 .. Len(s) + PROVE /\ SubSeq(s,m,n) \in Seq(S) + /\ Len(SubSeq(s, m, n)) = n-m+1 + /\ \A i \in 1 .. n-m+1 : SubSeq(s,m,n)[i] = s[m+i-1] +<1>1. CASE n \in m .. Len(s) + BY <1>1 +<1>2. CASE n = m-1 + <2>. DEFINE sub == SubSeq(s,m,m-1) + <2>1. /\ sub = << >> + /\ n-m+1 = 0 + /\ \A i \in 1 .. n-m+1 : sub[i] \in S /\ SubSeq(s,m,n)[i] = s[m+i-1] + BY <1>2 + <2>2. Len(sub) = n-m+1 + BY <2>1, Zenon + <2>. QED + BY <1>2, <2>1, <2>2, Isa +<1>. 
QED + BY <1>1, <1>2 + +THEOREM SubSeqEmpty == + ASSUME NEW s, NEW m \in Int, NEW n \in Int, n < m + PROVE SubSeq(s,m,n) = << >> +OBVIOUS + +THEOREM HeadTailProperties == + ASSUME NEW S, + NEW seq \in Seq(S), seq # << >> + PROVE /\ Head(seq) \in S + /\ Tail(seq) \in Seq(S) + /\ Len(Tail(seq)) = Len(seq)-1 + /\ \A i \in 1 .. Len(Tail(seq)) : Tail(seq)[i] = seq[i+1] +OBVIOUS + + +THEOREM TailIsSubSeq == + ASSUME NEW S, + NEW seq \in Seq(S), seq # << >> + PROVE Tail(seq) = SubSeq(seq, 2, Len(seq)) +OBVIOUS + +THEOREM SubSeqRestrict == + ASSUME NEW S, NEW seq \in Seq(S), NEW n \in 0 .. Len(seq) + PROVE SubSeq(seq, 1, n) = Restrict(seq, 1 .. n) +BY DEF Restrict + +THEOREM HeadTailOfSubSeq == + ASSUME NEW S, NEW seq \in Seq(S), + NEW m \in 1 .. Len(seq), NEW n \in m .. Len(seq) + PROVE /\ Head(SubSeq(seq,m,n)) = seq[m] + /\ Tail(SubSeq(seq,m,n)) = SubSeq(seq, m+1, n) +OBVIOUS + +THEOREM SubSeqRecursiveFirst == + ASSUME NEW S, NEW seq \in Seq(S), + NEW m \in 1 .. Len(seq), NEW n \in m .. Len(seq) + PROVE SubSeq(seq, m, n) = << seq[m] >> \o SubSeq(seq, m+1, n) +<1>. DEFINE lhs == SubSeq(seq, m, n) + rhs == << seq[m] >> \o SubSeq(seq, m+1, n) +<1>1. /\ lhs \in Seq(S) + /\ rhs \in Seq(S) + OBVIOUS +<1>2. Len(lhs) = Len(rhs) + <2>1. Len(lhs) = n-m+1 + BY SubSeqProperties + <2>2. /\ m+1 \in 1 .. Len(seq)+1 + /\ n \in (m+1)-1 .. Len(seq) + OBVIOUS + <2>3. Len(SubSeq(seq, m+1, n)) = n - (m+1) + 1 + BY <2>2, SubSeqProperties, Zenon + <2>. QED + BY <2>1, <2>3 +<1>3. ASSUME NEW i \in 1 .. Len(lhs) + PROVE lhs[i] = rhs[i] + OBVIOUS +<1>. QED + BY <1>1, <1>2, <1>3, SeqEqual + +THEOREM SubSeqRecursiveSecond == + ASSUME NEW S, NEW seq \in Seq(S), + NEW m \in 1 .. Len(seq), NEW n \in m .. Len(seq) + PROVE SubSeq(seq, m, n) = SubSeq(seq, m, n-1) \o << seq[n] >> +<1>. DEFINE lhs == SubSeq(seq, m, n) + mid == SubSeq(seq, m, n-1) + rhs == mid \o << seq[n] >> +<1>1. /\ lhs \in Seq(S) + /\ mid \in Seq(S) + /\ rhs \in Seq(S) + /\ <> \in Seq(S) + OBVIOUS +<1>2. Len(lhs) = n-m+1 + BY SubSeqProperties +<1>3. Len(mid) = (n-1) - m + 1 + BY m \in 1 .. Len(seq)+1, n-1 \in m-1 .. Len(seq), SubSeqProperties +<1>4. Len(lhs) = Len(rhs) + BY <1>2, <1>3 +<1>5. ASSUME NEW i \in 1 .. Len(lhs) + PROVE lhs[i] = rhs[i] + <2>1. lhs[i] = seq[m+i-1] + OBVIOUS + <2>2. rhs[i] = seq[m+i-1] + <3>1. i \in 1 .. (Len(mid) + Len(<>)) + BY <1>4, <1>5 + <3>2. CASE i \in 1 .. (Len(lhs)-1) + BY <3>2 + <3>3. CASE ~(i \in 1 .. (Len(lhs)-1)) + <4>1. i = Len(lhs) /\ ~(i <= Len(mid)) + BY <3>3, <1>2, <1>3 + <4>2. rhs[i] = <>[i - Len(mid)] + BY <1>1, <3>1, <4>1, ConcatProperties, Zenon + <4>3. /\ i - Len(mid) = 1 + /\ n = m+i-1 + BY <4>1, <1>2, <1>3 + <4>. QED + BY <4>2, <4>3, Isa + <3>. QED + BY <3>2, <3>3 + <2>. QED + BY <2>1, <2>2 +<1>. QED + BY <1>1, <1>4, <1>5, SeqEqual + +THEOREM SubSeqFull == + ASSUME NEW S, NEW seq \in Seq(S) + PROVE SubSeq(seq, 1, Len(seq)) = seq +OBVIOUS + +(*****************************************************************************) +(* Adjacent subsequences can be concatenated to obtain a longer subsequence. *) +(*****************************************************************************) +THEOREM ConcatAdjacentSubSeq == + ASSUME NEW S, NEW seq \in Seq(S), + NEW m \in 1 .. Len(seq)+1, + NEW k \in m-1 .. Len(seq), + NEW n \in k .. Len(seq) + PROVE SubSeq(seq, m, k) \o SubSeq(seq, k+1, n) = SubSeq(seq, m, n) +<1>. DEFINE lhs == SubSeq(seq, m, k) \o SubSeq(seq, k+1, n) +<1>. /\ SubSeq(seq, m, k) \in Seq(S) + /\ SubSeq(seq, k+1, n) \in Seq(S) + /\ SubSeq(seq, m, n) \in Seq(S) + /\ lhs \in Seq(S) + OBVIOUS +<1>1. 
Len(SubSeq(seq, m, k)) = k-m+1 + BY SubSeqProperties +<1>2. Len(SubSeq(seq, k+1,n)) = n-k + BY k+1 \in 1 .. Len(seq)+1, n \in (k+1)-1 .. Len(seq), n-k = n-(k+1)+1, SubSeqProperties +<1>3. Len(SubSeq(seq, m, n)) = n-m+1 + BY n \in m-1 .. Len(seq), SubSeqProperties +<1>4. Len(lhs) = Len(SubSeq(seq, m, n)) + BY <1>1, <1>2, <1>3 +<1>5. ASSUME NEW i \in 1 .. Len(lhs) + PROVE lhs[i] = SubSeq(seq, m, n)[i] + <2>0. 1 .. Len(lhs) = (1 .. k-m+1) \cup (k-m+2 .. n-m+1) + BY <1>4, <1>3 + <2>1. CASE i \in 1 .. k-m+1 + <3>1. lhs[i] = SubSeq(seq, m, k)[i] + BY <2>1, <1>1, <1>2, ConcatProperties, i <= Len(SubSeq(seq, m, k)) + <3>2. SubSeq(seq, m, k)[i] = seq[m+i-1] BY <2>1, SubSeqProperties + <3>3. SubSeq(seq, m, n)[i] = seq[m+i-1] BY <2>1, SubSeqProperties + <3>. QED BY <3>1, <3>2, <3>3 + <2>2. CASE i \in k-m+2 .. n-m+1 + <3>1. /\ i \in 1 .. Len(SubSeq(seq,m,k)) + Len(SubSeq(seq,k+1,n)) + /\ ~(i <= Len(SubSeq(seq, m, k))) + BY <1>1, <1>2, <2>2 + <3>2. lhs[i] = SubSeq(seq, k+1, n)[i - Len(SubSeq(seq,m,k))] + BY <3>1, ConcatProperties + <3>3. i - Len(SubSeq(seq,m,k)) \in 1 .. n-k + BY <2>2, <1>1 + <3>4. SubSeq(seq, k+1, n)[i - Len(SubSeq(seq,m,k))] = seq[m+i-1] + BY <3>3, <1>1, SubSeqProperties + <3>5. SubSeq(seq, m, n)[i] = seq[m+i-1] + BY <1>4, <1>3, SubSeqProperties + <3>. QED BY <3>2, <3>4, <3>5 + <2>. QED BY <2>0, <2>1, <2>2 +<1>. QED BY <1>4, <1>5, SeqEqual + +(***************************************************************************) +(* Append, InsertAt, Cons & RemoveAt *) +(* Append(seq, elt) appends element elt at the end of sequence seq *) +(* Cons(elt, seq) prepends element elt at the beginning of sequence seq *) +(* InsertAt(seq, i, elt) inserts element elt in the position i and pushes the *) +(* original element at i to i+1 and so on *) +(* RemoveAt(seq, i) removes the element at position i *) +(***************************************************************************) + +THEOREM AppendProperties == + ASSUME NEW S, NEW seq \in Seq(S), NEW elt \in S + PROVE /\ Append(seq, elt) \in Seq(S) + /\ Append(seq, elt) # << >> + /\ Len(Append(seq, elt)) = Len(seq)+1 + /\ \A i \in 1.. Len(seq) : Append(seq, elt)[i] = seq[i] + /\ Append(seq, elt)[Len(seq)+1] = elt +OBVIOUS + +THEOREM AppendIsConcat == + ASSUME NEW S, NEW seq \in Seq(S), NEW elt \in S + PROVE Append(seq, elt) = seq \o <<elt>> +OBVIOUS + +THEOREM HeadTailAppend == + ASSUME NEW S, NEW seq \in Seq(S), NEW elt + PROVE /\ Head(Append(seq, elt)) = IF seq = <<>> THEN elt ELSE Head(seq) + /\ Tail(Append(seq, elt)) = IF seq = <<>> THEN <<>> ELSE Append(Tail(seq), elt) +<1>1. CASE seq = <<>> + <2>1. Append(seq, elt) = <<elt>> BY <1>1 + <2>. QED BY <1>1, <2>1 +<1>2. CASE seq # <<>> + <2>1. Head(Append(seq, elt)) = Head(seq) BY <1>2 + <2>2. Tail(Append(seq, elt)) = Append(Tail(seq), elt) BY <1>2 + <2>. QED BY <2>1, <2>2, <1>2 +<1>. QED BY <1>1, <1>2 + +Cons(elt, seq) == <<elt>> \o seq + +THEOREM ConsProperties == + ASSUME NEW S, NEW seq \in Seq(S), NEW elt \in S + PROVE /\ Cons(elt, seq) \in Seq(S) + /\ Cons(elt, seq) # <<>> + /\ Len(Cons(elt, seq)) = Len(seq)+1 + /\ Head(Cons(elt, seq)) = elt + /\ Tail(Cons(elt, seq)) = seq + /\ Cons(elt, seq)[1] = elt + /\ \A i \in 1 ..
Len(seq) : Cons(elt, seq)[i+1] = seq[i] +BY DEF Cons + +THEOREM ConsEmpty == + \A x : Cons(x, << >>) = << x >> +BY DEF Cons + +THEOREM ConsHeadTail == + ASSUME NEW S, NEW seq \in Seq(S), seq # << >> + PROVE Cons(Head(seq), Tail(seq)) = seq +BY DEF Cons + +THEOREM ConsAppend == + ASSUME NEW S, NEW seq \in Seq(S), NEW x \in S, NEW y \in S + PROVE Cons(x, Append(seq, y)) = Append(Cons(x,seq), y) +BY AppendIsConcat DEF Cons + +THEOREM ConsInjective == + ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW f \in S, NEW t \in Seq(S) + PROVE Cons(e,s) = Cons(f,t) <=> e = f /\ s = t +<1>1. SUFFICES ASSUME Cons(e,s) = Cons(f,t) PROVE e=f /\ s=t + OBVIOUS +<1>2. /\ Head(Cons(e,s)) = Head(Cons(f,t)) + /\ Tail(Cons(e,s)) = Tail(Cons(f,t)) + BY <1>1 +<1>. QED BY ONLY <1>2, ConsProperties, Isa + +InsertAt(seq,i,elt) == SubSeq(seq, 1, i-1) \o <<elt>> \o SubSeq(seq, i, Len(seq)) + +THEOREM InsertAtProperties == + ASSUME NEW S, NEW seq \in Seq(S), NEW i \in 1 .. Len(seq)+1, NEW elt \in S + PROVE /\ InsertAt(seq,i,elt) \in Seq(S) + /\ Len(InsertAt(seq,i,elt)) = Len(seq)+1 + /\ \A j \in 1 .. Len(seq)+1 : InsertAt(seq,i,elt)[j] = + IF j<i THEN seq[j] ELSE IF j=i THEN elt ELSE seq[j-1] +<1>. DEFINE left == SubSeq(seq, 1, i-1) + mid == <<elt>> + right == SubSeq(seq, i, Len(seq)) +<1>1. /\ left \in Seq(S) + /\ mid \in Seq(S) + /\ right \in Seq(S) + /\ InsertAt(seq,i,elt) \in Seq(S) + BY DEF InsertAt +<1>l. Len(left) = (i-1) - 1 + 1 + BY 1 \in 1 .. (Len(seq)+1), i-1 \in (1-1) .. Len(seq), SubSeqProperties, Zenon +<1>r. Len(right) = Len(seq) - i + 1 + BY Len(seq) \in (i-1) .. Len(seq), SubSeqProperties, Zenon +<1>2. Len(InsertAt(seq,i,elt)) = Len(seq)+1 + BY <1>l, <1>r DEF InsertAt +<1>3. ASSUME NEW j \in 1 .. Len(seq)+1 + PROVE InsertAt(seq,i,elt)[j] = IF j<i THEN seq[j] ELSE IF j=i THEN elt ELSE seq[j-1] + <2>1. CASE j \in 1 .. i-1 + BY <2>1 DEF InsertAt + <2>2. CASE j = i + <3>1. /\ j \in 1 .. Len(left) + Len(mid) + /\ ~(j <= Len(left)) + /\ <<elt>>[j - Len(left)] = elt + BY <2>2, <1>l + <3>2. (left \o mid)[j] = elt + BY <1>1, <3>1, ConcatProperties + <3>3. /\ j \in 1 .. (Len(left \o mid) + Len(right)) + /\ j <= Len(left \o mid) + /\ left \o mid \in Seq(S) + BY <2>2, <1>l, <1>r + <3>4. ((left \o mid) \o right)[j] = (left \o mid)[j] + BY <1>1, <3>3, ConcatProperties DEF InsertAt + <3>. QED + BY <3>4, <3>2, <2>2 DEF InsertAt + <2>3. CASE j \in i+1 .. Len(seq)+1 + <3>1. ~(j < i) /\ j # i + BY <2>3 + <3>2. /\ j \in 1 .. (Len(left \o mid) + Len(right)) + /\ ~(j <= Len(left \o mid)) + /\ left \o mid \in Seq(S) + BY <1>l, <1>r, <2>3 + <3>3. ((left \o mid) \o right)[j] = right[j - Len(left \o mid)] + BY <1>1, <3>2, ConcatProperties + <3>4. /\ Len(seq) \in i-1 .. Len(seq) + /\ j - Len(left \o mid) \in 1 .. (Len(seq) - i + 1) + BY <2>3, <1>l + <3>5. right[j - Len(left \o mid)] = seq[i + (j - Len(left \o mid)) - 1] + BY <3>4, SubSeqProperties + <3>6. right[j - Len(left \o mid)] = seq[j-1] + BY <3>5, <1>l + <3>. QED + BY <3>1, <3>3, <3>6 DEF InsertAt + <2>. QED + BY <2>1, <2>2, <2>3 +<1>. QED + BY <1>1, <1>2, <1>3 + +RemoveAt(seq, i) == SubSeq(seq, 1, i-1) \o SubSeq(seq, i+1, Len(seq)) + +THEOREM RemoveAtProperties == + ASSUME NEW S, NEW seq \in Seq(S), + NEW i \in 1..Len(seq) + PROVE /\ RemoveAt(seq,i) \in Seq(S) + /\ Len(RemoveAt(seq,i)) = Len(seq) - 1 + /\ \A j \in 1 .. Len(seq)-1 : RemoveAt(seq,i)[j] = IF j<i THEN seq[j] ELSE seq[j+1] +<1>. DEFINE left == SubSeq(seq, 1, i-1) + right == SubSeq(seq, i+1, Len(seq)) +<1>1. Len(left) = i-1 + BY 1 \in 1 .. Len(seq)+1, i-1 \in (1-1) .. Len(seq), (i-1) - 1 + 1 = i-1, + SubSeqProperties, Zenon +<1>2. Len(right) = Len(seq) - i + BY i+1 \in 1 .. Len(seq)+1, Len(seq) \in (i+1)-1 ..
Len(seq), Len(seq) - (i+1) + 1 = Len(seq)-i, + SubSeqProperties, Zenon +<1>3. /\ left \in Seq(S) + /\ right \in Seq(S) + /\ RemoveAt(seq,i) \in Seq(S) + BY DEF RemoveAt +<1>4. Len(RemoveAt(seq,i)) = Len(seq) - 1 + BY <1>1, <1>2 DEF RemoveAt +<1>5. ASSUME NEW j \in 1 .. Len(seq)-1 + PROVE RemoveAt(seq,i)[j] = IF j1. CASE j \in 1 .. i-1 + BY <2>1 DEF RemoveAt + <2>2. CASE j \in i .. Len(seq)-1 + <3>1. /\ j \in 1 .. Len(left) + Len(right) + /\ ~(j <= Len(left)) + BY <2>2, <1>1, <1>2 + <3>2. RemoveAt(seq,i)[j] = right[j - Len(left)] + BY <1>3, <3>1, ConcatProperties, Zenon DEF RemoveAt + <3>3. /\ i+1 \in 1 .. Len(seq)+1 + /\ Len(seq) \in (i+1)-1 .. Len(seq) + /\ j - (i-1) \in 1 .. Len(seq) - (i+1) + 1 + BY <2>2 + <3>4. right[j - (i-1)] = seq[(i+1) + (j - (i-1)) - 1] + BY <3>3, SubSeqProperties, Zenon + <3>. QED + BY <3>2, <3>4, <2>2, <1>1 + <2>. QED + BY <2>1, <2>2 +<1>. QED + BY <1>3, <1>4, <1>5 + +(***************************************************************************) +(* Front & Last *) +(* *) +(* Front(seq) sequence formed by removing the last element *) +(* Last(seq) last element of the sequence *) +(* *) +(* These operators are to Append what Head and Tail are to Cons. *) +(***************************************************************************) + +Front(seq) == SubSeq(seq, 1, Len(seq)-1) +Last(seq) == seq[Len(seq)] + +THEOREM FrontProperties == + ASSUME NEW S, NEW seq \in Seq(S) + PROVE /\ Front(seq) \in Seq(S) + /\ Len(Front(seq)) = IF seq = << >> THEN 0 ELSE Len(seq)-1 + /\ \A i \in 1 .. Len(seq)-1 : Front(seq)[i] = seq[i] +<1>1. CASE seq = << >> + <2>1. /\ Len(seq) = 0 + /\ Front(seq) = << >> + BY <1>1 DEF Front + <2>. QED BY <2>1 +<1>2. CASE seq # << >> + <2>1. /\ 1 \in 1 .. (Len(seq)+1) + /\ Len(seq)-1 \in (1-1) .. Len(seq) + BY <1>2 + <2>2. /\ SubSeq(seq, 1, Len(seq)-1) \in Seq(S) + /\ Len(SubSeq(seq, 1, Len(seq)-1)) = Len(seq)-1-1+1 + /\ \A i \in 1 .. Len(seq)-1-1+1 : SubSeq(seq,1,Len(seq)-1)[i] = seq[1+i-1] + BY <2>1, SubSeqProperties, Zenon + <2>. QED + BY <1>2, <2>2 DEF Front +<1>. QED BY <1>1, <1>2 + +THEOREM FrontOfEmpty == Front(<< >>) = << >> +BY SubSeqEmpty DEF Front + +THEOREM LastProperties == + ASSUME NEW S, NEW seq \in Seq(S), seq # << >> + PROVE /\ Last(seq) \in S + /\ Append(Front(seq), Last(seq)) = seq +<1>1. Last(seq) \in S + BY DEF Last +<1>2. Append(Front(seq), Last(seq)) = seq + <2>1. /\ 1 \in 1 .. Len(seq) + /\ Len(seq) \in 1 .. Len(seq) + OBVIOUS + <2>2. Front(seq) \o << Last(seq) >> = SubSeq(seq, 1, Len(seq)) + BY <2>1, SubSeqRecursiveSecond, Zenon DEF Front, Last + <2>. QED BY <2>2 +<1>. QED BY <1>1, <1>2 + +THEOREM FrontLastOfSubSeq == + ASSUME NEW S, NEW seq \in Seq(S), + NEW m \in 1 .. Len(seq), NEW n \in m .. Len(seq) + PROVE /\ Front(SubSeq(seq,m,n)) = SubSeq(seq, m, n-1) + /\ Last(SubSeq(seq,m,n)) = seq[n] +BY DEF Front, Last + +THEOREM FrontLastAppend == + ASSUME NEW S, NEW seq \in Seq(S), NEW e \in S + PROVE /\ Front(Append(seq, e)) = seq + /\ Last(Append(seq, e)) = e +BY DEF Front, Last + +THEOREM AppendInjective == + ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW f \in S, NEW t \in Seq(S) + PROVE Append(s,e) = Append(t,f) <=> s = t /\ e = f +<1>1. SUFFICES ASSUME Append(s,e) = Append(t,f) PROVE s=t /\ e=f + OBVIOUS +<1>2. /\ Front(Append(s,e)) = Front(Append(t,f)) + /\ Last(Append(s,e)) = Last(Append(t,f)) + BY <1>1 +<1>. 
QED + BY ONLY <1>2, FrontLastAppend + +(***************************************************************************) +(* As a corollary of the previous theorems it follows that a sequence is *) +(* either empty or can be obtained by appending an element to a sequence. *) +(***************************************************************************) +THEOREM SequenceEmptyOrAppend == + ASSUME NEW S, NEW seq \in Seq(S), seq # << >> + PROVE \E s \in Seq(S), elt \in S : seq = Append(s, elt) +BY FrontProperties, LastProperties + +(***************************************************************************) +(* REVERSE SEQUENCE And Properties *) +(* Reverse(seq) --> Reverses the sequence seq *) +(***************************************************************************) + +Reverse(seq) == [j \in 1 .. Len(seq) |-> seq[Len(seq)-j+1] ] + +THEOREM ReverseProperties == + ASSUME NEW S, NEW seq \in Seq(S) + PROVE /\ Reverse(seq) \in Seq(S) + /\ Len(Reverse(seq)) = Len(seq) + /\ Reverse(Reverse(seq)) = seq +BY DEF Reverse + +THEOREM ReverseEmpty == Reverse(<< >>) = << >> +BY DEF Reverse + +THEOREM ReverseEqual == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), Reverse(s) = Reverse(t) + PROVE s = t +<1>1. Len(s) = Len(t) BY DEF Reverse +<1>2. ASSUME NEW i \in 1 .. Len(s) + PROVE s[i] = t[i] + <2>1. Reverse(s)[Len(s)-i+1] = Reverse(t)[Len(s)-i+1] OBVIOUS + <2>. QED BY <2>1 DEF Reverse +<1>. QED BY <1>1, <1>2, SeqEqual + +THEOREM ReverseEmptyIffEmpty == + ASSUME NEW S, NEW seq \in Seq(S), Reverse(seq) = <<>> + PROVE seq = <<>> +BY <<>> \in Seq(S), ReverseEmpty, ReverseEqual, Zenon + +THEOREM ReverseConcat == + ASSUME NEW S, NEW s1 \in Seq(S), NEW s2 \in Seq(S) + PROVE Reverse(s1 \o s2) = Reverse(s2) \o Reverse(s1) +BY DEF Reverse + +THEOREM ReverseAppend == + ASSUME NEW S, NEW seq \in Seq(S), NEW e \in S + PROVE Reverse(Append(seq,e)) = Cons(e, Reverse(seq)) +BY DEF Reverse, Cons + +THEOREM ReverseCons == + ASSUME NEW S, NEW seq \in Seq(S), NEW e \in S + PROVE Reverse(Cons(e,seq)) = Append(Reverse(seq), e) +BY DEF Reverse, Cons + +THEOREM ReverseSingleton == \A x : Reverse(<< x >>) = << x >> +BY DEF Reverse + +THEOREM ReverseSubSeq == + ASSUME NEW S, NEW seq \in Seq(S), + NEW m \in 1..Len(seq), NEW n \in 1..Len(seq) + PROVE Reverse(SubSeq(seq, m , n)) = SubSeq(Reverse(seq), Len(seq)-n+1, Len(seq)-m+1) +BY DEF Reverse + +THEOREM ReversePalindrome == + ASSUME NEW S, NEW seq \in Seq(S), + Reverse(seq) = seq + PROVE Reverse(seq \o seq) = seq \o seq +BY ReverseConcat, Zenon + +THEOREM LastEqualsHeadReverse == + ASSUME NEW S, NEW seq \in Seq(S), seq # << >> + PROVE Last(seq) = Head(Reverse(seq)) +BY DEF Last, Reverse + +THEOREM ReverseFrontEqualsTailReverse == + ASSUME NEW S, NEW seq \in Seq(S), seq # << >> + PROVE Reverse(Front(seq)) = Tail(Reverse(seq)) +<1>. DEFINE lhs == Reverse(Front(seq)) + rhs == Tail(Reverse(seq)) +<1>1. /\ lhs \in Seq(S) + /\ rhs \in Seq(S) + /\ Len(lhs) = Len(seq) - 1 + /\ Len(rhs) = Len(seq) - 1 + BY FrontProperties, ReverseProperties +<1>3. ASSUME NEW i \in 1 .. Len(seq)-1 + PROVE lhs[i] = rhs[i] + <2>1. /\ Len(Front(seq)) = Len(seq)-1 + /\ i \in 1 .. Len(Front(seq)) + BY FrontProperties + <2>2. lhs[i] = Front(seq)[Len(seq)-i] + BY <2>1 DEF Reverse + <2>4. Front(seq)[Len(seq)-i] = seq[Len(seq)-i] + BY FrontProperties + <2>5. rhs[i] = seq[Len(seq)-i] + BY DEF Reverse + <2>. QED + BY <2>2, <2>4, <2>5 +<1>. 
QED + BY <1>1, <1>3, SeqEqual + + +(***************************************************************************) +(* Induction principles for sequences *) +(***************************************************************************) + +THEOREM SequencesInductionAppend == + ASSUME NEW P(_), NEW S, + P(<< >>), + \A s \in Seq(S), e \in S : P(s) => P(Append(s,e)) + PROVE \A seq \in Seq(S) : P(seq) +<1>. DEFINE Q(n) == \A seq \in Seq(S) : Len(seq) = n => P(seq) +<1>1. SUFFICES \A k \in Nat : Q(k) + OBVIOUS +<1>2. Q(0) + OBVIOUS +<1>3. ASSUME NEW n \in Nat, Q(n) + PROVE Q(n+1) + <2>1. ASSUME NEW s \in Seq(S), Len(s) = n+1 + PROVE P(s) + <3>1. /\ Front(s) \in Seq(S) + /\ Last(s) \in S + /\ Len(Front(s)) = n + /\ Append(Front(s), Last(s)) = s + BY <2>1, FrontProperties, LastProperties + <3>2. P(Front(s)) + BY <1>3, <3>1 + <3>3. QED + BY <3>1, <3>2, Zenon + <2>. QED + BY <2>1 +<1>4. QED + BY <1>2, <1>3, NatInduction, Isa + +THEOREM SequencesInductionCons == + ASSUME NEW P(_), NEW S, + P(<< >>), + \A s \in Seq(S), e \in S : P(s) => P(Cons(e,s)) + PROVE \A seq \in Seq(S) : P(seq) +<1>. DEFINE Q(n) == \A seq \in Seq(S) : Len(seq) = n => P(seq) +<1>1. SUFFICES \A k \in Nat : Q(k) + OBVIOUS +<1>2. Q(0) + OBVIOUS +<1>3. ASSUME NEW n \in Nat, Q(n) + PROVE Q(n+1) + <2>1. ASSUME NEW s \in Seq(S), Len(s) = n+1 + PROVE P(s) + <3>1. /\ Tail(s) \in Seq(S) + /\ Head(s) \in S + /\ Len(Tail(s)) = n + /\ Cons(Head(s), Tail(s)) = s + BY <2>1, ConsHeadTail + <3>2. P(Tail(s)) + BY <1>3, <3>1, Zenon + <3>3. QED + BY <3>1, <3>2, Zenon + <2>. QED + BY <2>1 +<1>4. QED + BY <1>2, <1>3, NatInduction, Isa + +(***************************************************************************) +(* RANGE OF SEQUENCE *) +(***************************************************************************) + +THEOREM RangeOfSeq == + ASSUME NEW S, NEW seq \in Seq(S) + PROVE Range(seq) \in SUBSET S +BY DEF Range + +THEOREM RangeEquality == + ASSUME NEW S, NEW seq \in Seq(S) + PROVE Range(seq) = { seq[i] : i \in 1 .. Len(seq) } +<1>1. DOMAIN seq = 1 .. Len(seq) + OBVIOUS +<1>2. QED + BY <1>1, Zenon DEF Range + +(* The range of the reverse sequence equals that of the original one. *) +THEOREM RangeReverse == + ASSUME NEW S, NEW seq \in Seq(S) + PROVE Range(Reverse(seq)) = Range(seq) +<1>1. Range(Reverse(seq)) \subseteq Range(seq) + BY RangeEquality DEF Reverse +<1>2. Range(seq) \subseteq Range(Reverse(seq)) + BY RangeEquality DEF Reverse +<1>3. QED + BY <1>1, <1>2, Zenon + +(* Range of concatenation of sequences is the union of the ranges *) +THEOREM RangeConcatenation == + ASSUME NEW S, NEW s1 \in Seq(S), NEW s2 \in Seq(S) + PROVE Range(s1 \o s2) = Range(s1) \cup Range(s2) +<1>1. Range(s1) \subseteq Range(s1 \o s2) + BY DEF Range +<1>2. Range(s2) \subseteq Range(s1 \o s2) + <2>1. SUFFICES ASSUME NEW i \in 1 .. Len(s2) + PROVE s2[i] \in Range(s1 \o s2) + BY RangeEquality + <2>2. /\ Len(s1)+i \in 1 .. Len(s1 \o s2) + /\ (s1 \o s2)[Len(s1)+i] = s2[i] + OBVIOUS + <2>. QED + BY <2>2, RangeEquality +<1>3. Range(s1 \o s2) \subseteq Range(s1) \cup Range(s2) + <2>1. SUFFICES ASSUME NEW i \in 1 .. Len(s1 \o s2) + PROVE (s1 \o s2)[i] \in Range(s1) \cup Range(s2) + BY LenProperties, ConcatProperties, Zenon DEF Range + <2>2. CASE i \in 1 .. Len(s1) + BY RangeEquality + <2>3. CASE i \in Len(s1)+1 .. Len(s1 \o s2) + BY RangeEquality + <2>. QED + BY <2>2, <2>3 +<1>. QED + BY <1>1, <1>2, <1>3, Zenon + +(***************************************************************************) +(* Prefixes and suffixes of sequences. 
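   As a quick sanity check of the operators defined below (an illustration added
   here, not part of the original library; the operator name PrefixSuffixExamples
   is ours), all of the following conjuncts are expected to hold:

     PrefixSuffixExamples ==
       /\ IsPrefix(<<1, 2>>, <<1, 2, 3>>)           \* witness u = <<3>>
       /\ IsStrictPrefix(<<1, 2>>, <<1, 2, 3>>)     \* a prefix, and the sequences differ
       /\ ~IsPrefix(<<2, 3>>, <<1, 2, 3>>)
       /\ IsSuffix(<<2, 3>>, <<1, 2, 3>>)           \* witness u = <<1>>
       /\ ~IsStrictSuffix(<<1, 2, 3>>, <<1, 2, 3>>) \* a sequence is not a strict suffix of itself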
*) +(***************************************************************************) + +IsPrefix(s,t) == \E u \in Seq(Range(t)) : t = s \o u +IsStrictPrefix(s,t) == IsPrefix(s,t) /\ s # t + +IsSuffix(s,t) == \E u \in Seq(Range(t)) : t = u \o s +IsStrictSuffix(s,t) == IsSuffix(s,t) /\ s # t + +(***************************************************************************) +(* The following theorem gives three alternative characterizations of *) +(* prefixes. It also implies that any prefix of a sequence t is at most *) +(* as long as t. *) +(***************************************************************************) +THEOREM IsPrefixProperties == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) + PROVE /\ IsPrefix(s,t) <=> \E u \in Seq(S) : t = s \o u + /\ IsPrefix(s,t) <=> Len(s) <= Len(t) /\ s = SubSeq(t, 1, Len(s)) + /\ IsPrefix(s,t) <=> Len(s) <= Len(t) /\ s = Restrict(t, DOMAIN s) +<1>1. ASSUME IsPrefix(s,t) PROVE Len(s) <= Len(t) + BY <1>1 DEF IsPrefix +<1>2. IsPrefix(s,t) <=> \E u \in Seq(S) : t = s \o u + <2>1. ASSUME NEW u \in Seq(Range(t)), t = s \o u + PROVE u \in Seq(S) + BY DEF Range + <2>2. ASSUME NEW u \in Seq(S), t = s \o u + PROVE u \in Seq(Range(t)) + <3>1. \A i \in 1 .. Len(u) : u[i] \in Range(u) + BY DOMAIN u = 1 .. Len(u) DEF Range + <3>2. \A i \in 1 .. Len(u) : u[i] \in Range(t) + BY <2>2, <3>1, RangeConcatenation + <3>. QED BY <3>2 + <2>. QED BY <2>1, <2>2 DEF IsPrefix +<1>3. IsPrefix(s,t) <=> Len(s) <= Len(t) /\ s = SubSeq(t, 1, Len(s)) + <2>1. ASSUME IsPrefix(s,t) + PROVE Len(s) <= Len(t) /\ s = SubSeq(t, 1, Len(s)) + <3>1. Len(s) <= Len(t) BY <2>1, <1>1 + <3>2. /\ 1 \in 1 .. Len(t)+1 + /\ Len(s) \in 1-1 .. Len(t) + /\ Len(s) = Len(s) - 1 + 1 + BY <3>1 + <3>3. Len(s) = Len(SubSeq(t, 1, Len(s))) + BY <3>2, SubSeqProperties, Zenon + <3>4. ASSUME NEW i \in 1 .. Len(s) + PROVE s[i] = SubSeq(t, 1, Len(s))[i] + BY <3>2, <2>1, SubSeqProperties DEF IsPrefix + <3>. QED BY <3>1, <3>3, <3>4, SeqEqual + <2>2. ASSUME Len(s) <= Len(t), s = SubSeq(t, 1, Len(s)) + PROVE IsPrefix(s,t) + <3>1. /\ 1 \in 1 .. Len(t)+1 + /\ Len(s) \in 1-1 .. Len(t) + /\ Len(t) \in Len(s) .. Len(t) + BY <2>2 + <3>2. t = s \o SubSeq(t, Len(s)+1, Len(t)) + BY <2>2, <3>1, ConcatAdjacentSubSeq, SubSeqFull, Zenon + <3>3. SubSeq(t, Len(s)+1, Len(t)) \in Seq(S) OBVIOUS + <3>. QED BY <3>2, <3>3, <1>2 + <2>. QED BY <2>1, <2>2 +<1>4. IsPrefix(s,t) <=> Len(s) <= Len(t) /\ s = Restrict(t, DOMAIN s) + <2>1. /\ DOMAIN s = 1 .. Len(s) + /\ Len(s) <= Len(t) <=> Len(s) \in 0 .. Len(t) + OBVIOUS + <2>. QED + BY <1>3, <2>1, SubSeqRestrict, Zenon +<1>. QED BY <1>2, <1>3, <1>4 + +THEOREM IsStrictPrefixProperties == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) + PROVE /\ IsStrictPrefix(s,t) <=> \E u \in Seq(S) : u # << >> /\ t = s \o u + /\ IsStrictPrefix(s,t) <=> Len(s) < Len(t) /\ s = SubSeq(t, 1, Len(s)) + /\ IsStrictPrefix(s,t) <=> Len(s) < Len(t) /\ s = Restrict(t, DOMAIN s) + /\ IsStrictPrefix(s,t) <=> IsPrefix(s,t) /\ Len(s) < Len(t) +BY IsPrefixProperties DEF IsStrictPrefix + +THEOREM IsPrefixElts == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW i \in 1 .. 
Len(s), + IsPrefix(s,t) + PROVE s[i] = t[i] +BY IsPrefixProperties + +THEOREM EmptyIsPrefix == + ASSUME NEW S, NEW s \in Seq(S) + PROVE /\ IsPrefix(<<>>, s) + /\ IsPrefix(s, <<>>) <=> s = <<>> + /\ IsStrictPrefix(<<>>, s) <=> s # <<>> + /\ ~ IsStrictPrefix(s, <<>>) +BY IsPrefixProperties, IsStrictPrefixProperties + +THEOREM IsPrefixConcat == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) + PROVE IsPrefix(s, s \o t) +BY IsPrefixProperties, ConcatProperties, Zenon + +THEOREM IsPrefixAppend == + ASSUME NEW S, NEW s \in Seq(S), NEW e \in S + PROVE IsPrefix(s, Append(s,e)) +<1>1. /\ <> \in Seq(S) + /\ Append(s,e) = s \o <> + OBVIOUS +<1>. QED BY <1>1, IsPrefixConcat, Zenon + +THEOREM FrontIsPrefix == + ASSUME NEW S, NEW s \in Seq(S) + PROVE /\ IsPrefix(Front(s), s) + /\ s # <<>> => IsStrictPrefix(Front(s), s) +<1>1. CASE s = << >> + BY <1>1, FrontOfEmpty, EmptyIsPrefix +<1>2. CASE s # << >> + BY <1>2, IsPrefixProperties, FrontProperties DEF Front, IsStrictPrefix +<1>. QED BY <1>1, <1>2 + +(***************************************************************************) +(* (Strict) prefixes on sequences form a (strict) partial order, and *) +(* the strict ordering is well-founded. *) +(***************************************************************************) +THEOREM IsPrefixPartialOrder == + ASSUME NEW S + PROVE /\ \A s \in Seq(S) : IsPrefix(s,s) + /\ \A s,t \in Seq(S) : IsPrefix(s,t) /\ IsPrefix(t,s) => s = t + /\ \A s,t,u \in Seq(S) : IsPrefix(s,t) /\ IsPrefix(t,u) => IsPrefix(s,u) +BY IsPrefixProperties + +THEOREM ConcatIsPrefix == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S), + IsPrefix(s \o t, u) + PROVE IsPrefix(s, u) +<1>1. /\ s \o t \in Seq(S) + /\ IsPrefix(s, s \o t) + BY IsPrefixConcat +<1>. QED BY <1>1, IsPrefixPartialOrder, Zenon + +THEOREM ConcatIsPrefixCancel == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S) + PROVE IsPrefix(s \o t, s \o u) <=> IsPrefix(t, u) +<1>1. ASSUME IsPrefix(t,u) PROVE IsPrefix(s \o t, s \o u) + <2>1. PICK v \in Seq(S) : u = t \o v BY <1>1, IsPrefixProperties + <2>2. s \o u = (s \o t) \o v BY <2>1 + <2>. QED BY s \o t \in Seq(S), s \o u \in Seq(S), <2>2, IsPrefixProperties, Zenon +<1>2. ASSUME IsPrefix(s \o t, s \o u) PROVE IsPrefix(t,u) + <2>1. PICK v \in Seq(S) : s \o u = (s \o t) \o v + BY <1>2, s \o t \in Seq(S), s \o u \in Seq(S), IsPrefixProperties, Isa + <2>2. s \o u = s \o (t \o v) + BY <2>1 + <2>3. u = t \o v + BY t \o v \in Seq(S), <2>2, ConcatSimplifications, IsaM("blast") + <2>. QED BY <2>3, IsPrefixProperties, Zenon +<1>. QED BY <1>1, <1>2 + +THEOREM ConsIsPrefixCancel == + ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW t \in Seq(S) + PROVE IsPrefix(Cons(e,s), Cons(e,t)) <=> IsPrefix(s,t) +BY <> \in Seq(S), ConcatIsPrefixCancel, Zenon DEF Cons + +THEOREM ConsIsPrefix == + ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW u \in Seq(S), + IsPrefix(Cons(e,s), u) + PROVE /\ e = Head(u) + /\ IsPrefix(s, Tail(u)) +<1>. <> \in Seq(S) + OBVIOUS +<1>1. IsPrefix(<>, u) + BY ConcatIsPrefix, Zenon DEF Cons +<1>2. PICK v \in Seq(S) : u = Cons(e, v) + BY <1>1, IsPrefixProperties, Isa DEF Cons +<1>3. /\ e = Head(u) + /\ v = Tail(u) + /\ IsPrefix(Cons(e,s), Cons(e, Tail(u))) + BY <1>2, ConsProperties, Isa +<1>. 
QED + BY <1>3, ConsIsPrefixCancel, Zenon + +THEOREM IsStrictPrefixStrictPartialOrder == + ASSUME NEW S + PROVE /\ \A s \in Seq(S) : ~ IsStrictPrefix(s,s) + /\ \A s,t \in Seq(S) : IsStrictPrefix(s,t) => ~ IsStrictPrefix(t,s) + /\ \A s,t,u \in Seq(S) : IsStrictPrefix(s,t) /\ IsStrictPrefix(t,u) => IsStrictPrefix(s,u) +BY IsStrictPrefixProperties + +THEOREM IsStrictPrefixWellFounded == + ASSUME NEW S + PROVE IsWellFoundedOn(OpToRel(IsStrictPrefix, Seq(S)), Seq(S)) +<1>1. IsWellFoundedOn(PreImage(Len, Seq(S), OpToRel(<, Nat)), Seq(S)) + BY NatLessThanWellFounded, PreImageWellFounded, \A s \in Seq(S) : Len(s) \in Nat, Blast +<1>2. OpToRel(IsStrictPrefix, Seq(S)) \subseteq PreImage(Len, Seq(S), OpToRel(<, Nat)) + BY IsStrictPrefixProperties DEF PreImage, OpToRel +<1>. QED + BY <1>1, <1>2, IsWellFoundedOnSubrelation, Zenon + +THEOREM SeqStrictPrefixInduction == + ASSUME NEW P(_), NEW S, + \A t \in Seq(S) : (\A s \in Seq(S) : IsStrictPrefix(s,t) => P(s)) => P(t) + PROVE \A s \in Seq(S) : P(s) +<1>1. \A t \in Seq(S) : + (\A s \in SetLessThan(t, OpToRel(IsStrictPrefix, Seq(S)), Seq(S)) : P(s)) + => P(t) + BY DEF SetLessThan, OpToRel +<1>. QED BY WFInduction, IsStrictPrefixWellFounded, <1>1, Blast + +(***************************************************************************) +(* Similar theorems about suffixes. *) +(***************************************************************************) + +THEOREM IsSuffixProperties == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) + PROVE /\ IsSuffix(s,t) <=> \E u \in Seq(S) : t = u \o s + /\ IsSuffix(s,t) <=> Len(s) <= Len(t) /\ s = SubSeq(t, Len(t)-Len(s)+1, Len(t)) + /\ IsSuffix(s,t) <=> IsPrefix(Reverse(s), Reverse(t)) +<1>1. IsSuffix(s,t) <=> \E u \in Seq(S) : t = u \o s + <2>1. ASSUME NEW u \in Seq(Range(t)), t = u \o s + PROVE u \in Seq(S) + BY DEF Range + <2>2. ASSUME NEW u \in Seq(S), t = u \o s + PROVE u \in Seq(Range(t)) + <3>1. \A i \in 1 .. Len(u) : u[i] \in Range(u) + BY DOMAIN u = 1 .. Len(u) DEF Range + <3>2. \A i \in 1 .. Len(u) : u[i] \in Range(t) + BY <2>2, <3>1, RangeConcatenation + <3>. QED BY <3>2 + <2>. QED BY <2>1, <2>2 DEF IsSuffix +<1>2. IsSuffix(s,t) <=> Len(s) <= Len(t) /\ s = SubSeq(t, Len(t)-Len(s)+1, Len(t)) + <2>1. ASSUME IsSuffix(s,t) + PROVE Len(s) <= Len(t) /\ s = SubSeq(t, Len(t)-Len(s)+1, Len(t)) + <3>1. Len(s) <= Len(t) + BY <2>1 DEF IsSuffix + <3>2. /\ Len(t) - Len(s) + 1 \in 1 .. Len(t)+1 + /\ Len(t) \in (Len(t) - Len(s) + 1) - 1 .. Len(t) + /\ Len(t) - (Len(t) - Len(s) + 1) + 1 = Len(s) + BY <3>1 + <3>3. Len(s) = Len(SubSeq(t, Len(t)-Len(s)+1, Len(t))) + BY <3>2, SubSeqProperties, Zenon + <3>4. ASSUME NEW i \in 1 .. Len(s) + PROVE s[i] = SubSeq(t, Len(t)-Len(s)+1, Len(t))[i] + BY <3>2, <2>1, SubSeqProperties DEF IsSuffix + <3>. QED BY <3>1, <3>3, <3>4, SeqEqual + <2>2. ASSUME Len(s) <= Len(t), s = SubSeq(t, Len(t)-Len(s)+1, Len(t)) + PROVE IsSuffix(s,t) + <3>1. /\ 1 \in 1 .. Len(t)+1 + /\ Len(t)-Len(s) \in 1-1 .. Len(t) + /\ Len(t) \in Len(t)-Len(s) .. Len(t) + BY <2>2 + <3>2. t = SubSeq(t, 1, Len(t) - Len(s)) \o s + BY <2>2, <3>1, ConcatAdjacentSubSeq, SubSeqFull, Zenon + <3>3. SubSeq(t, 1, Len(t) - Len(s)) \in Seq(S) OBVIOUS + <3>. QED BY <3>2, <3>3, <1>1 + <2>. QED BY <2>1, <2>2 +<1>3. IsSuffix(s,t) <=> IsPrefix(Reverse(s), Reverse(t)) + <2>. /\ Reverse(s) \in Seq(S) + /\ Reverse(t) \in Seq(S) + BY ReverseProperties + <2>1. ASSUME IsSuffix(s,t) + PROVE IsPrefix(Reverse(s), Reverse(t)) + <3>1. PICK u \in Seq(S) : t = u \o s + BY <2>1, <1>1 + <3>2. 
/\ Reverse(u) \in Seq(S) + /\ Reverse(t) = Reverse(s) \o Reverse(u) + BY <3>1, ReverseProperties, ReverseConcat, Zenon + <3>. QED BY <3>2, IsPrefixProperties, Zenon + <2>2. ASSUME IsPrefix(Reverse(s), Reverse(t)) + PROVE IsSuffix(s,t) + <3>1. PICK u \in Seq(S) : Reverse(t) = Reverse(s) \o u + BY <2>2, IsPrefixProperties + <3>2. /\ Reverse(u) \in Seq(S) + /\ Reverse(Reverse(t)) = Reverse(u) \o Reverse(Reverse(s)) + BY <3>1, ReverseProperties, ReverseConcat, Zenon + <3>. QED BY <3>2, <1>1, ReverseProperties, Zenon + <2>. QED BY <2>1, <2>2 +<1>. QED BY <1>1, <1>2, <1>3 + +THEOREM IsStrictSuffixProperties == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) + PROVE /\ IsStrictSuffix(s,t) <=> \E u \in Seq(S) : u # << >> /\ t = u \o s + /\ IsStrictSuffix(s,t) <=> Len(s) < Len(t) /\ IsSuffix(s,t) + /\ IsStrictSuffix(s,t) <=> Len(s) < Len(t) /\ s = SubSeq(t, Len(t)-Len(s)+1, Len(t)) + /\ IsStrictSuffix(s,t) <=> IsStrictPrefix(Reverse(s), Reverse(t)) +<1>1. ASSUME IsStrictSuffix(s,t) + PROVE /\ \E u \in Seq(S) : u # << >> /\ t = u \o s + /\ Len(s) < Len(t) + /\ IsSuffix(s,t) + /\ s = SubSeq(t, Len(t)-Len(s)+1, Len(t)) + /\ IsStrictPrefix(Reverse(s), Reverse(t)) + <2>1. IsSuffix(s,t) /\ s # t + BY <1>1 DEF IsStrictSuffix + <2>2. PICK u \in Seq(S) : t = u \o s + BY <2>1, IsSuffixProperties + <2>3. u # << >> + BY <2>2, <1>1 DEF IsStrictSuffix + <2>4. Len(s) < Len(t) + BY <2>2, <2>3 + <2>5. s = SubSeq(t, Len(t)-Len(s)+1, Len(t)) + BY <2>1, IsSuffixProperties + <2>6. IsStrictPrefix(Reverse(s), Reverse(t)) + BY <2>1, IsSuffixProperties, ReverseEqual DEF IsStrictPrefix + <2>. QED BY <2>1, <2>2, <2>3, <2>4, <2>5, <2>6 +<1>2. ASSUME NEW u \in Seq(S), u # << >>, t = u \o s + PROVE IsStrictSuffix(s,t) + <2>1. IsSuffix(s,t) BY <1>2, IsSuffixProperties, Zenon + <2>2. s # t BY <1>2 + <2>. QED BY <2>1, <2>2 DEF IsStrictSuffix +<1>3. ASSUME Len(s) < Len(t), IsSuffix(s,t) + PROVE IsStrictSuffix(s,t) + BY <1>3, IsSuffixProperties DEF IsStrictSuffix +<1>4. ASSUME Len(s) < Len(t), s = SubSeq(t, Len(t)-Len(s)+1, Len(t)) + PROVE IsStrictSuffix(s,t) + BY <1>4, IsSuffixProperties DEF IsStrictSuffix +<1>5. ASSUME IsStrictPrefix(Reverse(s), Reverse(t)) + PROVE IsStrictSuffix(s,t) + BY <1>5, IsSuffixProperties DEF IsStrictPrefix, IsStrictSuffix +<1>. QED BY <1>1, <1>2, <1>3, <1>4, <1>5 + +THEOREM IsSuffixElts == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW i \in 1 .. Len(s), + IsSuffix(s,t) + PROVE s[i] = t[Len(t) - Len(s) + i] +BY IsSuffixProperties + +THEOREM EmptyIsSuffix == + ASSUME NEW S, NEW s \in Seq(S) + PROVE /\ IsSuffix(<<>>, s) + /\ IsSuffix(s, <<>>) <=> s = <<>> + /\ IsStrictSuffix(<<>>, s) <=> s # <<>> + /\ ~ IsStrictSuffix(s, <<>>) +BY IsSuffixProperties, IsStrictSuffixProperties + +THEOREM IsSuffixConcat == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) + PROVE IsSuffix(s, t \o s) +BY IsSuffixProperties, ConcatProperties, Zenon + +THEOREM IsStrictSuffixCons == + ASSUME NEW S, NEW s \in Seq(S), NEW e \in S + PROVE IsStrictSuffix(s, Cons(e,s)) +BY IsStrictSuffixProperties DEF Cons + +THEOREM TailIsSuffix == + ASSUME NEW S, NEW s \in Seq(S) + PROVE /\ IsSuffix(Tail(s), s) + /\ s # <<>> => IsStrictSuffix(Tail(s), s) +<1>1. CASE s = <<>> + BY <1>1, Tail(<<>>) = <<>>, EmptyIsSuffix +<1>2. CASE s # <<>> + <2>. Head(s) \in S /\ Tail(s) \in Seq(S) + BY <1>2 + <2>1. IsStrictSuffix(Tail(s), Cons(Head(s), Tail(s))) + BY IsStrictSuffixCons, Zenon + <2>. QED BY <1>2, <2>1, ConsHeadTail DEF IsStrictSuffix +<1>. 
QED BY <1>1, <1>2 + +THEOREM IsSuffixPartialOrder == + ASSUME NEW S + PROVE /\ \A s \in Seq(S) : IsSuffix(s,s) + /\ \A s,t \in Seq(S) : IsSuffix(s,t) /\ IsSuffix(t,s) => s = t + /\ \A s,t,u \in Seq(S) : IsSuffix(s,t) /\ IsSuffix(t,u) => IsSuffix(s,u) +<1>1. ASSUME NEW s \in Seq(S) PROVE IsSuffix(s,s) + BY IsSuffixProperties +<1>2. ASSUME NEW s \in Seq(S), NEW t \in Seq(S), IsSuffix(s,t), IsSuffix(t,s) + PROVE s = t + <2>1. PICK v \in Seq(S) : t = v \o s + BY <1>2, IsSuffixProperties + <2>2. PICK w \in Seq(S) : s = w \o t + BY <1>2, IsSuffixProperties + <2>3. /\ v \o w \in Seq(S) + /\ (v \o w) \o t = t + BY <2>1, <2>2 + <2>. QED BY <2>2, <2>3 +<1>3. ASSUME NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S), + IsSuffix(s,t), IsSuffix(t,u) + PROVE IsSuffix(s,u) + <2>1. PICK v \in Seq(S) : t = v \o s + BY <1>3, IsSuffixProperties + <2>2. PICK w \in Seq(S) : u = w \o t + BY <1>3, IsSuffixProperties + <2>3. /\ w \o v \in Seq(S) + /\ u = (w \o v) \o s + BY <2>1, <2>2 + <2>. QED BY <2>3, IsSuffixProperties, Zenon +<1>. QED BY <1>1, <1>2, <1>3 + +THEOREM ConcatIsSuffix == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S), + IsSuffix(s \o t, u) + PROVE IsSuffix(t, u) +<1>1. /\ s \o t \in Seq(S) + /\ IsSuffix(t, s \o t) + BY IsSuffixConcat +<1>. QED BY <1>1, IsSuffixPartialOrder, Zenon + +THEOREM ConcatIsSuffixCancel == + ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S) + PROVE IsSuffix(s \o t, u \o t) <=> IsSuffix(s, u) +<1>1. ASSUME IsSuffix(s, u) PROVE IsSuffix(s \o t, u \o t) + <2>1. PICK v \in Seq(S) : u = v \o s BY <1>1, IsSuffixProperties + <2>2. u \o t = v \o (s \o t) BY <2>1 + <2>. QED BY s \o t \in Seq(S), u \o t \in Seq(S), <2>2, IsSuffixProperties, ZenonT(20) +<1>2. ASSUME IsSuffix(s \o t, u \o t) PROVE IsSuffix(s, u) + <2>1. PICK v \in Seq(S) : u \o t = v \o (s \o t) + BY <1>2, s \o t \in Seq(S), u \o t \in Seq(S), IsSuffixProperties, Isa + <2>2. u \o t = (v \o s) \o t + BY <2>1 + <2>3. u = v \o s + BY v \o s \in Seq(S), <2>2, ConcatSimplifications + <2>. QED BY <2>3, IsSuffixProperties, Zenon +<1>. QED BY <1>1, <1>2 + +THEOREM AppendIsSuffixCancel == + ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW t \in Seq(S) + PROVE IsSuffix(Append(s,e), Append(t,e)) <=> IsSuffix(s,t) +BY <> \in Seq(S), ConcatIsSuffixCancel, AppendIsConcat, Isa + +THEOREM AppendIsSuffix == + ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW u \in Seq(S), + IsSuffix(Append(s,e), u) + PROVE /\ e = Last(u) + /\ IsSuffix(s, Front(u)) +<1>. <> \in Seq(S) + OBVIOUS +<1>1. IsSuffix(<>, u) + BY ConcatIsSuffix, AppendIsConcat, Isa +<1>2. PICK v \in Seq(S) : u = Append(v,e) + BY <1>1, IsSuffixProperties, AppendIsConcat, Isa +<1>3. /\ e = Last(u) + /\ v = Front(u) + /\ IsSuffix(Append(s,e), Append(Front(u),e)) + BY <1>2, FrontLastAppend +<1>. QED + BY <1>3, AppendIsSuffixCancel, Zenon + +THEOREM IsStrictSuffixStrictPartialOrder == + ASSUME NEW S + PROVE /\ \A s \in Seq(S) : ~ IsStrictSuffix(s,s) + /\ \A s,t \in Seq(S) : IsStrictSuffix(s,t) => ~ IsStrictSuffix(t,s) + /\ \A s,t,u \in Seq(S) : IsStrictSuffix(s,t) /\ IsStrictSuffix(t,u) => IsStrictSuffix(s,u) +<1>1. ASSUME NEW s \in Seq(S) PROVE ~ IsStrictSuffix(s,s) + BY DEF IsStrictSuffix +<1>2. ASSUME NEW s \in Seq(S), NEW t \in Seq(S), IsStrictSuffix(s,t) + PROVE ~ IsStrictSuffix(t,s) + BY <1>2, IsSuffixPartialOrder DEF IsStrictSuffix +<1>3. ASSUME NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S), + IsStrictSuffix(s,t), IsStrictSuffix(t,u) + PROVE IsStrictSuffix(s,u) + <2>1. 
/\ IsSuffix(s,t) /\ Len(s) < Len(t) + /\ IsSuffix(t,u) /\ Len(t) < Len(u) + BY <1>3, IsStrictSuffixProperties + <2>2. IsSuffix(s,u) + BY <2>1, IsSuffixPartialOrder, Zenon + <2>3. Len(s) < Len(u) + BY <2>1 + <2>. QED BY <2>2, <2>3, IsStrictSuffixProperties +<1>4. QED BY <1>1, <1>2, <1>3 + +THEOREM IsStrictSuffixWellFounded == + ASSUME NEW S + PROVE IsWellFoundedOn(OpToRel(IsStrictSuffix, Seq(S)), Seq(S)) +<1>1. IsWellFoundedOn(PreImage(Len, Seq(S), OpToRel(<, Nat)), Seq(S)) + BY NatLessThanWellFounded, PreImageWellFounded, \A s \in Seq(S) : Len(s) \in Nat, Blast +<1>2. OpToRel(IsStrictSuffix, Seq(S)) \subseteq PreImage(Len, Seq(S), OpToRel(<, Nat)) + BY IsStrictSuffixProperties DEF PreImage, OpToRel +<1>. QED + BY <1>1, <1>2, IsWellFoundedOnSubrelation, Zenon + +THEOREM SeqStrictSuffixInduction == + ASSUME NEW P(_), NEW S, + \A t \in Seq(S) : (\A s \in Seq(S) : IsStrictSuffix(s,t) => P(s)) => P(t) + PROVE \A s \in Seq(S) : P(s) +<1>1. \A t \in Seq(S) : + (\A s \in SetLessThan(t, OpToRel(IsStrictSuffix, Seq(S)), Seq(S)) : P(s)) + => P(t) + BY DEF SetLessThan, OpToRel +<1>. QED BY WFInduction, IsStrictSuffixWellFounded, <1>1, Blast + +(***************************************************************************) +(* Since the (strict) prefix and suffix orderings on sequences are *) +(* well-founded, they can be used for defining recursive functions. *) +(* The operators OpDefinesFcn, WFInductiveDefines, and WFInductiveUnique *) +(* are defined in module WellFoundedInduction. *) +(***************************************************************************) + +StrictPrefixesDetermineDef(S, Def(_,_)) == + \A g,h : \A seq \in Seq(S) : + (\A pre \in Seq(S) : IsStrictPrefix(pre,seq) => g[pre] = h[pre]) + => Def(g, seq) = Def(h, seq) + +LEMMA StrictPrefixesDetermineDef_WFDefOn == + ASSUME NEW S, NEW Def(_,_), StrictPrefixesDetermineDef(S, Def) + PROVE WFDefOn(OpToRel(IsStrictPrefix, Seq(S)), Seq(S), Def) +BY Isa DEF StrictPrefixesDetermineDef, WFDefOn, OpToRel, SetLessThan + +THEOREM PrefixRecursiveSequenceFunctionUnique == + ASSUME NEW S, NEW Def(_,_), StrictPrefixesDetermineDef(S, Def) + PROVE WFInductiveUnique(Seq(S), Def) +BY StrictPrefixesDetermineDef_WFDefOn, IsStrictPrefixWellFounded, WFDefOnUnique + +THEOREM PrefixRecursiveSequenceFunctionDef == + ASSUME NEW S, NEW Def(_,_), NEW f, + StrictPrefixesDetermineDef(S, Def), + OpDefinesFcn(f, Seq(S), Def) + PROVE WFInductiveDefines(f, Seq(S), Def) +BY StrictPrefixesDetermineDef_WFDefOn, IsStrictPrefixWellFounded, WFInductiveDef + +THEOREM PrefixRecursiveSequenceFunctionType == + ASSUME NEW S, NEW T, NEW Def(_,_), NEW f, + T # {}, + StrictPrefixesDetermineDef(S, Def), + WFInductiveDefines(f, Seq(S), Def), + \A g \in [Seq(S) -> T], s \in Seq(S) : Def(g,s) \in T + PROVE f \in [Seq(S) -> T] +<1>1. IsWellFoundedOn(OpToRel(IsStrictPrefix, Seq(S)), Seq(S)) + BY IsStrictPrefixWellFounded +<1>2. WFDefOn(OpToRel(IsStrictPrefix, Seq(S)), Seq(S), Def) + BY StrictPrefixesDetermineDef_WFDefOn +<1>. 
QED + BY <1>1, <1>2, WFInductiveDefType, Isa + +StrictSuffixesDetermineDef(S, Def(_,_)) == + \A g,h : \A seq \in Seq(S) : + (\A suf \in Seq(S) : IsStrictSuffix(suf,seq) => g[suf] = h[suf]) + => Def(g, seq) = Def(h, seq) + +LEMMA StrictSuffixesDetermineDef_WFDefOn == + ASSUME NEW S, NEW Def(_,_), StrictSuffixesDetermineDef(S, Def) + PROVE WFDefOn(OpToRel(IsStrictSuffix, Seq(S)), Seq(S), Def) +BY Isa DEF StrictSuffixesDetermineDef, WFDefOn, OpToRel, SetLessThan + +THEOREM SuffixRecursiveSequenceFunctionUnique == + ASSUME NEW S, NEW Def(_,_), StrictSuffixesDetermineDef(S, Def) + PROVE WFInductiveUnique(Seq(S), Def) +BY StrictSuffixesDetermineDef_WFDefOn, IsStrictSuffixWellFounded, WFDefOnUnique + +THEOREM SuffixRecursiveSequenceFunctionDef == + ASSUME NEW S, NEW Def(_,_), NEW f, + StrictSuffixesDetermineDef(S, Def), + OpDefinesFcn(f, Seq(S), Def) + PROVE WFInductiveDefines(f, Seq(S), Def) +BY StrictSuffixesDetermineDef_WFDefOn, IsStrictSuffixWellFounded, WFInductiveDef + +THEOREM SuffixRecursiveSequenceFunctionType == + ASSUME NEW S, NEW T, NEW Def(_,_), NEW f, + T # {}, + StrictSuffixesDetermineDef(S, Def), + WFInductiveDefines(f, Seq(S), Def), + \A g \in [Seq(S) -> T], s \in Seq(S) : Def(g,s) \in T + PROVE f \in [Seq(S) -> T] +<1>1. IsWellFoundedOn(OpToRel(IsStrictSuffix, Seq(S)), Seq(S)) + BY IsStrictSuffixWellFounded +<1>2. WFDefOn(OpToRel(IsStrictSuffix, Seq(S)), Seq(S), Def) + BY StrictSuffixesDetermineDef_WFDefOn +<1>. QED + BY <1>1, <1>2, WFInductiveDefType, Isa + +(***************************************************************************) +(* The following theorems justify ``primitive recursive'' functions over *) +(* sequences, with a base case for the empty sequence and recursion along *) +(* either the Tail or the Front of a non-empty sequence. *) +(***************************************************************************) + +TailInductiveDefHypothesis(f, S, f0, Def(_,_)) == + f = CHOOSE g : g = [s \in Seq(S) |-> IF s = <<>> THEN f0 ELSE Def(g[Tail(s)], s)] + +TailInductiveDefConclusion(f, S, f0, Def(_,_)) == + f = [s \in Seq(S) |-> IF s = <<>> THEN f0 ELSE Def(f[Tail(s)], s)] + +THEOREM TailInductiveDef == + ASSUME NEW S, NEW Def(_,_), NEW f, NEW f0, + TailInductiveDefHypothesis(f, S, f0, Def) + PROVE TailInductiveDefConclusion(f, S, f0, Def) +<1>. DEFINE Op(h,s) == IF s = <<>> THEN f0 ELSE Def(h[Tail(s)], s) +<1>1. StrictSuffixesDetermineDef(S, Op) + <2>. SUFFICES ASSUME NEW g, NEW h, NEW seq \in Seq(S), + \A suf \in Seq(S) : IsStrictSuffix(suf, seq) => g[suf] = h[suf] + PROVE Op(g, seq) = Op(h, seq) + BY DEF StrictSuffixesDetermineDef, Zenon + <2>1. CASE seq = <<>> + BY <2>1 + <2>2. CASE seq # <<>> + <3>1. /\ Tail(seq) \in Seq(S) + /\ IsStrictSuffix(Tail(seq), seq) + BY <2>2, TailIsSuffix + <3>2. g[Tail(seq)] = h[Tail(seq)] + BY <3>1, Zenon + <3>. QED + BY <2>2, <3>2 + <2>. QED BY <2>1, <2>2 +<1>2. OpDefinesFcn(f, Seq(S), Op) + BY DEF OpDefinesFcn, TailInductiveDefHypothesis +<1>3. WFInductiveDefines(f, Seq(S), Op) + BY <1>1, <1>2, SuffixRecursiveSequenceFunctionDef +<1>. QED BY <1>3 DEF WFInductiveDefines, TailInductiveDefConclusion + +THEOREM TailInductiveDefType == + ASSUME NEW S, NEW Def(_,_), NEW f, NEW f0, NEW T, + TailInductiveDefConclusion(f, S, f0, Def), + f0 \in T, + \A v \in T, s \in Seq(S) : s # <<>> => Def(v,s) \in T + PROVE f \in [Seq(S) -> T] +<1>. SUFFICES \A s \in Seq(S) : f[s] \in T + BY DEF TailInductiveDefConclusion +<1>1. f[<<>>] \in T + BY <<>> \in Seq(S) DEF TailInductiveDefConclusion +<1>2. 
ASSUME NEW seq \in Seq(S), NEW e \in S, f[seq] \in T + PROVE f[Cons(e, seq)] \in T + <2>1. /\ Cons(e, seq) \in Seq(S) + /\ Cons(e, seq) # <<>> + /\ Tail(Cons(e, seq)) = seq + BY ConsProperties + <2>2. f[Cons(e, seq)] = Def(f[seq], Cons(e,seq)) + BY <2>1 DEF TailInductiveDefConclusion + <2>. QED BY <1>2, <2>1, <2>2 +<1>. QED BY <1>1, <1>2, SequencesInductionCons, Isa + +FrontInductiveDefHypothesis(f, S, f0, Def(_,_)) == + f = CHOOSE g : g = [s \in Seq(S) |-> IF s = <<>> THEN f0 ELSE Def(g[Front(s)], s)] + +FrontInductiveDefConclusion(f, S, f0, Def(_,_)) == + f = [s \in Seq(S) |-> IF s = <<>> THEN f0 ELSE Def(f[Front(s)], s)] + +THEOREM FrontInductiveDef == + ASSUME NEW S, NEW Def(_,_), NEW f, NEW f0, + FrontInductiveDefHypothesis(f, S, f0, Def) + PROVE FrontInductiveDefConclusion(f, S, f0, Def) +<1>. DEFINE Op(h,s) == IF s = <<>> THEN f0 ELSE Def(h[Front(s)], s) +<1>1. StrictPrefixesDetermineDef(S, Op) + <2>. SUFFICES ASSUME NEW g, NEW h, NEW seq \in Seq(S), + \A pre \in Seq(S) : IsStrictPrefix(pre, seq) => g[pre] = h[pre] + PROVE Op(g, seq) = Op(h, seq) + BY DEF StrictPrefixesDetermineDef, Zenon + <2>1. CASE seq = <<>> + BY <2>1 + <2>2. CASE seq # <<>> + <3>1. /\ Front(seq) \in Seq(S) + /\ IsStrictPrefix(Front(seq), seq) + BY <2>2, FrontProperties, FrontIsPrefix + <3>2. g[Front(seq)] = h[Front(seq)] + BY <3>1, Zenon + <3>. QED + BY <2>2, <3>2 + <2>. QED BY <2>1, <2>2 +<1>2. OpDefinesFcn(f, Seq(S), Op) + BY DEF OpDefinesFcn, FrontInductiveDefHypothesis +<1>3. WFInductiveDefines(f, Seq(S), Op) + BY <1>1, <1>2, PrefixRecursiveSequenceFunctionDef +<1>. QED BY <1>3 DEF WFInductiveDefines, FrontInductiveDefConclusion + +THEOREM FrontInductiveDefType == + ASSUME NEW S, NEW Def(_,_), NEW f, NEW f0, NEW T, + FrontInductiveDefConclusion(f, S, f0, Def), + f0 \in T, + \A v \in T, s \in Seq(S) : s # <<>> => Def(v,s) \in T + PROVE f \in [Seq(S) -> T] +<1>. SUFFICES \A s \in Seq(S) : f[s] \in T + BY DEF FrontInductiveDefConclusion +<1>1. f[<<>>] \in T + BY <<>> \in Seq(S) DEF FrontInductiveDefConclusion +<1>2. ASSUME NEW seq \in Seq(S), NEW e \in S, f[seq] \in T + PROVE f[Append(seq, e)] \in T + <2>1. /\ Append(seq, e) \in Seq(S) + /\ Append(seq, e) # <<>> + /\ Front(Append(seq, e)) = seq + BY AppendProperties, FrontLastAppend + <2>2. f[Append(seq, e)] = Def(f[seq], Append(seq, e)) + BY <2>1 DEF FrontInductiveDefConclusion + <2>. QED BY <1>2, <2>1, <2>2 +<1>. QED BY <1>1, <1>2, SequencesInductionAppend, Isa + +============================================================================= diff --git a/x/ccv/provider/keeper/prototyping/model/library/TLAPS.tla b/x/ccv/provider/keeper/prototyping/model/library/TLAPS.tla new file mode 100644 index 0000000000..3abf4b1b2e --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/library/TLAPS.tla @@ -0,0 +1,411 @@ +------------------------------- MODULE TLAPS -------------------------------- + +(* Backend pragmas. *) + + +(***************************************************************************) +(* Each of these pragmas can be cited with a BY or a USE. The pragma that *) +(* is added to the context of an obligation most recently is the one whose *) +(* effects are triggered. *) +(***************************************************************************) + +(***************************************************************************) +(* The following pragmas should be used only as a last resource. They are *) +(* dependent upon the particular backend provers, and are unlikely to have *) +(* any effect if the set of backend provers changes. 
Moreover, they are *) +(* meaningless to a reader of the proof. *) +(***************************************************************************) + + +(**************************************************************************) +(* Backend pragma: use the SMT solver for arithmetic. *) +(* *) +(* This method exists under this name for historical reasons. *) +(**************************************************************************) + +SimpleArithmetic == TRUE (*{ by (prover:"smt3") }*) + + +(**************************************************************************) +(* Backend pragma: SMT solver *) +(* *) +(* This method translates the proof obligation to SMTLIB2. The supported *) +(* fragment includes first-order logic, set theory, functions and *) +(* records. *) +(* SMT calls the smt-solver with the default timeout of 5 seconds *) +(* while SMTT(n) calls the smt-solver with a timeout of n seconds. *) +(**************************************************************************) + +SMT == TRUE (*{ by (prover:"smt3") }*) +SMTT(X) == TRUE (*{ by (prover:"smt3"; timeout:@) }*) + + +(**************************************************************************) +(* Backend pragma: CVC3 SMT solver *) +(* *) +(* CVC3 is used by default but you can also explicitly call it. *) +(**************************************************************************) + +CVC3 == TRUE (*{ by (prover: "cvc33") }*) +CVC3T(X) == TRUE (*{ by (prover:"cvc33"; timeout:@) }*) + +(**************************************************************************) +(* Backend pragma: Yices SMT solver *) +(* *) +(* This method translates the proof obligation to Yices native language. *) +(**************************************************************************) + +Yices == TRUE (*{ by (prover: "yices3") }*) +YicesT(X) == TRUE (*{ by (prover:"yices3"; timeout:@) }*) + +(**************************************************************************) +(* Backend pragma: veriT SMT solver *) +(* *) +(* This method translates the proof obligation to SMTLIB2 and calls veriT.*) +(**************************************************************************) + +veriT == TRUE (*{ by (prover: "verit") }*) +veriTT(X) == TRUE (*{ by (prover:"verit"; timeout:@) }*) + +(**************************************************************************) +(* Backend pragma: Z3 SMT solver *) +(* *) +(* This method translates the proof obligation to SMTLIB2 and calls Z3. *) +(**************************************************************************) + +Z3 == TRUE (*{ by (prover: "z33") }*) +Z3T(X) == TRUE (*{ by (prover:"z33"; timeout:@) }*) + +(**************************************************************************) +(* Backend pragma: SPASS superposition prover *) +(* *) +(* This method translates the proof obligation to the DFG format language *) +(* supported by the ATP SPASS. The translation is based on the SMT one. *) +(**************************************************************************) + +Spass == TRUE (*{ by (prover: "spass") }*) +SpassT(X) == TRUE (*{ by (prover:"spass"; timeout:@) }*) + +(**************************************************************************) +(* Backend pragma: The PTL propositional linear time temporal logic *) +(* prover. It currently is the LS4 backend. 
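   As an illustration (ours, not part of the original module), a purely
   temporal tautology such as

     THEOREM BoxIdempotent == ASSUME TEMPORAL F PROVE []F <=> [][]F
     BY PTL

   is the kind of obligation this backend is intended to discharge.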
*) +(* *) +(* This method translates the negetation of the proof obligation to *) +(* Seperated Normal Form (TRP++ format) and checks for unsatisfiability *) +(**************************************************************************) + +LS4 == TRUE (*{ by (prover: "ls4") }*) +PTL == TRUE (*{ by (prover: "ls4") }*) + +(**************************************************************************) +(* Backend pragma: Zenon with different timeouts (default is 10 seconds) *) +(* *) +(**************************************************************************) + +Zenon == TRUE (*{ by (prover:"zenon") }*) +ZenonT(X) == TRUE (*{ by (prover:"zenon"; timeout:@) }*) + +(********************************************************************) +(* Backend pragma: Isabelle with different timeouts and tactics *) +(* (default is 30 seconds/auto) *) +(********************************************************************) + +Isa == TRUE (*{ by (prover:"isabelle") }*) +IsaT(X) == TRUE (*{ by (prover:"isabelle"; timeout:@) }*) +IsaM(X) == TRUE (*{ by (prover:"isabelle"; tactic:@) }*) +IsaMT(X,Y) == TRUE (*{ by (prover:"isabelle"; tactic:@; timeout:@) }*) + +(***************************************************************************) +(* The following theorem expresses the (useful implication of the) law of *) +(* set extensionality, which can be written as *) +(* *) +(* THEOREM \A S, T : (S = T) <=> (\A x : (x \in S) <=> (x \in T)) *) +(* *) +(* Theorem SetExtensionality is sometimes required by the SMT backend for *) +(* reasoning about sets. It is usually counterproductive to include *) +(* theorem SetExtensionality in a BY clause for the Zenon or Isabelle *) +(* backends. Instead, use the pragma IsaWithSetExtensionality to instruct *) +(* the Isabelle backend to use the rule of set extensionality. 
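   As a sketch of its intended use (ours, not part of the original module), a
   goal whose proof needs the rule of set extensionality, such as

     THEOREM UnionSelf == \A S : S \cup S = S
     BY IsaWithSetExtensionality

   may be discharged this way, whereas for the SMT backend one would instead
   cite the theorem SetExtensionality below as an extra fact.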
*) +(***************************************************************************) +IsaWithSetExtensionality == TRUE + (*{ by (prover:"isabelle"; tactic:"(auto intro: setEqualI)")}*) + +THEOREM SetExtensionality == \A S,T : (\A x : x \in S <=> x \in T) => S = T +OBVIOUS + +(***************************************************************************) +(* The following theorem is needed to deduce NotInSetS \notin SetS from *) +(* the definition *) +(* *) +(* NotInSetS == CHOOSE v : v \notin SetS *) +(***************************************************************************) +THEOREM NoSetContainsEverything == \A S : \E x : x \notin S +OBVIOUS (*{by (isabelle "(auto intro: inIrrefl)")}*) +----------------------------------------------------------------------------- + + + +(********************************************************************) +(********************************************************************) +(********************************************************************) + + +(********************************************************************) +(* Old versions of Zenon and Isabelle pragmas below *) +(* (kept for compatibility) *) +(********************************************************************) + + +(**************************************************************************) +(* Backend pragma: Zenon with different timeouts (default is 10 seconds) *) +(* *) +(**************************************************************************) + +SlowZenon == TRUE (*{ by (prover:"zenon"; timeout:20) }*) +SlowerZenon == TRUE (*{ by (prover:"zenon"; timeout:40) }*) +VerySlowZenon == TRUE (*{ by (prover:"zenon"; timeout:80) }*) +SlowestZenon == TRUE (*{ by (prover:"zenon"; timeout:160) }*) + + + +(********************************************************************) +(* Backend pragma: Isabelle's automatic search ("auto") *) +(* *) +(* This pragma bypasses Zenon. It is useful in situations involving *) +(* essentially simplification and equational reasoning. *) +(* Default imeout for all isabelle tactics is 30 seconds. *) +(********************************************************************) +Auto == TRUE (*{ by (prover:"isabelle"; tactic:"auto") }*) +SlowAuto == TRUE (*{ by (prover:"isabelle"; tactic:"auto"; timeout:120) }*) +SlowerAuto == TRUE (*{ by (prover:"isabelle"; tactic:"auto"; timeout:480) }*) +SlowestAuto == TRUE (*{ by (prover:"isabelle"; tactic:"auto"; timeout:960) }*) + +(********************************************************************) +(* Backend pragma: Isabelle's "force" tactic *) +(* *) +(* This pragma bypasses Zenon. It is useful in situations involving *) +(* quantifier reasoning. *) +(********************************************************************) +Force == TRUE (*{ by (prover:"isabelle"; tactic:"force") }*) +SlowForce == TRUE (*{ by (prover:"isabelle"; tactic:"force"; timeout:120) }*) +SlowerForce == TRUE (*{ by (prover:"isabelle"; tactic:"force"; timeout:480) }*) +SlowestForce == TRUE (*{ by (prover:"isabelle"; tactic:"force"; timeout:960) }*) + +(***********************************************************************) +(* Backend pragma: Isabelle's "simplification" tactics *) +(* *) +(* These tactics simplify the goal before running one of the automated *) +(* tactics. They are often necessary for obligations involving record *) +(* or tuple projections. 
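   For instance (our sketch, not part of the original module), a goal built
   from a record projection such as

     THEOREM RecordProjection == [name |-> "a", val |-> 0].val = 0
     BY SimplifyAndSolve

   is typical of the goals these tactics are meant for.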
Use the SimplfyAndSolve tactic unless you're *) +(* sure you can get away with just Simplification *) +(***********************************************************************) +SimplifyAndSolve == TRUE + (*{ by (prover:"isabelle"; tactic:"clarsimp auto?") }*) +SlowSimplifyAndSolve == TRUE + (*{ by (prover:"isabelle"; tactic:"clarsimp auto?"; timeout:120) }*) +SlowerSimplifyAndSolve == TRUE + (*{ by (prover:"isabelle"; tactic:"clarsimp auto?"; timeout:480) }*) +SlowestSimplifyAndSolve == TRUE + (*{ by (prover:"isabelle"; tactic:"clarsimp auto?"; timeout:960) }*) + +Simplification == TRUE (*{ by (prover:"isabelle"; tactic:"clarsimp") }*) +SlowSimplification == TRUE + (*{ by (prover:"isabelle"; tactic:"clarsimp"; timeout:120) }*) +SlowerSimplification == TRUE + (*{ by (prover:"isabelle"; tactic:"clarsimp"; timeout:480) }*) +SlowestSimplification == TRUE + (*{ by (prover:"isabelle"; tactic:"clarsimp"; timeout:960) }*) + +(**************************************************************************) +(* Backend pragma: Isabelle's tableau prover ("blast") *) +(* *) +(* This pragma bypasses Zenon and uses Isabelle's built-in theorem *) +(* prover, Blast. It is almost never better than Zenon by itself, but *) +(* becomes very useful in combination with the Auto pragma above. The *) +(* AutoBlast pragma first attempts Auto and then uses Blast to prove what *) +(* Auto could not prove. (There is currently no way to use Zenon on the *) +(* results left over from Auto.) *) +(**************************************************************************) +Blast == TRUE (*{ by (prover:"isabelle"; tactic:"blast") }*) +SlowBlast == TRUE (*{ by (prover:"isabelle"; tactic:"blast"; timeout:120) }*) +SlowerBlast == TRUE (*{ by (prover:"isabelle"; tactic:"blast"; timeout:480) }*) +SlowestBlast == TRUE (*{ by (prover:"isabelle"; tactic:"blast"; timeout:960) }*) + +AutoBlast == TRUE (*{ by (prover:"isabelle"; tactic:"auto, blast") }*) + + +(**************************************************************************) +(* Backend pragmas: multi-back-ends *) +(* *) +(* These pragmas just run a bunch of back-ends one after the other in the *) +(* hope that one will succeed. This saves time and effort for the user at *) +(* the expense of computation time. *) +(**************************************************************************) + +(* CVC3 goes first because it's bundled with TLAPS, then the other SMT + solvers are unlikely to succeed if CVC3 fails, so we run zenon and + Isabelle before them. 
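   For example (illustrative only), a fact whose best backend is not obvious
   can be attempted with the combined strategy and a per-backend timeout:

     LEMMA SquareNonNegative == \A n \in Int : n * n >= 0
     BY AllProversT(30)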
*) +AllProvers == TRUE (*{ + by (prover:"cvc33") + by (prover:"zenon") + by (prover:"isabelle"; tactic:"auto") + by (prover:"spass") + by (prover:"smt3") + by (prover:"yices3") + by (prover:"verit") + by (prover:"z33") + by (prover:"isabelle"; tactic:"force") + by (prover:"isabelle"; tactic:"(auto intro: setEqualI)") + by (prover:"isabelle"; tactic:"clarsimp auto?") + by (prover:"isabelle"; tactic:"clarsimp") + by (prover:"isabelle"; tactic:"auto, blast") + }*) +AllProversT(X) == TRUE (*{ + by (prover:"cvc33"; timeout:@) + by (prover:"zenon"; timeout:@) + by (prover:"isabelle"; tactic:"auto"; timeout:@) + by (prover:"spass"; timeout:@) + by (prover:"smt3"; timeout:@) + by (prover:"yices3"; timeout:@) + by (prover:"verit"; timeout:@) + by (prover:"z33"; timeout:@) + by (prover:"isabelle"; tactic:"force"; timeout:@) + by (prover:"isabelle"; tactic:"(auto intro: setEqualI)"; timeout:@) + by (prover:"isabelle"; tactic:"clarsimp auto?"; timeout:@) + by (prover:"isabelle"; tactic:"clarsimp"; timeout:@) + by (prover:"isabelle"; tactic:"auto, blast"; timeout:@) + }*) + +AllSMT == TRUE (*{ + by (prover:"cvc33") + by (prover:"smt3") + by (prover:"yices3") + by (prover:"verit") + by (prover:"z33") + }*) +AllSMTT(X) == TRUE (*{ + by (prover:"cvc33"; timeout:@) + by (prover:"smt3"; timeout:@) + by (prover:"yices3"; timeout:@) + by (prover:"verit"; timeout:@) + by (prover:"z33"; timeout:@) + }*) + +AllIsa == TRUE (*{ + by (prover:"isabelle"; tactic:"auto") + by (prover:"isabelle"; tactic:"force") + by (prover:"isabelle"; tactic:"(auto intro: setEqualI)") + by (prover:"isabelle"; tactic:"clarsimp auto?") + by (prover:"isabelle"; tactic:"clarsimp") + by (prover:"isabelle"; tactic:"auto, blast") + }*) +AllIsaT(X) == TRUE (*{ + by (prover:"isabelle"; tactic:"auto"; timeout:@) + by (prover:"isabelle"; tactic:"force"; timeout:@) + by (prover:"isabelle"; tactic:"(auto intro: setEqualI)"; timeout:@) + by (prover:"isabelle"; tactic:"clarsimp auto?"; timeout:@) + by (prover:"isabelle"; tactic:"clarsimp"; timeout:@) + by (prover:"isabelle"; tactic:"auto, blast"; timeout:@) + }*) + +---------------------------------------------------------------------------- +(***************************************************************************) +(* TEMPORAL LOGIC *) +(* *) +(* The following rules are intended to be used when TLAPS handles temporal *) +(* logic. They will not work now. Moreover when temporal reasoning is *) +(* implemented, these rules may be changed or omitted, and additional *) +(* rules will probably be added. However, they are included mainly so *) +(* their names will be defined, preventing the use of identifiers that are *) +(* likely to produce name clashes with future versions of this module. *) +(***************************************************************************) + + +(***************************************************************************) +(* The following proof rules (and their names) are from the paper "The *) +(* Temporal Logic of Actions". 
*) +(***************************************************************************) +THEOREM RuleTLA1 == ASSUME STATE P, STATE f, + P /\ (f' = f) => P' + PROVE []P <=> P /\ [][P => P']_f + +THEOREM RuleTLA2 == ASSUME STATE P, STATE Q, STATE f, STATE g, + ACTION A, ACTION B, + P /\ [A]_f => Q /\ [B]_g + PROVE []P /\ [][A]_f => []Q /\ [][B]_g + +THEOREM RuleINV1 == ASSUME STATE I, STATE F, ACTION N, + I /\ [N]_F => I' + PROVE I /\ [][N]_F => []I + +THEOREM RuleINV2 == ASSUME STATE I, STATE f, ACTION N + PROVE []I => ([][N]_f <=> [][N /\ I /\ I']_f) + +THEOREM RuleWF1 == ASSUME STATE P, STATE Q, STATE f, ACTION N, ACTION A, + P /\ [N]_f => (P' \/ Q'), + P /\ <>_f => Q', + P => ENABLED <>_f + PROVE [][N]_f /\ WF_f(A) => (P ~> Q) + +THEOREM RuleSF1 == ASSUME STATE P, STATE Q, STATE f, + ACTION N, ACTION A, TEMPORAL F, + P /\ [N]_f => (P' \/ Q'), + P /\ <>_f => Q', + []P /\ [][N]_f /\ []F => <> ENABLED <>_f + PROVE [][N]_f /\ SF_f(A) /\ []F => (P ~> Q) + +(***************************************************************************) +(* The rules WF2 and SF2 in "The Temporal Logic of Actions" are obtained *) +(* from the following two rules by the following substitutions: `. *) +(* *) +(* ___ ___ _______________ *) +(* M <- M , g <- g , EM <- ENABLED <>_g .' *) +(***************************************************************************) +THEOREM RuleWF2 == ASSUME STATE P, STATE f, STATE g, STATE EM, + ACTION A, ACTION B, ACTION N, ACTION M, + TEMPORAL F, + <>_f => <>_g, + P /\ P' /\ <>_f /\ EM => B, + P /\ EM => ENABLED A, + [][N /\ ~B]_f /\ WF_f(A) /\ []F /\ <>[]EM => <>[]P + PROVE [][N]_f /\ WF_f(A) /\ []F => []<><>_g \/ []<>(~EM) + +THEOREM RuleSF2 == ASSUME STATE P, STATE f, STATE g, STATE EM, + ACTION A, ACTION B, ACTION N, ACTION M, + TEMPORAL F, + <>_f => <>_g, + P /\ P' /\ <>_f /\ EM => B, + P /\ EM => ENABLED A, + [][N /\ ~B]_f /\ SF_f(A) /\ []F /\ []<>EM => <>[]P + PROVE [][N]_f /\ SF_f(A) /\ []F => []<><>_g \/ <>[](~EM) + + +(***************************************************************************) +(* The following rule is a special case of the general temporal logic *) +(* proof rule STL4 from the paper "The Temporal Logic of Actions". The *) +(* general rule is for arbitrary temporal formulas F and G, but it cannot *) +(* yet be handled by TLAPS. *) +(***************************************************************************) +THEOREM RuleInvImplication == + ASSUME STATE F, STATE G, + F => G + PROVE []F => []G +PROOF OMITTED + +(***************************************************************************) +(* The following rule is a special case of rule TLA2 from the paper "The *) +(* Temporal Logic of Actions". *) +(***************************************************************************) +THEOREM RuleStepSimulation == + ASSUME STATE I, STATE f, STATE g, + ACTION M, ACTION N, + I /\ I' /\ [M]_f => [N]_g + PROVE []I /\ [][M]_f => [][N]_g +PROOF OMITTED + +(***************************************************************************) +(* The following may be used to invoke a decision procedure for *) +(* propositional temporal logic. 
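
   Editor's usage sketch (not part of the original module): once temporal
   reasoning is supported, a purely propositional temporal validity such as

      <1>1. ASSUME TEMPORAL F PROVE []F <=> ~<>~F
        BY PropositionalTemporalLogic

   is the kind of obligation this pragma is intended to discharge.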
   *)
+(***************************************************************************)
+PropositionalTemporalLogic == TRUE
+=============================================================================
diff --git a/x/ccv/provider/keeper/prototyping/model/library/WellFoundedInduction.tla b/x/ccv/provider/keeper/prototyping/model/library/WellFoundedInduction.tla
new file mode 100755
index 0000000000..43e4107f30
--- /dev/null
+++ b/x/ccv/provider/keeper/prototyping/model/library/WellFoundedInduction.tla
@@ -0,0 +1,328 @@
+------------------------ MODULE WellFoundedInduction ------------------------
+(***************************************************************************)
+(* This module contains useful theorems for inductive proofs and recursive *)
+(* definitions based on a well-founded ordering. *)
+(* *)
+(* Most of the statements of the theorems are decomposed in terms of *)
+(* definitions. This is done for two reasons: *)
+(* *)
+(* - It makes it easier for the backends to instantiate the theorems *)
+(*   when those definitions are not expanded. In fact, at the moment *)
+(*   the provers can't prove any of those theorems from the theorem *)
+(*   itself if the definitions are made usable. *)
+(* *)
+(* - It can be convenient when writing proofs to use those definitions *)
+(*   rather than having to write out their expansions. *)
+(* *)
+(* A relation is represented as a set of ordered pairs, where we write *)
+(* <<x, y>> \in R instead of x R y. It is more convenient to represent *)
+(* relations this way rather than as operators such as < . *)
+(* *)
+(* Proofs of these theorems appear in module WellFoundedInduction_proofs. *)
+(***************************************************************************)
+EXTENDS NaturalsInduction
+
+(***************************************************************************)
+(* The following defines what it means for a relation R to be transitively *)
+(* closed on a set S. In this and other definitions, we think of R as a *)
+(* relation on S, meaning that it is a subset of S \X S. However, this is *)
+(* not necessary. Our results do not require this as a hypothesis, and it *)
+(* is often convenient to apply them when R is a relation on a set *)
+(* containing S as a subset. They're even true (though uninteresting) if *)
+(* R and S \X S are disjoint sets. *)
+(***************************************************************************)
+IsTransitivelyClosedOn(R, S) ==
+  \A i, j, k \in S : (<<i, j>> \in R) /\ (<<j, k>> \in R)
+                        => (<<i, k>> \in R)
+(***************************************************************************)
+(* If we think of R as a less-than relation, then R is well founded on S *)
+(* iff there is no "infinitely descending" sequence of elements of S. The *)
+(* canonical example of a well founded relation is the ordinary less-than *)
+(* relation on the natural numbers. *)
+(* *)
+(* A set S with a well-founded ordering is often called well-ordered. *)
+(***************************************************************************)
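+(***************************************************************************)
+(* Editor's illustration (not part of the original module): for the small *)
+(* instance S == {1, 2, 3} and R == {<<1, 2>>, <<2, 3>>, <<1, 3>>}, i.e. *)
+(* the usual < relation on S written as pairs, IsWellFoundedOn(R, S) *)
+(* holds: any chain with <<f[n+1], f[n]>> \in R strictly decreases and so *)
+(* cannot be infinite. By contrast, {<<1, 2>>, <<2, 1>>} is not well *)
+(* founded on {1, 2}, because f == [n \in Nat |-> 1 + (n % 2)] satisfies *)
+(* <<f[n+1], f[n]>> \in R for every n \in Nat. *)
+(***************************************************************************)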
+IsWellFoundedOn(R, S) ==
+   ~ \E f \in [Nat -> S] : \A n \in Nat : <<f[n+1], f[n]>> \in R
+
+LEMMA EmptyIsWellFounded == \A S : IsWellFoundedOn({}, S)
+
+
+LEMMA IsWellFoundedOnSubset ==
+        ASSUME NEW R, NEW S, NEW T \in SUBSET S,
+               IsWellFoundedOn(R,S)
+        PROVE  IsWellFoundedOn(R,T)
+
+
+LEMMA IsWellFoundedOnSubrelation ==
+       ASSUME NEW S, NEW R, NEW RR, RR \cap (S \X S) \subseteq R,
+              IsWellFoundedOn(R,S)
+       PROVE  IsWellFoundedOn(RR,S)
+
+
+(***************************************************************************)
+(* If we think of R as a less-than relation on S, then the following is *)
+(* the set of elements of S that are less than x. *)
+(***************************************************************************)
+SetLessThan(x, R, S) == {y \in S : <<y, x>> \in R}
+
+(***************************************************************************)
+(* If we think of R as a less-than relation on S, then R is well-founded *)
+(* iff every non-empty subset of S has a minimal element. *)
+(***************************************************************************)
+
+THEOREM WFMin ==
+          ASSUME NEW R, NEW S,
+                 IsWellFoundedOn(R, S),
+                 NEW T, T \subseteq S, T # {}
+          PROVE  \E x \in T : \A y \in T : ~ (<<y, x>> \in R)
+
+
+THEOREM MinWF ==
+          ASSUME NEW R, NEW S,
+                 \A T \in SUBSET S : T # {} => \E x \in T : \A y \in T : ~ (<<y, x>> \in R)
+          PROVE  IsWellFoundedOn(R,S)
+
+
+(***************************************************************************)
+(* The two following lemmas are simple consequences of theorem WFMin. *)
+(***************************************************************************)
+LEMMA WellFoundedIsIrreflexive ==
+         ASSUME NEW R, NEW S, NEW x \in S,
+                IsWellFoundedOn(R, S)
+         PROVE  <<x, x>> \notin R
+
+
+LEMMA WellFoundedIsAsymmetric ==
+        ASSUME NEW R, NEW S, NEW x \in S, NEW y \in S,
+               IsWellFoundedOn(R,S),
+               <<x, y>> \in R, <<y, x>> \in R
+        PROVE  FALSE
+
+
+(***************************************************************************)
+(* The following lemmas are simple facts about operator SetLessThan. *)
+(***************************************************************************)
+LEMMA WFSetLessThanIrreflexive ==
+         ASSUME NEW R, NEW S, NEW x \in S,
+                IsWellFoundedOn(R,S)
+         PROVE  x \notin SetLessThan(x,R,S)
+
+
+LEMMA SetLessTransitive ==
+         ASSUME NEW R, NEW S, NEW x \in S, NEW y \in SetLessThan(x,R,S),
+                IsTransitivelyClosedOn(R, S)
+         PROVE  SetLessThan(y, R, S) \subseteq SetLessThan(x, R, S)
+
+
+----------------------------------------------------------------------------
+(***************************************************************************)
+(* The following theorem is the basis for proof by induction over a *)
+(* well-founded set. It generalizes theorem GeneralNatInduction of module *)
+(* NaturalsInduction. *)
+(***************************************************************************)
+THEOREM WFInduction ==
+          ASSUME NEW P(_), NEW R, NEW S,
+                 IsWellFoundedOn(R, S),
+                 \A x \in S : (\A y \in SetLessThan(x, R, S) : P(y))
+                                => P(x)
+          PROVE  \A x \in S : P(x)
+
+
+(***************************************************************************)
+(* Theorem WFInductiveDef below justifies recursive definitions based on a *)
+(* well-founded ordering. We first prove it with the hypothesis that the *)
+(* ordering is transitively closed. We prove the theorem for an arbitrary *)
+(* well-founded relation by applying the special case to its transitive *)
+(* closure.
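+
+   A concrete sketch added by the editor (not part of the original module):
+   take S == Nat, let R be the usual < relation viewed as a set of pairs
+   (OpToRel(<, Nat), defined later in this module), and let
+
+      Def(g, n) == IF n = 0 THEN 1 ELSE n * g[n-1]
+
+   Def(g, n) only depends on g[y] for y \in SetLessThan(n, R, Nat), so
+   WFDefOn(R, Nat, Def) holds, and for a hypothetical constant fact,
+   WFInductiveDefines(fact, Nat, Def) states exactly the usual recursion
+   equation fact = [n \in Nat |-> IF n = 0 THEN 1 ELSE n * fact[n-1]].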
*) +(***************************************************************************) +WFDefOn(R, S, Def(_,_)) == + \A g, h : + \A x \in S : + (\A y \in SetLessThan(x, R, S) : g[y] = h[y]) + => (Def(g,x) = Def(h,x)) + +OpDefinesFcn(f, S, Def(_,_)) == + f = CHOOSE g : g = [x \in S |-> Def(g, x)] + +WFInductiveDefines(f, S, Def(_,_)) == + f = [x \in S |-> Def(f, x)] + +WFInductiveUnique(S, Def(_,_)) == + \A g, h : /\ WFInductiveDefines(g, S, Def) + /\ WFInductiveDefines(h, S, Def) + => (g = h) + +THEOREM WFDefOnUnique == + ASSUME NEW Def(_,_), NEW R, NEW S, + IsWellFoundedOn(R, S), WFDefOn(R, S, Def) + PROVE WFInductiveUnique(S, Def) + + +LEMMA WFInductiveDefLemma == + ASSUME NEW Def(_,_), NEW R, NEW S, NEW f, + IsWellFoundedOn(R, S), + IsTransitivelyClosedOn(R, S), + WFDefOn(R, S, Def), + OpDefinesFcn(f, S, Def) + PROVE WFInductiveDefines(f, S, Def) + + +(***************************************************************************) +(* The following defines the transitive closure of the relation R on S. *) +(* More precisely, it is the transitive closure of the restriction of R *) +(* to S. We give an abstract definition of transitive closure as the *) +(* smallest relation that contains R (restricted to S \X S) and that is *) +(* transitively closed, then prove some relevant properties. *) +(***************************************************************************) +TransitiveClosureOn(R,S) == + { ss \in S \X S : + \A U \in SUBSET (S \X S) : + /\ R \cap S \X S \subseteq U + /\ IsTransitivelyClosedOn(U, S) + => ss \in U } + +LEMMA TransitiveClosureThm == + \A R, S : + /\ R \cap S \X S \subseteq TransitiveClosureOn(R, S) + /\ IsTransitivelyClosedOn(TransitiveClosureOn(R, S), S) + + +LEMMA TransitiveClosureMinimal == + ASSUME NEW R, NEW S, NEW U \in SUBSET (S \X S), + R \cap S \X S \subseteq U, + IsTransitivelyClosedOn(U,S) + PROVE TransitiveClosureOn(R,S) \subseteq U + + +(***************************************************************************) +(* The following lemmas are consequences of the two previous ones. The *) +(* first three state closure properties of transitive closure, the fourth *) +(* lemma allows one to chop off a step in the underlying relation for any *) +(* pair in the transitive closure. 
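
   A small worked example (editor's note, not from the original module):
   with S == {1, 2, 3} and R == {<<1, 2>>, <<2, 3>>}, the definition above
   gives TransitiveClosureOn(R, S) = {<<1, 2>>, <<2, 3>>, <<1, 3>>}, and
   TransitiveClosureChopLast applied to the pair <<1, 3>> yields the
   intermediate element j = 2.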
*) +(***************************************************************************) + +LEMMA TCTCTC == + ASSUME NEW R, NEW S, NEW i \in S, NEW j \in S, NEW k \in S, + <> \in TransitiveClosureOn(R,S), + <> \in TransitiveClosureOn(R,S) + PROVE <> \in TransitiveClosureOn(R,S) + + +LEMMA TCRTC == + ASSUME NEW R, NEW S, NEW i \in S, NEW j \in S, NEW k \in S, + <> \in TransitiveClosureOn(R,S), <> \in R + PROVE <> \in TransitiveClosureOn(R,S) + + +LEMMA RTCTC == + ASSUME NEW R, NEW S, NEW i \in S, NEW j \in S, NEW k \in S, + <> \in R, <> \in TransitiveClosureOn(R,S) + PROVE <> \in TransitiveClosureOn(R,S) + + +LEMMA TransitiveClosureChopLast == + ASSUME NEW R, NEW S, NEW i \in S, NEW k \in S, <> \in TransitiveClosureOn(R,S) + PROVE \E j \in S : /\ <> \in R + /\ i = j \/ <> \in TransitiveClosureOn(R,S) + + +THEOREM TransitiveClosureWF == + ASSUME NEW R, NEW S, IsWellFoundedOn(R,S) + PROVE IsWellFoundedOn(TransitiveClosureOn(R, S), S) + + +THEOREM WFInductiveDef == + ASSUME NEW Def(_,_), NEW R, NEW S, NEW f, + IsWellFoundedOn(R, S), + WFDefOn(R, S, Def), + OpDefinesFcn(f, S, Def) + PROVE WFInductiveDefines(f, S, Def) + + +(***************************************************************************) +(* Theorem WFInductiveDef allows us to conclude that a recursively defined *) +(* function satisfies its recursion equation. The following result allows *) +(* us to deduce the range of this function. *) +(***************************************************************************) +THEOREM WFInductiveDefType == + ASSUME NEW Def(_,_), NEW f, NEW R, NEW S, NEW T, + T # {}, + IsWellFoundedOn(R, S), + WFDefOn(R, S, Def), + WFInductiveDefines(f, S, Def), + \A g \in [S -> T], s \in S : Def(g, s) \in T + PROVE f \in [S -> T] + + + ---------------------------------------------------------------------------- +(***************************************************************************) +(* Below are some theorems that allow us to derive some useful *) +(* well-founded relations from a given well-founded relation. First, we *) +(* define the operator OpToRel that constructs a relation (a set of *) +(* ordered pairs) from a relation expressed as an operator. *) +(***************************************************************************) +OpToRel(_\prec_, S) == {ss \in S \X S : ss[1] \prec ss[2]} + +(***************************************************************************) +(* To construct well-founded relations from the less-than relation on the *) +(* natural numbers, we first prove that it is well-founded. *) +(***************************************************************************) +THEOREM NatLessThanWellFounded == IsWellFoundedOn(OpToRel(<,Nat), Nat) + + +(***************************************************************************) +(* The next definition would be easier to read if we used the TLA+ *) +(* construct {<> \in T : ... }. However, TLAPS does not suport *) +(* that notation. (It's meaning is rather complicated in the general case *) +(* when T is not a Cartesian product of sets.) *) +(***************************************************************************) +PreImage(f(_), S, R) == {ss \in S \X S : <> \in R} + +THEOREM PreImageWellFounded == + ASSUME NEW S, NEW T, NEW R, NEW f(_), + \A s \in S : f(s) \in T, + IsWellFoundedOn(R, T) + PROVE IsWellFoundedOn(PreImage(f, S, R), S) + + +(***************************************************************************) +(* We now prove that the lexicographical ordering on the Cartesian product *) +(* of two well-ordered sets is well-ordered. 
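
   A brief illustration (editor's addition): with R1 = R2 = OpToRel(<, Nat)
   and S1 = S2 = Nat, the pair << <<1, 5>>, <<2, 0>> >> is in
   LexPairOrdering(R1, R2, S1, S2) because 1 < 2 on the first components,
   and << <<2, 3>>, <<2, 7>> >> is in it because the first components agree
   and 3 < 7.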
*) +(***************************************************************************) +LexPairOrdering(R1, R2, S1, S2) == + {ss \in (S1 \X S2) \X (S1 \X S2) : + \/ <> \in R1 + \/ /\ ss[1][1] = ss[2][1] + /\ <> \in R2} + +THEOREM WFLexPairOrdering == + ASSUME NEW R1, NEW R2, NEW S1, NEW S2, + IsWellFoundedOn(R1, S1), + IsWellFoundedOn(R2, S2) + PROVE IsWellFoundedOn(LexPairOrdering(R1, R2, S1, S2), S1 \X S2) + + +(***************************************************************************) +(* The preceding theorem generalizes in the obvious way to the Cartesian *) +(* product of a finite number of well-ordered sets. However, the *) +(* statement of the general theorem is rather complicated, so we state it *) +(* for the most useful case: the Cartesian product of n copies of the same *) +(* set. *) +(***************************************************************************) +LexProductOrdering(R, S, n) == + { ff \in [1..n -> S] \X [1..n -> S] : + \E j \in 1..n : + /\ \A i \in 1..(j-1) : ff[1][i] = ff[2][i] + /\ <> \in R } + +THEOREM WFLexProductOrdering == + ASSUME NEW R, NEW S, NEW n \in Nat, + IsWellFoundedOn(R, S) + PROVE IsWellFoundedOn(LexProductOrdering(R, S, n), [1..n -> S]) + +============================================================================= +\* Modification History +\* Last modified Thu Feb 13 18:14:56 GMT-03:00 2014 by merz +\* Last modified Sun Jan 01 18:39:23 CET 2012 by merz +\* Last modified Wed Nov 23 10:13:18 PST 2011 by lamport diff --git a/x/ccv/provider/keeper/prototyping/model/library/WellFoundedInduction_proofs.tla b/x/ccv/provider/keeper/prototyping/model/library/WellFoundedInduction_proofs.tla new file mode 100644 index 0000000000..7dce4d04bc --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/library/WellFoundedInduction_proofs.tla @@ -0,0 +1,738 @@ +--------------------- MODULE WellFoundedInduction_proofs -------------------- +(***************************************************************************) +(* This module contains useful theorems for inductive proofs and recursive *) +(* definitions based on a well-founded ordering. *) +(* *) +(* Most of the statements of the theorems are decomposed in terms of *) +(* definitions. This is done for two reasons: *) +(* *) +(* - It makes it easier for the backends to instantiate the theorems *) +(* when those definitions are not expanded. In fact, at the moment *) +(* the provers can't prove any of those theorems from the theorem *) +(* itself if the definitions are made usable. *) +(* *) +(* - It can be convenient when writing proofs to use those definitions *) +(* rather than having to write out their expansions. *) +(* *) +(* A relation is represented as a set of ordered pairs, where we write *) +(* <> \in R instead of x R y. It is more convenient to represent *) +(* relations this way rather than as operators such as < . *) +(***************************************************************************) +EXTENDS NaturalsInduction, TLAPS + +(***************************************************************************) +(* The following defines what it means for a relation R to be transitively *) +(* closed on a set S. In this and other definitions, we think of R as a *) +(* relation on S, meaning that it is a subset of S \X S. However, this is *) +(* not necessary. Our results do not require this as a hypothesis, and it *) +(* is often convenient to apply them when R is a relation on a set *) +(* containing S as a subset. They're even true (though uninteresting) if *) +(* R and S \X S are disjoint sets. 
*) +(***************************************************************************) +IsTransitivelyClosedOn(R, S) == + \A i, j, k \in S : (<> \in R) /\ (<> \in R) + => (<> \in R) +(***************************************************************************) +(* If we think of R as a less-than relation, then R is well founded on S *) +(* iff there is no "infinitely descending" sequence of elements of S. The *) +(* canonical example of a well founded relation is the ordinary less-than *) +(* relation on the natural numbers. *) +(* *) +(* A S with a well-founded ordering is often called well-ordered. *) +(***************************************************************************) +IsWellFoundedOn(R, S) == + ~ \E f \in [Nat -> S] : \A n \in Nat : <> \in R + +LEMMA EmptyIsWellFounded == \A S : IsWellFoundedOn({}, S) +BY DEF IsWellFoundedOn + + +LEMMA IsWellFoundedOnSubset == + ASSUME NEW R, NEW S, NEW T \in SUBSET S, + IsWellFoundedOn(R,S) + PROVE IsWellFoundedOn(R,T) +BY DEF IsWellFoundedOn + + +LEMMA IsWellFoundedOnSubrelation == + ASSUME NEW S, NEW R, NEW RR, RR \cap (S \X S) \subseteq R, + IsWellFoundedOn(R,S) + PROVE IsWellFoundedOn(RR,S) +<1>1. SUFFICES ASSUME NEW f \in [Nat -> S], + \A n \in Nat : <> \in RR + PROVE FALSE + BY DEF IsWellFoundedOn +<1>2. \A n \in Nat : <> \in RR \cap (S \X S) + BY Isa, <1>1 +<1>. QED + BY <1>2 DEF IsWellFoundedOn + +(***************************************************************************) +(* If we think of R as a less-than relation on S, then the following is *) +(* the set of elements of S that are less than x. *) +(***************************************************************************) +SetLessThan(x, R, S) == {y \in S : <> \in R} + +(***************************************************************************) +(* If we think of R as a less-than relation on S, then R is well-founded *) +(* iff every non-empty subset of S has a minimal element. *) +(***************************************************************************) + +THEOREM WFMin == + ASSUME NEW R, NEW S, + IsWellFoundedOn(R, S), + NEW T, T \subseteq S, T # {} + PROVE \E x \in T : \A y \in T : ~ (<> \in R) +<1> SUFFICES ASSUME \A x \in T : \E y \in T : <> \in R + PROVE FALSE + OBVIOUS +<1> DEFINE f0 == CHOOSE x \in T : TRUE + Def(v, n) == CHOOSE x \in T : <> \in R + f[n \in Nat] == IF n = 0 THEN f0 ELSE Def(f[n-1], n) +<1>1. NatInductiveDefConclusion(f, f0, Def) + <2>1. NatInductiveDefHypothesis(f, f0, Def) + BY DEF NatInductiveDefHypothesis + <2>2. QED + BY <2>1, NatInductiveDef +<1>2. f \in [Nat -> T] + <2>1. f0 \in T + OBVIOUS + <2>2. \A v \in T, n \in Nat \ {0} : Def(v, n) \in T + OBVIOUS + <2>3. QED + BY <1>1, <2>1, <2>2, NatInductiveDefType, Isa +<1>3. ASSUME NEW n \in Nat + PROVE <> \in R + <2>1. /\ n+1 \in Nat + /\ n+1 # 0 + /\ (n+1)-1 = n + BY Isa + <2>2. f[n+1] = Def(f[(n+1)-1], n+1) + BY <2>1, <1>1 DEF NatInductiveDefConclusion + <2>3. QED + BY <2>1, <2>2, <1>2 +<1>4. QED + BY <1>2, <1>3 DEF IsWellFoundedOn + + +THEOREM MinWF == + ASSUME NEW R, NEW S, + \A T \in SUBSET S : T # {} => \E x \in T : \A y \in T : ~ (<> \in R) + PROVE IsWellFoundedOn(R,S) +<1> SUFFICES ASSUME NEW f \in [Nat -> S], + \A n \in Nat : <> \in R + PROVE FALSE + BY DEF IsWellFoundedOn +<1> DEFINE T == { f[n] : n \in Nat } +<1>1. T \subseteq S + OBVIOUS +<1>2. \A x \in T : \E y \in T : <> \in R + BY Isa +<1> QED + BY <1>1, <1>2 + +(***************************************************************************) +(* The two following lemmas are simple consequences of theorem WFMin. 
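
   (Editor's note, added for readability: irreflexivity also follows directly
   from the definition of IsWellFoundedOn, since <<x, x>> \in R would make
   the constant function [n \in Nat |-> x] an infinite descending chain; the
   proofs below derive both lemmas from theorem WFMin instead.)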
*) +(***************************************************************************) +LEMMA WellFoundedIsIrreflexive == + ASSUME NEW R, NEW S, NEW x \in S, + IsWellFoundedOn(R, S) + PROVE <> \notin R +<1>1. \E z \in {x} : \A y \in {x} : <> \notin R + BY WFMin, IsaM("blast") +<1>2. QED + BY <1>1 + + +LEMMA WellFoundedIsAsymmetric == + ASSUME NEW R, NEW S, NEW x \in S, NEW y \in S, + IsWellFoundedOn(R,S), + <> \in R, <> \in R + PROVE FALSE +<1>1. \E u \in {x,y} : \A v \in {x,y} : <> \notin R + BY WFMin, IsaM("blast") +<1>2. QED + BY <1>1 + +(***************************************************************************) +(* The following lemmas are simple facts about operator SetLessThan. *) +(***************************************************************************) +LEMMA WFSetLessThanIrreflexive == + ASSUME NEW R, NEW S, NEW x \in S, + IsWellFoundedOn(R,S) + PROVE x \notin SetLessThan(x,R,S) +BY WellFoundedIsIrreflexive DEF SetLessThan + + +LEMMA SetLessTransitive == + ASSUME NEW R, NEW S, NEW x \in S, NEW y \in SetLessThan(x,R,S), + IsTransitivelyClosedOn(R, S) + PROVE SetLessThan(y, R, S) \subseteq SetLessThan(x, R, S) +BY DEF SetLessThan, IsTransitivelyClosedOn + +---------------------------------------------------------------------------- +(***************************************************************************) +(* The following theorem is the basis for proof by induction over a *) +(* well-founded set. It generalizes theorem GeneralNatInduction of module *) +(* NaturalsInduction. *) +(***************************************************************************) +THEOREM WFInduction == + ASSUME NEW P(_), NEW R, NEW S, + IsWellFoundedOn(R, S), + \A x \in S : (\A y \in SetLessThan(x, R, S) : P(y)) + => P(x) + PROVE \A x \in S : P(x) +<1> DEFINE T == {x \in S : ~P(x)} +<1>1. SUFFICES ASSUME T # {} + PROVE FALSE + OBVIOUS +<1>2. PICK x \in T : \A y \in T : ~ (<> \in R) + BY <1>1, WFMin +<1>3. QED + BY <1>2 DEF SetLessThan + +(***************************************************************************) +(* Theorem WFInductiveDef below justifies recursive definitions based on a *) +(* well-founded ordering. We first prove it with the hypothesis that the *) +(* ordering is transitively closed. We prove the theorem for an arbitrary *) +(* well-founded relation by applying the special case to its transitive *) +(* closure. *) +(***************************************************************************) +WFDefOn(R, S, Def(_,_)) == + \A g, h : + \A x \in S : + (\A y \in SetLessThan(x, R, S) : g[y] = h[y]) + => (Def(g,x) = Def(h,x)) + +OpDefinesFcn(f, S, Def(_,_)) == + f = CHOOSE g : g = [x \in S |-> Def(g, x)] + +WFInductiveDefines(f, S, Def(_,_)) == + f = [x \in S |-> Def(f, x)] + +WFInductiveUnique(S, Def(_,_)) == + \A g, h : /\ WFInductiveDefines(g, S, Def) + /\ WFInductiveDefines(h, S, Def) + => (g = h) + +THEOREM WFDefOnUnique == + ASSUME NEW Def(_,_), NEW R, NEW S, + IsWellFoundedOn(R, S), WFDefOn(R, S, Def) + PROVE WFInductiveUnique(S, Def) +<1>0. SUFFICES ASSUME NEW g, NEW h, + WFInductiveDefines(g, S, Def), + WFInductiveDefines(h, S, Def) + PROVE g = h + BY DEF WFInductiveUnique +<1> SUFFICES \A x \in S : g[x] = h[x] + BY <1>0 DEF WFInductiveDefines +<1>1. ASSUME NEW x \in S, + \A y \in SetLessThan(x, R, S) : g[y] = h[y] + PROVE g[x] = h[x] + <2>1. Def(g,x) = Def(h,x) + BY <1>1 DEF WFDefOn + <2>2. QED + BY <1>0, <2>1 DEF WFInductiveDefines +<1>2. 
QED + BY <1>1, WFInduction, Isa + +LEMMA WFInductiveDefLemma == + ASSUME NEW Def(_,_), NEW R, NEW S, NEW f, + IsWellFoundedOn(R, S), + IsTransitivelyClosedOn(R, S), + WFDefOn(R, S, Def), + OpDefinesFcn(f, S, Def) + PROVE WFInductiveDefines(f, S, Def) +<1> DEFINE LT(x) == {x} \cup SetLessThan(x, R, S) +<1>1. ASSUME NEW x \in S + PROVE /\ LT(x) = {x} \cup UNION {LT(y) : y \in SetLessThan(x, R, S)} + /\ (x \in LT(x)) /\ (SetLessThan(x, R, S) \subseteq LT(x)) + /\ \A y \in LT(x) : SetLessThan(y, R, S) \subseteq LT(x) + /\ \A y \in LT(x) : LT(y) \subseteq LT(x) + /\ LT(x) \subseteq S + BY Isa DEF SetLessThan, IsTransitivelyClosedOn +<1> HIDE DEF LT \** from now on, (mostly) use properties in step <1>1 rather than the definition + +<1> DEFINE F(x) == CHOOSE g : g = [y \in LT(x) |-> Def(g, y)] + ff == [x \in S |-> F(x)[x]] +<1> HIDE DEF ff + +<1>2. \A x \in S : ff[x] = Def(ff,x) + <2>1. SUFFICES ASSUME NEW x \in S, + \A y \in SetLessThan(x, R, S) : ff[y] = Def(ff,y) + PROVE ff[x] = Def(ff,x) + BY WFInduction, Isa + <2>2. WFInductiveUnique(LT(x), Def) + <3>1. LT(x) \subseteq S + BY <1>1 + <3>2. IsWellFoundedOn(R, LT(x)) + BY <3>1, IsWellFoundedOnSubset + <3>3. \A z \in LT(x) : SetLessThan(z, R, LT(x)) = SetLessThan(z, R, S) + BY DEF LT, SetLessThan, IsTransitivelyClosedOn + <3>4. WFDefOn(R, LT(x), Def) + BY <3>1, <3>3, IsaM("blast") DEF WFDefOn + <3>. QED + BY <3>2, <3>4, WFDefOnUnique + <2> DEFINE g == [y \in LT(x) |-> Def(ff, y)] + <2>3. Def(ff,x) = Def(g,x) + BY <1>1 (* x \in LT(x) *), <2>1 DEF WFDefOn + <2>4. ASSUME NEW y \in SetLessThan(x, R, S) + PROVE Def(ff,y) = Def(g,y) + <3>1. y \in S + BY DEF SetLessThan + <3>2. \A z \in SetLessThan(y, R, S) : ff[z] = g[z] + BY <2>1, SetLessTransitive DEF LT + <3>3. QED + BY <3>1, <3>2 DEF WFDefOn + <2>5. WFInductiveDefines(g, LT(x), Def) + BY <2>3, <2>4 DEF WFInductiveDefines, LT + <2>6. WFInductiveDefines(F(x), LT(x), Def) + BY <2>5 DEF WFInductiveDefines + <2>7. g = F(x) + BY <2>5, <2>6, <2>2 DEF WFInductiveUnique + <2>. QED + BY <1>1, <2>7 DEF ff + +<1>3. QED + <2>1. WFInductiveDefines(ff, S, Def) + BY <1>2 DEF WFInductiveDefines, ff + <2>2. QED + BY <2>1 DEF WFInductiveDefines, OpDefinesFcn + +(***************************************************************************) +(* The following defines the transitive closure of the relation R on S. *) +(* More precisely, it is the transitive closure of the restriction of R *) +(* to S. We give an abstract definition of transitive closure as the *) +(* smallest relation that contains R (restricted to S \X S) and that is *) +(* transitively closed, then prove some relevant properties. *) +(***************************************************************************) +TransitiveClosureOn(R,S) == + { ss \in S \X S : + \A U \in SUBSET (S \X S) : + /\ R \cap S \X S \subseteq U + /\ IsTransitivelyClosedOn(U, S) + => ss \in U } + +LEMMA TransitiveClosureThm == + \A R, S : + /\ R \cap S \X S \subseteq TransitiveClosureOn(R, S) + /\ IsTransitivelyClosedOn(TransitiveClosureOn(R, S), S) +<1> TAKE R, S +<1>1. R \cap S \X S \subseteq TransitiveClosureOn(R, S) + BY DEF TransitiveClosureOn +<1>2. IsTransitivelyClosedOn(TransitiveClosureOn(R, S), S) + BY DEF TransitiveClosureOn, IsTransitivelyClosedOn +<1>3. 
QED + BY <1>1, <1>2 + +LEMMA TransitiveClosureMinimal == + ASSUME NEW R, NEW S, NEW U \in SUBSET (S \X S), + R \cap S \X S \subseteq U, + IsTransitivelyClosedOn(U,S) + PROVE TransitiveClosureOn(R,S) \subseteq U +BY DEF TransitiveClosureOn + +(***************************************************************************) +(* The following lemmas are consequences of the two previous ones. The *) +(* first three state closure properties of transitive closure, the fourth *) +(* lemma allows one to chop off a step in the underlying relation for any *) +(* pair in the transitive closure. *) +(***************************************************************************) + +LEMMA TCTCTC == + ASSUME NEW R, NEW S, NEW i \in S, NEW j \in S, NEW k \in S, + <> \in TransitiveClosureOn(R,S), + <> \in TransitiveClosureOn(R,S) + PROVE <> \in TransitiveClosureOn(R,S) +BY TransitiveClosureThm DEF IsTransitivelyClosedOn + +LEMMA TCRTC == + ASSUME NEW R, NEW S, NEW i \in S, NEW j \in S, NEW k \in S, + <> \in TransitiveClosureOn(R,S), <> \in R + PROVE <> \in TransitiveClosureOn(R,S) +BY TransitiveClosureThm, TCTCTC + +LEMMA RTCTC == + ASSUME NEW R, NEW S, NEW i \in S, NEW j \in S, NEW k \in S, + <> \in R, <> \in TransitiveClosureOn(R,S) + PROVE <> \in TransitiveClosureOn(R,S) +BY TransitiveClosureThm, TCTCTC + +LEMMA TransitiveClosureChopLast == + ASSUME NEW R, NEW S, NEW i \in S, NEW k \in S, <> \in TransitiveClosureOn(R,S) + PROVE \E j \in S : /\ <> \in R + /\ i = j \/ <> \in TransitiveClosureOn(R,S) +<1> DEFINE U == { ss \in S \X S : \E s \in S : /\ <> \in R + /\ ss[1] = s \/ <> \in TransitiveClosureOn(R,S) } +<1>1. R \cap S \X S \subseteq U + <2> SUFFICES ASSUME NEW x \in S, NEW y \in S, <> \in R + PROVE <> \in U + BY IsaM("blast") + <2> QED + OBVIOUS +<1>2. U \subseteq TransitiveClosureOn(R,S) + <2> SUFFICES ASSUME NEW x \in S, NEW y \in S, <> \in U + PROVE <> \in TransitiveClosureOn(R,S) + BY IsaM("blast") + <2> QED + BY TransitiveClosureThm DEF IsTransitivelyClosedOn +<1>3. IsTransitivelyClosedOn(U,S) + <2>1. SUFFICES ASSUME NEW x \in S, NEW y \in S, NEW z \in S, + <> \in U, <> \in U + PROVE <> \in U + BY DEF IsTransitivelyClosedOn + <2>2. <> \in TransitiveClosureOn(R,S) + BY <2>1, <1>2 + <2>3. PICK s \in S : /\ <> \in R + /\ y=s \/ <> \in TransitiveClosureOn(R,S) + BY <2>1 + <2>4. <> \in TransitiveClosureOn(R,S) + BY <2>2, <2>3, TransitiveClosureThm DEF IsTransitivelyClosedOn + <2> QED + BY <2>3, <2>4 +<1>4. QED + <2>1. TransitiveClosureOn(R,S) \subseteq U + BY <1>1, <1>3, TransitiveClosureMinimal + <2>2. QED + BY <2>1 + +(***************************************************************************) +(* NB: In a similar way to the preceding lemma, one could prove *) +(* ASSUME NEW R, NEW S, NEW x \in S, NEW y \in S, *) +(* <> \in TransitiveClosureOn(R,S) *) +(* PROVE \E n \in Nat : \E f \in [0..(n+1) -> S] : *) +(* /\ \A i \in 0..n : <> \in R *) +(* /\ x = f[0] /\ y = f[n+1] *) +(* which provides a more constructive characterization of transitive *) +(* closure. The converse theorem would be proved by induction on n, *) +(* using the above closure properties. *) +(***************************************************************************) + +THEOREM TransitiveClosureWF == + ASSUME NEW R, NEW S, IsWellFoundedOn(R,S) + PROVE IsWellFoundedOn(TransitiveClosureOn(R, S), S) +<1> SUFFICES ASSUME NEW T \in SUBSET S, T # {} + PROVE \E x \in T : \A y \in T : ~(<> \in TransitiveClosureOn(R, S)) + BY MinWF +(* It is tempting to simply pick a minimal element x in T w.r.t. 
relation R as the witness, + but that wouldn't work in general because there may be elements below x in the transitive + closure of R. So we complete T w.r.t. the transitive closure in an appropriate way and + pick a minimal element in that larger set. *) +<1> DEFINE TT == T \cup { j \in S : \E i,k \in T : /\ <> \in TransitiveClosureOn(R,S) + /\ <> \in TransitiveClosureOn(R,S) } +<1>1. PICK x \in TT : \A y \in TT : ~(<> \in R) + BY WFMin +<1>2. x \in T + <2>1. ASSUME NEW i \in T, NEW k \in T, + <> \in TransitiveClosureOn(R,S), + <> \in TransitiveClosureOn(R,S) + PROVE FALSE + <3>1. PICK j \in S : /\ <> \in R + /\ i=j \/ <> \in TransitiveClosureOn(R,S) + BY <2>1, TransitiveClosureChopLast + <3>2. j \in TT + <4>1. CASE <> \in TransitiveClosureOn(R,S) + BY <3>1, <4>1, <2>1, RTCTC + <4>2. QED + BY <3>1, <4>1 + <3>3. QED + BY <3>1, <3>2, <1>1 + <2>2. QED + BY <2>1 +<1>3. ASSUME NEW y \in T, <> \in TransitiveClosureOn(R, S) + PROVE FALSE + <2>1. PICK j \in S : /\ <> \in R + /\ y=j \/ <> \in TransitiveClosureOn(R,S) + BY <1>3, TransitiveClosureChopLast + <2>2. j \in TT + <3>1. CASE <> \in TransitiveClosureOn(R,S) + BY <1>2, <3>1, <2>1, TransitiveClosureThm + <3>2. QED + BY <2>1, <3>1 + <2>3. QED + BY <2>1, <2>2, <1>1 +<1> QED + BY <1>2, <1>3 + +THEOREM WFInductiveDef == + ASSUME NEW Def(_,_), NEW R, NEW S, NEW f, + IsWellFoundedOn(R, S), + WFDefOn(R, S, Def), + OpDefinesFcn(f, S, Def) + PROVE WFInductiveDefines(f, S, Def) +<1> DEFINE TC == TransitiveClosureOn(R, S) +<1>1. IsTransitivelyClosedOn(TC, S) + BY TransitiveClosureThm +<1>2. IsWellFoundedOn(TC, S) + BY TransitiveClosureWF +<1>3. WFDefOn(TC, S, Def) + <2>1. \A x \in S : SetLessThan(x, R, S) \subseteq SetLessThan(x, TC, S) + BY TransitiveClosureThm DEF SetLessThan + <2>2. QED + BY <2>1 DEF WFDefOn +<1>4. QED + BY <1>1, <1>2, <1>3, WFInductiveDefLemma + +(***************************************************************************) +(* Theorem WFInductiveDef allows us to conclude that a recursively defined *) +(* function satisfies its recursion equation. The following result allows *) +(* us to deduce the range of this function. *) +(***************************************************************************) +THEOREM WFInductiveDefType == + ASSUME NEW Def(_,_), NEW f, NEW R, NEW S, NEW T, + T # {}, + IsWellFoundedOn(R, S), + WFDefOn(R, S, Def), + WFInductiveDefines(f, S, Def), + \A g \in [S -> T], s \in S : Def(g, s) \in T + PROVE f \in [S -> T] +<1>1. \A s \in S : f[s] \in T + <2>1. SUFFICES ASSUME NEW s \in S, + \A x \in SetLessThan(s, R, S) : f[x] \in T + PROVE f[s] \in T + BY ONLY <2>1, IsWellFoundedOn(R, S), WFInduction, IsaM("auto") + <2>2. PICK t0 : t0 \in T + OBVIOUS + <2> DEFINE g == [x \in S |-> IF x \in SetLessThan(s, R, S) THEN f[x] ELSE t0] + <2>3. /\ g \in [S -> T] + /\ \A x \in SetLessThan(s, R, S) : g[x] = f[x] + <3> SetLessThan(s, R, S) \subseteq S + BY DEF SetLessThan + <3> QED + BY <2>1, <2>2 + <2>4. Def(f,s) = Def(g,s) + BY <2>3 DEF WFDefOn + <2>5. QED + BY <2>3, <2>4 DEF WFInductiveDefines, WFDefOn +<1>2. QED + BY <1>1 DEF WFInductiveDefines + + ---------------------------------------------------------------------------- +(***************************************************************************) +(* Below are some theorems that allow us to derive some useful *) +(* well-founded relations from a given well-founded relation. First, we *) +(* define the operator OpToRel that constructs a relation (a set of *) +(* ordered pairs) from a relation expressed as an operator. 
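
   For instance (editor's illustration): OpToRel(<, 0..2) is the set of pairs
   {<<0, 1>>, <<0, 2>>, <<1, 2>>}, i.e. the usual strict order on 0..2
   written as a set of ordered pairs.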
*) +(***************************************************************************) +OpToRel(_\prec_, S) == {ss \in S \X S : ss[1] \prec ss[2]} + +(***************************************************************************) +(* To construct well-founded relations from the less-than relation on the *) +(* natural numbers, we first prove that it is well-founded. *) +(***************************************************************************) +THEOREM NatLessThanWellFounded == IsWellFoundedOn(OpToRel(<,Nat), Nat) +<1> DEFINE R == OpToRel(<,Nat) +<1>1. SUFFICES ASSUME NEW ff \in [Nat -> Nat], + \A n \in Nat : ff[n+1] < ff[n] + PROVE FALSE + BY DEF IsWellFoundedOn, OpToRel + +<1> DEFINE P(n) == \E f \in [Nat -> Nat] : + /\ \A m \in Nat : <> \in R + /\ f[0] = n +<1>1a. P(ff[0]) + BY <1>1, IsaM("auto") DEF OpToRel +<1>2. ASSUME NEW n \in Nat, + \A m \in 0..(n-1) : ~ P(m) + PROVE ~ P(n) + <2> SUFFICES ASSUME NEW f \in [Nat -> Nat], + \A m \in Nat : <> \in R , + f[0] = n + PROVE FALSE + OBVIOUS + <2> DEFINE g[i \in Nat] == f[i+1] + <2>1. g \in [Nat -> Nat] + BY ONLY f \in [Nat -> Nat], IsaM("auto") + <2>2. \A i \in Nat : <> \in R + BY IsaM("auto") + <2>3. g[0] \in 0..(n-1) + BY <2>2, Z3 DEF OpToRel + <2>4 QED + BY <2>1, <2>2, <2>3, <1>2 +<1>3. ~ P(ff[0]) + <2> HIDE DEF P + <2> \A n \in Nat : ~ P(n) + BY ONLY <1>2, GeneralNatInduction, IsaM("auto") + <2> QED + BY DEF P +<1>4. QED + BY <1>1a, <1>3 + +(***************************************************************************) +(* The next definition would be easier to read if we used the TLA+ *) +(* construct {<> \in T : ... }. However, TLAPS does not suport *) +(* that notation. (It's meaning is rather complicated in the general case *) +(* when T is not a Cartesian product of sets.) *) +(***************************************************************************) +PreImage(f(_), S, R) == {ss \in S \X S : <> \in R} + +THEOREM PreImageWellFounded == + ASSUME NEW S, NEW T, NEW R, NEW f(_), + \A s \in S : f(s) \in T, + IsWellFoundedOn(R, T) + PROVE IsWellFoundedOn(PreImage(f, S, R), S) +<1> SUFFICES ASSUME NEW g \in [Nat -> S], + \A n \in Nat : <> \in PreImage(f, S, R) + PROVE FALSE + BY DEF IsWellFoundedOn +<1> DEFINE gg[n \in Nat] == f(g[n]) +<1>1. ASSUME NEW n \in Nat + PROVE <> \in R + BY IsaM("auto") DEF PreImage +<1> QED + BY <1>1 DEF IsWellFoundedOn + +(***************************************************************************) +(* We now prove that the lexicographical ordering on the Cartesian product *) +(* of two well-ordered sets is well-ordered. *) +(***************************************************************************) +LexPairOrdering(R1, R2, S1, S2) == + {ss \in (S1 \X S2) \X (S1 \X S2) : + \/ <> \in R1 + \/ /\ ss[1][1] = ss[2][1] + /\ <> \in R2} + +THEOREM WFLexPairOrdering == + ASSUME NEW R1, NEW R2, NEW S1, NEW S2, + IsWellFoundedOn(R1, S1), + IsWellFoundedOn(R2, S2) + PROVE IsWellFoundedOn(LexPairOrdering(R1, R2, S1, S2), S1 \X S2) +<1> SUFFICES ASSUME NEW T \in SUBSET (S1 \X S2), T # {} + PROVE \E x \in T : \A y \in T : <> \notin LexPairOrdering(R1, R2, S1, S2) + BY MinWF +<1> DEFINE T1 == { tt[1] : tt \in T } +<1>1. PICK x1 \in T1 : \A y1 \in T1 : <> \notin R1 + <2>1. T1 \subseteq S1 /\ T1 # {} + OBVIOUS + <2>2. QED + BY <2>1, WFMin +<1> DEFINE T2 == { tt[2] : tt \in { uu \in T : uu[1] = x1 } } +<1>2. PICK x2 \in T2 : \A y2 \in T2 : <> \notin R2 + <2>1. T2 \subseteq S2 /\ T2 # {} + OBVIOUS + <2>2. QED + BY <2>1, WFMin +<1>3. <> \in T + BY IsaM("force") +<1>4. 
ASSUME NEW t \in T, + << t, <> >> \in LexPairOrdering(R1, R2, S1, S2) + PROVE FALSE + <2>1. CASE << t[1], x1 >> \in R1 + BY <1>1, <2>1 + <2>2. CASE t[1] = x1 /\ << t[2], x2 >> \in R2 + BY <1>2, <2>2 + <2>3. QED + BY <2>1, <2>2, <1>4 DEF LexPairOrdering +<1> QED + BY <1>3, <1>4 + +(***************************************************************************) +(* The preceding theorem generalizes in the obvious way to the Cartesian *) +(* product of a finite number of well-ordered sets. However, the *) +(* statement of the general theorem is rather complicated, so we state it *) +(* for the most useful case: the Cartesian product of n copies of the same *) +(* set. *) +(***************************************************************************) +LexProductOrdering(R, S, n) == + { ff \in [1..n -> S] \X [1..n -> S] : + \E j \in 1..n : + /\ \A i \in 1..(j-1) : ff[1][i] = ff[2][i] + /\ <> \in R } + +THEOREM WFLexProductOrdering == + ASSUME NEW R, NEW S, NEW n \in Nat, + IsWellFoundedOn(R, S) + PROVE IsWellFoundedOn(LexProductOrdering(R, S, n), [1..n -> S]) +<1> DEFINE LPO(m) == LexProductOrdering(R, S, m) +<1> DEFINE P(m) == IsWellFoundedOn(LPO(m), [1..m -> S]) +<1>1. P(0) + BY 1..0 = {}, EmptyIsWellFounded DEF LexProductOrdering +<1>2. ASSUME NEW m \in Nat, P(m) + PROVE P(m+1) + <2>1. IsWellFoundedOn(LexPairOrdering(LPO(m), R, [1..m -> S], S), [1..m -> S] \X S) + BY <1>2, WFLexPairOrdering + (*************************************************************************) + (* Pairs of m-tuples over S in [1..m ->S] and an element of S are *) + (* isomorphic to (m+1)-tuples over S, and the following function *) + (* establishes this isomorphism. We will then apply the theorem about *) + (* preimages to prove the desired result. *) + (*************************************************************************) + <2> DEFINE g(ss) == << [i \in 1..m |-> ss[i]], ss[m+1] >> + <2>2. 1 .. m+1 = 1..m \union {m+1} + OBVIOUS + <2>3. IsWellFoundedOn(PreImage(g, [1..m+1 -> S], LexPairOrdering(LPO(m), R, [1..m -> S], S)), + [1..m+1 -> S]) + <3>1. \A ss \in [1..m+1 -> S] : g(ss) \in [1..m -> S] \X S + BY <2>2 + <3> HIDE DEF g + <3>2. QED + BY <2>1, <3>1, PreImageWellFounded + <2>4. LPO(m+1) = PreImage(g, [1..m+1 -> S], LexPairOrdering(LPO(m), R, [1..m -> S], S)) + <3>1. LPO(m+1) \subseteq PreImage(g, [1..m+1 -> S], LexPairOrdering(LPO(m), R, [1..m -> S], S)) + <4> SUFFICES ASSUME NEW x \in [1..m+1 -> S], NEW y \in [1..m+1 -> S], + NEW j \in 1 .. m+1, + \A i \in 1..j-1 : x[i] = y[i], + <> \in R + PROVE <> \in PreImage(g, [1..m+1 -> S], LexPairOrdering(LPO(m), R, [1..m -> S], S)) + BY Isa DEF LexProductOrdering + <4>1. \A i \in 1 .. j-1 : i \in 1 .. m + OBVIOUS + <4>2. << g(x), g(y) >> \in LexPairOrdering(LPO(m), R, [1..m -> S], S) + <5>1. CASE j \in 1..m + <6>1. << g(x)[1], g(y)[1] >> \in LPO(m) + BY <2>2, <4>1, <5>1 DEF LexProductOrdering + <6>2. QED + BY <6>1, <2>2 DEF LexPairOrdering + <5>2. CASE j = m+1 + <6>1. /\ g(x)[1] = g(y)[1] + /\ << g(x)[2], g(y)[2] >> \in R + BY <2>2, <5>2, IsaM("force") + <6>2. QED + BY <6>1, <2>2 DEF LexPairOrdering + <5>3. QED + BY <2>2, <5>1, <5>2 + <4>3. QED + BY <4>2 DEF PreImage + <3>2. PreImage(g, [1..m+1 -> S], LexPairOrdering(LPO(m), R, [1..m -> S], S)) \subseteq LPO(m+1) + <4> SUFFICES ASSUME NEW x \in [1..m+1 -> S], NEW y \in [1..m+1 -> S], + << g(x), g(y) >> \in LexPairOrdering(LPO(m), R, [1..m -> S], S) + PROVE <> \in LPO(m+1) + BY IsaM("auto") DEF PreImage + <4>1. CASE << g(x)[1], g(y)[1] >> \in LPO(m) + <5> HIDE DEF g + <5>1. 
PICK j \in 1..m : /\ \A i \in 1..j-1 : g(x)[1][i] = g(y)[1][i] + /\ << g(x)[1][j], g(y)[1][j] >> \in R + BY <4>1 DEF LexProductOrdering + <5>3. /\ g(x)[1][j] = x[j] + /\ \A i \in 1..j-1 : g(x)[1][i] = x[i] + /\ g(y)[1][j] = y[j] + /\ \A i \in 1..j-1 : g(y)[1][i] = y[i] + BY <2>2, SMT DEF g + <5> QED + BY <5>1, <5>3, <2>2 DEF LexProductOrdering + <4>2. CASE g(x)[1] = g(y)[1] /\ << g(x)[2], g(y)[2] >> \in R + <5>1. <> \in R + BY <4>2 + <5>2. \A i \in 1..m : /\ g(x)[1][i] = x[i] + /\ g(y)[1][i] = y[i] + OBVIOUS + <5>3. \A i \in 1..(m+1)-1 : x[i] = y[i] + BY <4>2, <5>2, IsaM("auto") + <5> QED + BY <5>1, <5>3 DEF LexProductOrdering + <4> QED + BY <4>1, <4>2 DEF LexPairOrdering + <3>3. QED + BY <3>1, <3>2 + <2> QED + BY <2>3, <2>4 +<1>3. \A m \in Nat : P(m) + BY <1>1, <1>2, NatInduction, IsaM("auto") +<1>4. QED + BY <1>3 + +============================================================================= +\* Modification History +\* Last modified Thu Feb 13 18:26:54 GMT-03:00 2014 by merz +\* Last modified Sun Jan 01 18:39:23 CET 2012 by merz +\* Last modified Wed Nov 23 10:13:18 PST 2011 by lamport diff --git a/x/ccv/provider/keeper/prototyping/model/library/ref/CCV.tla b/x/ccv/provider/keeper/prototyping/model/library/ref/CCV.tla new file mode 100644 index 0000000000..23ad1079d5 --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/library/ref/CCV.tla @@ -0,0 +1,550 @@ +--------------------------- MODULE CCV --------------------------- +(* + * Modeling voting power relay between provider- and consumer chains in ICS. + * + * Simplifications: + * - We only track voting power, not bonded tokens + * - CCV channel creation is atomic and never fails/times out. + * - No new consumers join midway. + * - Block height is not modeled. + * + * Jure Kukovec, 2022 + *) + +EXTENDS Integers, Sequences, Apalache, typedefs + +CONSTANT + \* The set of all nodes, which may take on a validator role. + \* node \in Nodes is a validator <=> node \in DOMAIN votingPowerRunning + \* @type: Set($node); + Nodes, + \* The set of all consumer chains. Consumers may be removed + \* during execution, but not added. + \* @type: Set($chain); + ConsumerChains, + \* Time that needs to elapse, before a received VPC is considered + \* mature on a chain. + \* @type: $time; + UnbondingPeriod, + \* Time that needs to elapse, before a message is considered to have + \* timed out (resulting in the removal of the related consumer chain). + \* @type: $time; + Timeout, + \* Maximal time by which clocks are assumed to differ from the provider chain. + \* Since consumer chains don't communicate, we don't care about + \* drift between tow consumers (though it's implicitly less than MaxDrift, if + \* each differs from the provider chain by at most MaxDrift). + \* The specification doesn't force clocks to maintain bounded drift, + \* but the invariants are only verified in cases where clocks never drift too far. + \* @type: $time; + MaxDrift + +\* Provider chain only +VARIABLES + \* Snapshots of the voting power on the provider chain, at the times + \* when a VPC packet was sent. + \* t \in DOMAIN votingPowerHist <=> VPC packet sent at time t + \* @type: $packet -> $votingPowerOnChain; + votingPowerHist, + \* Current voting power on the provider chain. + \* @type: $votingPowerOnChain; + votingPowerRunning, + \* Current status for each consumer. 
May be one of: + \* Unused - Initializing - Active - Dropped + \* @type: $chain -> STATUS; + consumerStatus, + \* Set of chains live at the time a packet was sent (who are expected to reply) + \* @type: $packet -> Set($chain); + expectedResponders, + \* The set of MatureVSCPacket sent by consumer chains to the + \* provider chain. + \* @type: Set($matureVSCPacket); + maturePackets + +\* Consumer chains or both +VARIABLES + \* Representation of the current voting power, as understood by consumer chains. + \* Because consumer chains may not arbitrarily modify their own voting power, + \* but must instead update in accordance to VPC packets received from the + \* provider, it is sufficient to only track the last received packet. + \* The voting power on chain c is then equal to votingPowerHist[votingPowerReferences[c]]. + \* @type: $chain -> $time; + votingPowerReferences, + \* The queues of VPC packets, waiting to be received by consumer chains. + \* Note that a packet being placed in the channel is not considered + \* received by the consumer, until the receive-action is taken. + \* @type: $chain -> Seq($packet); + ccvChannelsPending, + \* The queues of VPC packets, that have been received by consumer chains in the past. + \* @type: $chain -> Seq($packet); + ccvChannelsResolved, + \* The current times of all chains (including the provider). + \* @type: $chain -> $time; + currentTimes, + \* Bookkeeping of maturity times for received packets. + \* A consumer may only send a MatureVSCPacket (i.e. notify the provider) after + \* its local time exceeds the time designated in maturityTimes. + \* For each consumer chain c, and VSC packet t sent by the provider, + \* a) t \in DOMAIN maturityTimes[c] <=> c has received packet t + \* b) if t \in DOMAIN maturityTimes[c], then maturity for t on c is + \* guarded by currentTimes[c] >= maturityTimes[c][t] + \* @type: $chain -> $packet -> $time; + maturityTimes + +\* Bookkeeping +VARIABLES + \* Name of last action, for debugging + \* @type: Str; + lastAction, + \* VPC flag; Voting power may be considered to have changed, even if + \* the (TLA) value of votingPowerRunning does not (for example, due to a sequence + \* of delegations and un-delegations, with a net 0 change in voting power). + \* We use this flag to determine whether it is necessary to send a VPC packet. + \* @type: Bool; + votingPowerHasChanged, + \* Invariant flag, TRUE iff clocks never drifted too much + \* @type: Bool; + boundedDrift + +\* Helper tuples for UNCHANGED syntax +\* We don't track consumerStatus and lastAction in var tuples, because +\* they change each round. 
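
\* Editor's sketch (not part of the original spec): following the comments on
\* votingPowerReferences above, the voting power currently in force on a
\* consumer chain c could be read off as
\*
\*   VotingPowerOnConsumer(c) ==
\*     IF votingPowerReferences[c] = UndefinedTime
\*     THEN UndefinedPower
\*     ELSE votingPowerHist[votingPowerReferences[c]]
\*
\* where VotingPowerOnConsumer is a hypothetical helper, and UndefinedTime and
\* UndefinedPower are the sentinels defined below.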
+ +providerVars == + << votingPowerHist, votingPowerRunning, expectedResponders, maturePackets >> + +consumerVars == + << votingPowerReferences, ccvChannelsPending, ccvChannelsResolved, currentTimes, maturityTimes >> + +\* @type: <>; +bookkeepingVars == + << votingPowerHasChanged, boundedDrift >> + + +(*** NON-ACTION DEFINITIONS ***) + +Unused == "Unused_OF_STATUS" +Initializing == "Initializing_OF_STATUS" +Active == "Active_OF_STATUS" +Dropped == "Dropped_OF_STATUS" + +Status == { Unused, Initializing, Active, Dropped } + +ActiveConsumers == {c \in ConsumerChains: consumerStatus[c] = Active } +InitializingConsumers == {c \in ConsumerChains: consumerStatus[c] = Initializing } +LiveConsumers == ActiveConsumers \union InitializingConsumers +LiveNext == { c \in ConsumerChains: consumerStatus'[c] \in {Initializing, Active} } +InactiveConsumers == {c \in ConsumerChains: consumerStatus[c] \in {Unused, Dropped}} + +\* Some value not in Nat, for initialization +UndefinedTime == -1 + +\* Provider chain ID, assumed to be distinct from all consumer chain IDs +ProviderChain == "provider_OF_C" + +\* Some value not in [Nodes -> Nat], for initialization +UndefinedPower == [node \in Nodes |-> -1] + +\* All chains, including the provider. Used for the domain of shared +\* variables, e.g. currentTimes +Chains == ConsumerChains \union {ProviderChain} + +\* According to https://github.com/cosmos/ibc/blob/main/spec/core/ics-004-channel-and-packet-semantics/README.md#receiving-packets +\* we need to use >=. +TimeoutGuard(a,b) == a >= b + +\* @type: (Seq($packet), $time) => Bool; +TimeoutOnReception(channel, consumerT) == + /\ Len(channel) /= 0 + \* Head is always the oldest packet, so if there is a timeout for some packet, + \* there must be one for Head too + /\ TimeoutGuard(consumerT, Head(channel) + Timeout) + + +\* @type: ($chain, $time, $packet -> $time) => Bool; +TimeoutOnMaturity(c, providerT, maturity) == + \E packet \in DOMAIN maturity: + \* Note: Reception time = maturity[packet] - UnbondingPeriod + /\ TimeoutGuard(providerT + UnbondingPeriod, maturity[packet] + Timeout) + \* Not yet matured + /\ \A matureVSCPacket \in maturePackets: + \/ matureVSCPacket.chain /= c + \/ matureVSCPacket.packetTime /= packet + +\* Takes parameters, so primed and non-primed values can be passed +\* @type: ($chain, Seq($packet), $time, $time, $packet -> $time) => Bool; +PacketTimeoutForConsumer(c, channel, consumerT, providerT, maturity) == + \* Option 1: Timeout on reception + \/ TimeoutOnReception(channel, consumerT) + \* Option 2: Timeout on maturity + \/ TimeoutOnMaturity(c, providerT, maturity) + +\* Because we're not using functions with fixed domains, we can't use EXCEPT. +\* Thus, we need a helper method for domain-extension. +\* @type: (a -> b, a, b) => a -> b; +ExtendFnBy(f, k, v) == + [ + x \in DOMAIN f \union {k} |-> + IF x = k + THEN v + ELSE f[x] + ] + +\* Packets are set at unique times, monotonically increasing, the last +\* one is just the max in the votingPowerHist domain. 
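\* For example (editor's note): if VSC packets were sent at provider times
\* {2, 5, 9}, the fold below evaluates to 9; if no packet has been sent yet,
\* DOMAIN votingPowerHist is empty and the fold returns its initial value -1.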
+LastPacketTime == + LET Max2(a,b) == IF a >= b THEN a ELSE b IN + ApaFoldSet(Max2, -1, DOMAIN votingPowerHist) + +\* @type: ($chain, $packet, $time) => $matureVSCPacket; +MatureVSCPacket(c, packet, matT) == + [chain |-> c, packet |-> packet, maturityTime |-> matT] + +\* @type: (Int, Int) => Int; +Delta(a,b) == IF a > b THEN a - b ELSE b - a + +\* @type: (a -> Int, Set(a), Int) => Bool; +BoundedDeltas(fn, dom, bound) == + /\ dom \subseteq DOMAIN fn + /\ \A v1, v2 \in dom: + Delta(fn[v1], fn[v2]) <= bound + +\* All the packets ever sent to c in the order they were sent in +\* @type: ($chain) => Seq($packet); +PacketOrder(c) == ccvChannelsResolved[c] \o ccvChannelsPending[c] + +\* @type: $packet => Set($chain); +RemainingResponders(t) == + { c \in expectedResponders[t]: consumerStatus[c] /= Dropped } + +(*** ACTIONS ***) + +Init == + /\ votingPowerHist = [t \in {} |-> UndefinedPower] + /\ \E initValidators \in SUBSET Nodes: + /\ initValidators /= {} + /\ votingPowerRunning \in [initValidators -> Nat] + /\ \A v \in initValidators: votingPowerRunning[v] > 0 + /\ consumerStatus \in [ConsumerChains -> Status] + /\ expectedResponders = [t \in {} |-> {}] + /\ maturePackets = {} + /\ votingPowerReferences = [chain \in ConsumerChains |-> UndefinedTime] + /\ ccvChannelsPending = [chain \in ConsumerChains |-> <<>>] + /\ ccvChannelsResolved = [chain \in ConsumerChains |-> <<>>] + /\ currentTimes = [c \in Chains |-> 0] + /\ maturityTimes = [c \in ConsumerChains |-> [t \in {} |-> UndefinedTime]] + /\ votingPowerHasChanged = FALSE + /\ boundedDrift = TRUE + /\ lastAction = "Init" + +\* We combine all (un)delegate actions, as well as (un)bonding actions into an +\* abstract VotingPowerChange. +\* Since VPC packets are sent at most once at the end of each block, +\* the granularity wouldn't have added value to the model. +VotingPowerChange == + \E newValidators \in SUBSET Nodes: + /\ newValidators /= {} + /\ votingPowerRunning' \in [newValidators -> Nat] + /\ \A v \in newValidators: votingPowerRunning'[v] > 0 + \* Note: votingPowerHasChanged' is set to true + \* even if votingPowerRunning' = votingPowerRunning + /\ votingPowerHasChanged' = TRUE + /\ UNCHANGED consumerVars + /\ UNCHANGED << votingPowerHist, expectedResponders, maturePackets >> + /\ lastAction' = "VotingPowerChange" + +RcvPacket == + \E c \in ActiveConsumers: + \* There must be a packet to be received + /\ Len(ccvChannelsPending[c]) /= 0 + /\ LET packet == Head(ccvChannelsPending[c]) IN + \* The voting power adjusts immediately, but the matureVSCPacket + \* is sent later, on maturity + /\ votingPowerReferences' = [votingPowerReferences EXCEPT ![c] = packet] + \* Maturity happens after UnbondingPeriod time has elapsed on c + /\ maturityTimes' = [ + maturityTimes EXCEPT ![c] = + ExtendFnBy(maturityTimes[c], packet, currentTimes[c] + UnbondingPeriod) + ] + /\ ccvChannelsResolved' = [ccvChannelsResolved EXCEPT ![c] = Append(@, packet)] + \* Drop from channel, to unblock reception of other packets. 
+ /\ ccvChannelsPending' = [ccvChannelsPending EXCEPT ![c] = Tail(@)] + /\ UNCHANGED providerVars + /\ UNCHANGED currentTimes + /\ UNCHANGED votingPowerHasChanged + /\ lastAction' = "RcvPacket" + +SendMatureVSCPacket == + \E c \in ActiveConsumers: + \* Has been received + \E packet \in DOMAIN maturityTimes[c]: + \* Has matured + /\ currentTimes[c] >= maturityTimes[c][packet] + \* Hasn't been sent before + /\ \A matureVSCPacket \in maturePackets: + \/ matureVSCPacket.chain /= c + \/ matureVSCPacket.packetTime /= packet + /\ maturePackets' = maturePackets \union { MatureVSCPacket(c, packet, currentTimes[c]) } + /\ UNCHANGED consumerVars + /\ UNCHANGED << votingPowerHist, votingPowerRunning, expectedResponders >> + /\ UNCHANGED votingPowerHasChanged + /\ lastAction' = "SendMatureVSCPacket" + +\* Partial action, always happens on Next +\* Each consumer status advances (or is unchanged) in the +\* Unused -> Initializing -> Active -> Dropped order +PromoteConsumers == + \E newStatus \in [ConsumerChains -> Status]: + /\ \A c \in ConsumerChains: + \* No regressions + /\ consumerStatus[c] = Initializing => newStatus[c] /= Unused + /\ consumerStatus[c] = Active => newStatus[c] \in {Active, Dropped} + /\ consumerStatus[c] = Dropped => newStatus[c] = Dropped + \* All timed out chains are dropped + \* Only ACTIVE chains may time out (not Initializing) + /\ ( /\ consumerStatus[c] = Active + /\ PacketTimeoutForConsumer(c, ccvChannelsPending'[c], currentTimes'[c], currentTimes'[ProviderChain], maturityTimes'[c]) + ) => consumerStatus[c] = Dropped + /\ consumerStatus' = newStatus + + +\* Partial action, always happens on EndBlock, may also happen independently +AdvanceTimeCore == + \E newTimes \in [Chains -> Nat]: + \* None regress + \* Does not guarantee strict time progression in AdvanceTime. + \* In EndProviderBlockAndSendPacket, provider time is forced + \* to strictly progress with an additional constraint. + /\ \A c \in Chains: newTimes[c] >= currentTimes[c] + /\ currentTimes' = newTimes + +\* Time may also elapse without EndProviderBlockAndSendPacket. 
+AdvanceTime == + /\ AdvanceTimeCore + /\ UNCHANGED providerVars + /\ UNCHANGED << votingPowerReferences, ccvChannelsPending, ccvChannelsResolved, maturityTimes >> + /\ UNCHANGED votingPowerHasChanged + /\ lastAction' = "AdvanceTime" + +EndProviderBlockAndSendPacket == + \* Packets are only sent if there is a VPC + /\ votingPowerHasChanged + /\ LET packet == currentTimes[ProviderChain] IN + /\ ccvChannelsPending' = + [ + chain \in ConsumerChains |-> Append( + ccvChannelsPending[chain], + \* a packet is just the current time, the VP can be read from votingPowerHist + currentTimes[ProviderChain] + ) + ] + /\ votingPowerHist' = ExtendFnBy(votingPowerHist, packet, votingPowerRunning) + \* All currently live (= Active or Init) consumers are expected to respond to this packet + /\ expectedResponders' = ExtendFnBy(expectedResponders, packet, LiveConsumers) + \* Reset flag for next block + /\ votingPowerHasChanged' = FALSE + \* packet sending forces time progression on provider + /\ AdvanceTimeCore + /\ currentTimes'[ProviderChain] > currentTimes[ProviderChain] + /\ UNCHANGED <> + /\ UNCHANGED <> + /\ lastAction' = "EndProviderBlockAndSendPacket" + +Next == + /\\/ EndProviderBlockAndSendPacket + \/ VotingPowerChange + \/ RcvPacket + \/ SendMatureVSCPacket + \/ AdvanceTime + \* Drop timed out, possibly promote rest + /\ PromoteConsumers + /\ boundedDrift' = boundedDrift /\ + BoundedDeltas(currentTimes', LiveNext \union {ProviderChain}, MaxDrift) + +(*** PROPERTIES/INVARIANTS ***) + +\* VCS must also mature on provider +LastVCSMatureOnProvider == + LastPacketTime + UnbondingPeriod <= currentTimes[ProviderChain] + +VPCUpdateInProgress == + \* some chain has pending packets + \/ \E c \in LiveConsumers: + \/ Len(ccvChannelsPending[c]) /= 0 + \/ \E packet \in DOMAIN maturityTimes[c]: maturityTimes[c][packet] < currentTimes[c] + \* not enough time has elapsed on provider itself since last update + \/ ~LastVCSMatureOnProvider + +LiveConsumersNotTimedOut == + \A c \in LiveConsumers: + ~PacketTimeoutForConsumer(c, ccvChannelsPending[c], currentTimes[c], currentTimes[ProviderChain], maturityTimes[c]) + +\* Sanity- predicates check that the data structures don't take on unexpected values +SanityVP == + /\ \A t \in DOMAIN votingPowerHist: + LET VP == votingPowerHist[t] IN + VP /= UndefinedPower <=> + \A node \in DOMAIN VP: VP[node] >= 0 + /\ \A node \in DOMAIN votingPowerRunning: votingPowerRunning[node] >= 0 + +SanityRefs == + \A c \in ConsumerChains: + votingPowerReferences[c] < 0 <=> votingPowerReferences[c] = UndefinedTime + +SanityMaturity == + \A c \in ConsumerChains: + \A t \in DOMAIN maturityTimes[c]: + LET mt == maturityTimes[c][t] IN + mt < 0 <=> mt = UndefinedTime + +Sanity == + /\ SanityVP + /\ SanityRefs + /\ SanityMaturity + + +\* Since the clocks may drift, any delay that exceeds +\* Timeout + MaxDrift is perceived as timeout on all chains +AdjustedTimeout == Timeout + MaxDrift + +\* Any packet sent by the provider is either received within Timeout, or +\* the consumer chain is no longer considered active. +ReceptionBeforeTimeout == + \A t \in DOMAIN votingPowerHist: + \A c \in RemainingResponders(t): + \* If c is still live after Timeout has elapsed from packet t broadcast ... + TimeoutGuard(currentTimes[c], t + AdjustedTimeout) => + \* ... then c must have received packet t + t \in DOMAIN maturityTimes[c] + +\* Any packet received by the consumer matures within Timeout of reception, +\* or the consumer is no longer considered active. 
+MaturedBeforeTimeout == + \A t \in DOMAIN votingPowerHist: + \A c \in RemainingResponders(t): + t \in DOMAIN maturityTimes[c] => + \* If c is still active after Timeout has elapsed from packet t reception ... + \* Note: Reception time = maturityTimes[c][p] - UnbondingPeriod + ( + TimeoutGuard(currentTimes[ProviderChain] + UnbondingPeriod, maturityTimes[c][t] + AdjustedTimeout) => + \* ... then packet t must have matured on c + \E matureVSCPacket \in maturePackets: + /\ matureVSCPacket.chain = c + /\ matureVSCPacket.packetTime = t + ) + + +\* All packets mature at the latest by Timeout, from all +\* active consumers (or those consumers are removed from the active set) +\* It should be the case that ReceptionBeforeTimeout /\ MaturedBeforeTimeout => EventuallyMatureOnProvider +EventuallyMatureOnProvider == + \A t \in DOMAIN votingPowerHist: + \* If a packet was sent at time t and enough time has elapsed, + \* s.t. all consumers should have responded ... + TimeoutGuard(currentTimes[ProviderChain], t + 2 * AdjustedTimeout) => + \* then, all consumers have matured + \A c \in RemainingResponders(t): + \E matureVSCPacket \in maturePackets: + /\ matureVSCPacket.chain = c + /\ matureVSCPacket.packetTime = t + + +\* Invariants from https://github.com/cosmos/interchain-security/blob/main/docs/quality_assurance.md + +(* +4.10 - The provider chain's correctness is not affected by a consumer chain +shutting down + +What is "provider chain correctness"? +*) + +(* +4.11 - The provider chain can graciously handle a CCV packet timing out +(without shutting down) - expected outcome: +consumer chain shuts down and its state in provider CCV module is removed +*) +Inv411 == + boundedDrift => + \A c \in ConsumerChains: + TimeoutOnReception(ccvChannelsPending[c], currentTimes[c]) => + c \notin LiveConsumers + +(* +4.12 - The provider chain can graciously handle a StopConsumerChainProposal - +expected outcome: consumer chain shuts down and its state +in provider CCV module is removed. + +What is "graciously handle"? +*) + +(* +6.01 - Every validator set on any consumer chain MUST either be or have been +a validator set on the provider chain. + +In the current model, implicit through construction (votingPowerReferences) +*) +Inv601 == + \A c \in LiveConsumers: + LET ref == votingPowerReferences[c] IN + ref /= UndefinedTime => ref \in DOMAIN votingPowerHist + +(* +6.02 - Any update in the power of a validator val on the provider, as a result of +- (increase) Delegate() / Redelegate() to val +- (increase) val joining the provider validator set +- (decrease) Undelegate() / Redelegate() from val +- (decrease) Slash(val) +- (decrease) val leaving the provider validator set +MUST be present in a ValidatorSetChangePacket that is sent to all registered consumer chains +*) +Inv602 == + \A packet \in DOMAIN votingPowerHist: + \A c \in LiveConsumers: + LET packetsToC == PacketOrder(c) IN + \E i \in DOMAIN packetsToC: + packetsToC[i] = packet + +(* +6.03 - Every consumer chain receives the same sequence of +ValidatorSetChangePackets in the same order. 
+ +Note: consider only prefixes on received packets (ccvChannelsResolved) +*) +Inv603 == + \A c1,c2 \in LiveConsumers: + \A i \in (DOMAIN ccvChannelsResolved[c1] \intersect DOMAIN ccvChannelsResolved[c2]): + ccvChannelsResolved[c1][i] = ccvChannelsResolved[c2][i] + +(* +7.01 - For every ValidatorSetChangePacket received by a consumer chain at +time t, a MaturedVSCPacket is sent back to the provider in the first block +with a timestamp >= t + UnbondingPeriod + +Modification: not necessarily _first_ block with that timestamp, +since we don't model height _and_ time. +*) +Inv701 == + boundedDrift => MaturedBeforeTimeout + +(* +7.02 - If an unbonding operation resulted in a ValidatorSetChangePacket sent +to all registered consumer chains, then it cannot complete before receiving +matching MaturedVSCPackets from these consumer chains +(unless some of these consumer chains are removed) + +We can define change completion, but we don't model it. Best approximation: +*) +Inv702 == + boundedDrift => EventuallyMatureOnProvider + +Inv == + \* /\ Sanity + \* /\ LiveConsumersNotTimedOut + /\ (boundedDrift => + /\ ReceptionBeforeTimeout + /\ MaturedBeforeTimeout + ) + + + +============================================================================= \ No newline at end of file diff --git a/x/ccv/provider/keeper/prototyping/model/library/ref/MC_CCV.tla b/x/ccv/provider/keeper/prototyping/model/library/ref/MC_CCV.tla new file mode 100644 index 0000000000..40f9bb2177 --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/library/ref/MC_CCV.tla @@ -0,0 +1,62 @@ +--------------------------- MODULE MC_CCV --------------------------- + +EXTENDS Integers + +Nodes == {"1_OF_N", "2_OF_N", "3_OF_N", "4_OF_N"} +ConsumerChains == {"1_OF_C", "2_OF_C", "3_OF_C", "4_OF_C"} +\* UnbondingPeriod == 3 * 7 * 24 \* h +\* Timeout == 4 * 7 * 24 \* h +\* MaxDrift == 24 \* h + +CONSTANT + \* @type: $time; + UnbondingPeriod, + \* @type: $time; + Timeout, + \* @type: $time; + MaxDrift + +CInit == + /\ UnbondingPeriod \in Nat + /\ Timeout \in Nat + /\ MaxDrift \in Nat + /\ MaxDrift < Timeout + +\* Provider chain only +VARIABLES + \* @type: $time -> $votingPowerOnChain; + votingPowerHist, + \* @type: $votingPowerOnChain; + votingPowerRunning, + \* @type: $chain -> STATUS; + consumerStatus, + \* @type: $packet -> Set($chain); + expectedResponders, + \* @type: Set($matureVSCPacket); + maturePackets + +\* Consumer chains or both +VARIABLES + \* @type: $chain -> $time; + votingPowerReferences, + \* @type: $chain -> Seq($packet); + ccvChannelsPending, + \* @type: $chain -> Seq($packet); + ccvChannelsResolved, + \* @type: $chain -> $time; + currentTimes, + \* @type: $chain -> $time -> $time; + maturityTimes + +\* Bookkeeping +VARIABLES + \* @type: Str; + lastAction, + \* @type: Bool; + votingPowerHasChanged, + \* @type: Bool; + boundedDrift + +INSTANCE CCV + +============================================================================= \ No newline at end of file diff --git a/x/ccv/provider/keeper/prototyping/model/library/ref/typedefs.tla b/x/ccv/provider/keeper/prototyping/model/library/ref/typedefs.tla new file mode 100644 index 0000000000..62e9ea7501 --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/library/ref/typedefs.tla @@ -0,0 +1,12 @@ +--------------------------- MODULE typedefs --------------------------- +(* + @typeAlias: chain = C; chain type + @typeAlias: node = N; node type + @typeAlias: power = Int; voting power + @typeAlias: time = Int; + @typeAlias: votingPowerOnChain = $node -> $power; + @typeAlias: packet 
= $time; + @typeAlias: matureVSCPacket = [chain: $chain, packet: $packet, maturityTime: $time]; +*) +AliasesCVV == TRUE +============================================================================= \ No newline at end of file diff --git a/x/ccv/provider/keeper/prototyping/model/library/tlcFolds.tla b/x/ccv/provider/keeper/prototyping/model/library/tlcFolds.tla new file mode 100644 index 0000000000..d7dc1107ec --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/library/tlcFolds.tla @@ -0,0 +1,27 @@ + +---- MODULE tlcFolds ---- + +EXTENDS Integers, FiniteSets, Sequences + +(*****************************************************************************) +(* The folding operator, used to implement computation over a set. *) +(* Apalache implements a more efficient encoding than the one below. *) +(* (from the community modules). *) +(*****************************************************************************) +RECURSIVE FoldSet(_,_,_) +FoldSet( Op(_,_), v, S ) == IF S = {} + THEN v + ELSE LET w == CHOOSE x \in S: TRUE + IN LET T == S \ {w} + IN FoldSet( Op, Op(v,w), T ) + +(*****************************************************************************) +(* The folding operator, used to implement computation over a sequence. *) +(* Apalache implements a more efficient encoding than the one below. *) +(* (from the community modules). *) +(*****************************************************************************) +RECURSIVE FoldSeq(_,_,_) +FoldSeq( Op(_,_), v, seq ) == IF seq = <<>> + THEN v + ELSE FoldSeq( Op, Op(v,Head(seq)), Tail(seq) ) +==== diff --git a/x/ccv/provider/keeper/prototyping/model/main.cfg b/x/ccv/provider/keeper/prototyping/model/main.cfg new file mode 100644 index 0000000000..ba7a938460 --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/main.cfg @@ -0,0 +1,3 @@ +INIT Init +NEXT Next +INVARIANT Inv \ No newline at end of file diff --git a/x/ccv/provider/keeper/prototyping/model/main.tla b/x/ccv/provider/keeper/prototyping/model/main.tla new file mode 100644 index 0000000000..8ee33a0967 --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/main.tla @@ -0,0 +1,90 @@ +---- MODULE main ---- + +\* EXTENDS Integers, FiniteSets, Sequences, TLC, Apalache +EXTENDS Integers, FiniteSets, Sequences, TLC + +VARIABLES + \* @type: Str; + actionKind, + \* @type: Int; + nextVSCId, + \* @type: Int; + nextConsumerId, + \* @type: Set(Int); + initialisingConsumers, + \* @type: Set(Int); + activeConsumers, + \* Maps consumer -> vscId + \* @type: Set(<>); + awaitedVSCIds + +InitConsumer == + /\ actionKind' = "InitConsumer" + /\ UNCHANGED nextVSCId + /\ nextConsumerId' = nextConsumerId + 1 + /\ initialisingConsumers' = initialisingConsumers \cup {nextConsumerId} + /\ UNCHANGED activeConsumers + /\ UNCHANGED awaitedVSCIds + +ActivateConsumer == + /\ actionKind' = "ActivateConsumer" + /\ UNCHANGED nextVSCId + /\ UNCHANGED nextConsumerId + /\ \E c \in initialisingConsumers: + /\ initialisingConsumers' = initialisingConsumers \ {c} + /\ activeConsumers' = activeConsumers \cup {c} + /\ UNCHANGED awaitedVSCIds + +StopConsumer == + /\ actionKind' = "StopConsumer" + /\ UNCHANGED nextVSCId + /\ UNCHANGED nextConsumerId + /\ \/ \E c \in initialisingConsumers: + /\ initialisingConsumers' = initialisingConsumers \ {c} + /\ UNCHANGED activeConsumers + /\ UNCHANGED awaitedVSCIds + \/ \E c \in activeConsumers: + /\ UNCHANGED initialisingConsumers + /\ activeConsumers' = activeConsumers \ {c} + /\ awaitedVSCIds' = {pair \in awaitedVSCIds: pair[1] # c} + +(* +After EndBlock the SUT will 
check that the ref cnts are 0 for every +VSCID that does not appear in awaited, and that ref cnts are positive +for every VSCID that does appear in awaited +*) +EndBlock == + /\ actionKind' = "EndBlock" + /\ nextVSCId' = nextVSCId + 1 + /\ UNCHANGED nextConsumerId + /\ UNCHANGED initialisingConsumers + /\ UNCHANGED activeConsumers + /\ awaitedVSCIds' = awaitedVSCIds \cup {<> : c \in activeConsumers} + +RecvMaturity == + /\ actionKind' = "RecvMaturity" + /\ UNCHANGED nextVSCId + /\ UNCHANGED nextConsumerId + /\ UNCHANGED initialisingConsumers + /\ UNCHANGED activeConsumers + /\ \E pair \in awaitedVSCIds: + awaitedVSCIds' = awaitedVSCIds \ {pair} + +Init == + /\ actionKind = "Init" + /\ nextVSCId = 0 + /\ nextConsumerId = 0 + /\ initialisingConsumers = {} + /\ activeConsumers = {} + /\ awaitedVSCIds = {} + +Next == + \/ InitConsumer + \/ ActivateConsumer + \/ StopConsumer + \/ EndBlock + \/ RecvMaturity + +Inv == \A pair \in awaitedVSCIds : pair[1] \in (initialisingConsumers \cup activeConsumers) + +==== diff --git a/x/ccv/provider/keeper/prototyping/model/traceUtil.py b/x/ccv/provider/keeper/prototyping/model/traceUtil.py new file mode 100644 index 0000000000..26f3a345d4 --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/model/traceUtil.py @@ -0,0 +1,16 @@ +import json + +PREFIX_DRIVER = "/Users/danwt/Documents/work/interchain-security/tests/difference/consumerStuttering/driver/" +PREFIX_TRACE_OUPUTS = "/Users/danwt/Documents/work/interchain-security/tests/difference/consumerStuttering/model/_apalache-out/main.tla/" +DIR = "2022-09-06T17-55-50_14502121096210630182" +js = [] +for i in range(10): + fn = f"{PREFIX_TRACE_OUPUTS}{DIR}/example{i+1}.itf.json" + with open(fn, 'r') as fd: + content = fd.read() + j = json.loads(content) + js.append(j) + +fn = f"{PREFIX_DRIVER}traces.json" +with open(fn, 'w') as fd: + fd.write(json.dumps(js, indent=2)) diff --git a/x/ccv/provider/keeper/prototyping/prototype.py b/x/ccv/provider/keeper/prototyping/prototype.py new file mode 100644 index 0000000000..f304df2b85 --- /dev/null +++ b/x/ccv/provider/keeper/prototyping/prototype.py @@ -0,0 +1,20 @@ + +class KeyDelegation: + def __init__(self): + pass + + def setConsumerKey(self, p, c): + pa + + def getConsumerKey(self, p, c): + + def recvSlash(self, c): + + +def main(): + print("hello") + pass + + +if __name__ == "__main__": + main() From d8b1db5ec381f8049ffe5f623e2e734b2823ab41 Mon Sep 17 00:00:00 2001 From: Daniel Date: Fri, 9 Sep 2022 17:46:46 +0100 Subject: [PATCH 002/127] cp --- .../keeper/prototyping/model/main.tla | 69 ++++------------- .../provider/keeper/prototyping/prototype.py | 76 ++++++++++++++++++- 2 files changed, 85 insertions(+), 60 deletions(-) diff --git a/x/ccv/provider/keeper/prototyping/model/main.tla b/x/ccv/provider/keeper/prototyping/model/main.tla index 8ee33a0967..b121e48303 100644 --- a/x/ccv/provider/keeper/prototyping/model/main.tla +++ b/x/ccv/provider/keeper/prototyping/model/main.tla @@ -18,57 +18,6 @@ VARIABLES \* @type: Set(<>); awaitedVSCIds -InitConsumer == - /\ actionKind' = "InitConsumer" - /\ UNCHANGED nextVSCId - /\ nextConsumerId' = nextConsumerId + 1 - /\ initialisingConsumers' = initialisingConsumers \cup {nextConsumerId} - /\ UNCHANGED activeConsumers - /\ UNCHANGED awaitedVSCIds - -ActivateConsumer == - /\ actionKind' = "ActivateConsumer" - /\ UNCHANGED nextVSCId - /\ UNCHANGED nextConsumerId - /\ \E c \in initialisingConsumers: - /\ initialisingConsumers' = initialisingConsumers \ {c} - /\ activeConsumers' = activeConsumers \cup {c} - /\ UNCHANGED 
awaitedVSCIds - -StopConsumer == - /\ actionKind' = "StopConsumer" - /\ UNCHANGED nextVSCId - /\ UNCHANGED nextConsumerId - /\ \/ \E c \in initialisingConsumers: - /\ initialisingConsumers' = initialisingConsumers \ {c} - /\ UNCHANGED activeConsumers - /\ UNCHANGED awaitedVSCIds - \/ \E c \in activeConsumers: - /\ UNCHANGED initialisingConsumers - /\ activeConsumers' = activeConsumers \ {c} - /\ awaitedVSCIds' = {pair \in awaitedVSCIds: pair[1] # c} - -(* -After EndBlock the SUT will check that the ref cnts are 0 for every -VSCID that does not appear in awaited, and that ref cnts are positive -for every VSCID that does appear in awaited -*) -EndBlock == - /\ actionKind' = "EndBlock" - /\ nextVSCId' = nextVSCId + 1 - /\ UNCHANGED nextConsumerId - /\ UNCHANGED initialisingConsumers - /\ UNCHANGED activeConsumers - /\ awaitedVSCIds' = awaitedVSCIds \cup {<> : c \in activeConsumers} - -RecvMaturity == - /\ actionKind' = "RecvMaturity" - /\ UNCHANGED nextVSCId - /\ UNCHANGED nextConsumerId - /\ UNCHANGED initialisingConsumers - /\ UNCHANGED activeConsumers - /\ \E pair \in awaitedVSCIds: - awaitedVSCIds' = awaitedVSCIds \ {pair} Init == /\ actionKind = "Init" @@ -78,13 +27,21 @@ Init == /\ activeConsumers = {} /\ awaitedVSCIds = {} +EndBlock == + +SetKey == + +ReceiveSlash == + +ReceiveMaturity == + Next == - \/ InitConsumer - \/ ActivateConsumer - \/ StopConsumer \/ EndBlock - \/ RecvMaturity + \/ SetKey + \/ ReceiveSlash + \/ ReceiveMaturity -Inv == \A pair \in awaitedVSCIds : pair[1] \in (initialisingConsumers \cup activeConsumers) +\* When a validator had a positive power on the consumer, it was slashable up until UNBONDING_PERIOD later +Inv == ==== diff --git a/x/ccv/provider/keeper/prototyping/prototype.py b/x/ccv/provider/keeper/prototyping/prototype.py index f304df2b85..2b89df86db 100644 --- a/x/ccv/provider/keeper/prototyping/prototype.py +++ b/x/ccv/provider/keeper/prototyping/prototype.py @@ -1,14 +1,78 @@ class KeyDelegation: def __init__(self): + self.localKeyToLastUpdate = {} + self.localKeyToCurrentForeignKey + self.foreignKeyToLocalKey = {} + self.foreignKeyToVSCIDWhenLastSent = {} + self.localKeysForWhichUpdateMustBeSent = set() + + def SetKey(self, v, k): + self.currentKey[v] = k + if v in self.localKeyToLastUpdate: + [_, lastPower] = self.localKeyToLastUpdate[v] + if 0 < lastPower: + # If validator is known to the consumer + self.localKeysForWhichUpdateMustBeSent.add(v) + + def GetLocalKey(self, foreignKey): + return self.foreignKeyToLocalKey[foreignKey] + + def ComputeUpdates(self, vscid, localUpdates): + updates = {} + # Ship updates for any + for v in self.localKeysForWhichUpdateMustBeSent: + currKey = self.localKeyToCurrentForeignKey[v] + [lastKey, lastPower] = self.localKeyToLastUpdate[v] + updates[lastKey] = 0 + updates[currKey] = lastPower + self.localKeysForWhichUpdateMustBeSent = set() + for v, power in localUpdates.items(): # Will happen if power changed since last block + if v in self.localKeyToLastUpdate: + [lastKey, _] = self.localKeyToLastUpdate[v] + updates[lastKey] = 0 + currKey = self.localKeyToCurrentForeignKey[v] + updates[currKey] = power + + for foreignKey, power in updates.items(): + self.foreignKeyToVSCIDWhenLastSent[foreignKey] = vscid + self.localKeyToLastUpdate[self.foreignKeyToLocalKey[foreignKey]] = [ + foreignKey, power] + return updates + + def Prune(self, mostRecentlyMaturedVscid): + removed = [foreignKey for foreignKey, + vscid in self.foreignKeyToVSCIDWhenLastSent if vscid <= mostRecentlyMaturedVscid] + for foreignKey in removed: + del 
self.foreignKeyToVSCIDWhenLastSent[foreignKey] + del self.foreignKeyToLocalKey[foreignKey] + + +consumers = ["c0", "c1"] +vals = ["v0", "v1"] + + +class Provider: + def __init__(self): + self.keyDelegations = {c: KeyDelegation() for c in consumers} pass - def setConsumerKey(self, p, c): - pa + def SendUpdates(self): + for c in consumers: + updates = {"v0": 42, "v1": 0} + updates = self.keyDelegations[c].computeUpdates(updates) + # ship the updates + + def SetKey(self, c, v, k): + self.keyDelegations[c].SetKey(v, k) - def getConsumerKey(self, p, c): + def Slash(self, c, foreignKey, vscID): + localKey = self.keyDelegations[c].foreignKeyToLocalKey[foreignKey] + # slash - def recvSlash(self, c): + def Mature(self, c, ascendingVscids): + latestVscid = ascendingVscids[-1] + self.keyDelegations[c].Prune(latestVscid) def main(): @@ -17,4 +81,8 @@ def main(): if __name__ == "__main__": + # x = {c: KeyDelegation() for c in consumers} + # x["c0"].lastKeySent = {1: 2} + # print(x["c0"].lastKeySent) + # print(x["c1"].lastKeySent) main() From 05adc1fc65f158d407df352574a9f6ad5ce2cf38 Mon Sep 17 00:00:00 2001 From: Daniel Date: Fri, 9 Sep 2022 18:07:37 +0100 Subject: [PATCH 003/127] cp --- go.mod | 1 + go.sum | 2 + x/ccv/provider/keeper/key_delegation.go | 52 +++++++++++++++++++++++++ 3 files changed, 55 insertions(+) create mode 100644 x/ccv/provider/keeper/key_delegation.go diff --git a/go.mod b/go.mod index 15e2b7af1b..51b04f0d32 100644 --- a/go.mod +++ b/go.mod @@ -127,6 +127,7 @@ require ( gopkg.in/ini.v1 v1.66.2 // indirect gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect nhooyr.io/websocket v1.8.6 // indirect + pgregory.net/rapid v0.5.2 // indirect ) replace ( diff --git a/go.sum b/go.sum index ac7da1f837..9f5bba4985 100644 --- a/go.sum +++ b/go.sum @@ -1744,6 +1744,8 @@ mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b/go.mod h1:2odslEg/xrtNQqCYg2/jC mvdan.cc/unparam v0.0.0-20210104141923-aac4ce9116a7/go.mod h1:hBpJkZE8H/sb+VRFvw2+rBpHNsTBcvSpk61hr8mzXZE= nhooyr.io/websocket v1.8.6 h1:s+C3xAMLwGmlI31Nyn/eAehUlZPwfYZu2JXM621Q5/k= nhooyr.io/websocket v1.8.6/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0= +pgregory.net/rapid v0.5.2 h1:zC+jmuzcz5yJvG/igG06aLx8kcGmZY435NcuyhblKjY= +pgregory.net/rapid v0.5.2/go.mod h1:PY5XlDGj0+V1FCq0o192FdRhpKHGTRIWBgqjDBTrq04= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/x/ccv/provider/keeper/key_delegation.go b/x/ccv/provider/keeper/key_delegation.go new file mode 100644 index 0000000000..de2bb76291 --- /dev/null +++ b/x/ccv/provider/keeper/key_delegation.go @@ -0,0 +1,52 @@ +package keeper + +import "errors" + +type LK = int +type FK = int +type VSCID = int + +type update struct { + key FK + power int +} + +type KeyDelegation struct { + localKeyToLastUpdate map[LK]update + localKeyToCurrentForeignKey map[LK]FK + foreignKeyToLocalKey map[FK]LK + foreignKeyToVscidWhenLastSent map[FK]VSCID + localKeysForWhichUpdateMustBeSent []LK +} + +func MakeKeyDelegation() KeyDelegation { + return KeyDelegation{ + localKeyToLastUpdate: map[LK]update{}, + localKeyToCurrentForeignKey: map[LK]FK{}, + foreignKeyToLocalKey: map[FK]LK{}, + foreignKeyToVscidWhenLastSent: map[FK]VSCID{}, + localKeysForWhichUpdateMustBeSent: []LK{}, + } +} + +func (m *KeyDelegation) SetKey(lk LK, fk FK) { + m.localKeyToCurrentForeignKey[lk] = fk + if u, ok := 
m.localKeyToLastUpdate[lk]; ok { + if 0 < u.power { + m.localKeysForWhichUpdateMustBeSent = append(m.localKeysForWhichUpdateMustBeSent, lk) + } + } +} + +func (m *KeyDelegation) GetLocalKey(fk FK) (LK, error) { + if lk, ok := m.foreignKeyToLocalKey[fk]; ok { + return lk, nil + } else { + return -1, errors.New("nope") + } +} + +func (m *KeyDelegation) ComputeUpdates(vscid VSCID, localUpdates []update) (foreignUpdates []update) { + foreignUpdates = []update{} + +} From ec0b333cfa329615f70d6fafbe252ac0c05f7397 Mon Sep 17 00:00:00 2001 From: Daniel Date: Fri, 9 Sep 2022 18:52:42 +0100 Subject: [PATCH 004/127] pt --- x/ccv/provider/keeper/prototyping/prototype.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/x/ccv/provider/keeper/prototyping/prototype.py b/x/ccv/provider/keeper/prototyping/prototype.py index 2b89df86db..2cef2fe2ed 100644 --- a/x/ccv/provider/keeper/prototyping/prototype.py +++ b/x/ccv/provider/keeper/prototyping/prototype.py @@ -15,9 +15,6 @@ def SetKey(self, v, k): # If validator is known to the consumer self.localKeysForWhichUpdateMustBeSent.add(v) - def GetLocalKey(self, foreignKey): - return self.foreignKeyToLocalKey[foreignKey] - def ComputeUpdates(self, vscid, localUpdates): updates = {} # Ship updates for any From 46e54f965c7c20bce6fb865a1cd165d267546bbc Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 20 Sep 2022 13:59:59 +0100 Subject: [PATCH 005/127] Write more code for KeyDelegation --- x/ccv/provider/keeper/key_delegation.go | 47 ++++++++++++++++++++++++- 1 file changed, 46 insertions(+), 1 deletion(-) diff --git a/x/ccv/provider/keeper/key_delegation.go b/x/ccv/provider/keeper/key_delegation.go index de2bb76291..a287fe98dd 100644 --- a/x/ccv/provider/keeper/key_delegation.go +++ b/x/ccv/provider/keeper/key_delegation.go @@ -29,10 +29,12 @@ func MakeKeyDelegation() KeyDelegation { } } -func (m *KeyDelegation) SetKey(lk LK, fk FK) { +func (m *KeyDelegation) SetForeignKey(lk LK, fk FK) { m.localKeyToCurrentForeignKey[lk] = fk if u, ok := m.localKeyToLastUpdate[lk]; ok { if 0 < u.power { + // If last update had positive power then the consumer is aware of the old key + // so a deletion update must be sent. 
m.localKeysForWhichUpdateMustBeSent = append(m.localKeysForWhichUpdateMustBeSent, lk) } } @@ -46,7 +48,50 @@ func (m *KeyDelegation) GetLocalKey(fk FK) (LK, error) { } } +func (m *KeyDelegation) Prune(mostRecentlyMaturedVscid VSCID) { + toRemove := []FK{} + for fk, vscid := range m.foreignKeyToVscidWhenLastSent { + if vscid <= mostRecentlyMaturedVscid { + toRemove = append(toRemove, fk) + } + } + for _, fk := range toRemove { + delete(m.foreignKeyToVscidWhenLastSent, fk) + delete(m.foreignKeyToLocalKey, fk) + } +} + func (m *KeyDelegation) ComputeUpdates(vscid VSCID, localUpdates []update) (foreignUpdates []update) { foreignUpdates = []update{} + // Create any updates for validators whose power did not change + for _, lk := range m.localKeysForWhichUpdateMustBeSent { + currKey := m.localKeyToCurrentForeignKey[lk] + u := m.localKeyToLastUpdate[lk] + // Create an update which will delete the validator for the old key + foreignUpdates = append(foreignUpdates, update{key: u.key, power: 0}) + // Create an update which will add the validator for the new key + foreignUpdates = append(foreignUpdates, update{key: currKey, power: u.power}) + } + m.localKeysForWhichUpdateMustBeSent = []LK{} + + // Create any updates for validators whose powers did change + for _, u := range localUpdates { + // Check if the consumer has an old key + if lastU, ok := m.localKeyToLastUpdate[u.key]; ok { + // Create an update which will delete the validator for the old key + foreignUpdates = append(foreignUpdates, update{key: lastU.key, power: 0}) + } + currKey := m.localKeyToCurrentForeignKey[u.key] + // Create an update which will add/update the validator for the current key + foreignUpdates = append(foreignUpdates, update{key: currKey, power: u.power}) + } + + // Update internal bookkeeping + for _, u := range foreignUpdates { + m.foreignKeyToVscidWhenLastSent[u.key] = vscid + m.localKeyToLastUpdate[m.foreignKeyToLocalKey[u.key]] = u + } + + return foreignUpdates } From 383bd04a423191a5988a2bf4167eddb9b3f0ac23 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 20 Sep 2022 14:59:51 +0100 Subject: [PATCH 006/127] cp --- x/ccv/provider/keeper/key_delegation.go | 4 +- .../keeper/prototyping/model/main.tla | 68 +++++++++++++------ 2 files changed, 49 insertions(+), 23 deletions(-) diff --git a/x/ccv/provider/keeper/key_delegation.go b/x/ccv/provider/keeper/key_delegation.go index a287fe98dd..e6e4ce5d08 100644 --- a/x/ccv/provider/keeper/key_delegation.go +++ b/x/ccv/provider/keeper/key_delegation.go @@ -12,7 +12,9 @@ type update struct { } type KeyDelegation struct { - localKeyToLastUpdate map[LK]update + // TODO: how to GC this? + localKeyToLastUpdate map[LK]update + // TODO: how to GC this? localKeyToCurrentForeignKey map[LK]FK foreignKeyToLocalKey map[FK]LK foreignKeyToVscidWhenLastSent map[FK]VSCID diff --git a/x/ccv/provider/keeper/prototyping/model/main.tla b/x/ccv/provider/keeper/prototyping/model/main.tla index b121e48303..4c79c60b6c 100644 --- a/x/ccv/provider/keeper/prototyping/model/main.tla +++ b/x/ccv/provider/keeper/prototyping/model/main.tla @@ -1,35 +1,52 @@ ---- MODULE main ---- -\* EXTENDS Integers, FiniteSets, Sequences, TLC, Apalache -EXTENDS Integers, FiniteSets, Sequences, TLC +EXTENDS Integers, FiniteSets, Sequences, TLC, Apalache +\* EXTENDS Integers, FiniteSets, Sequences, TLC + +(* + +@typeAlias: Action = [ kind : Str, ... 
]; +@typeAlias: Pair = [ lk : Str, fk : Str ]; +@typeAlias: LK = Str; +@typeAlias: FK = Str; +@typeAlias: Mapping = LK -> FK; + +*) + +LKS = + +Pairs == { + [ lk : "lk0", fk : "fk0-0" ], + [ lk : "lk0", fk : "fk0-1" ], + [ lk : "lk0", fk : "fk0-2" ], + [ lk : "lk1", fk : "fk1-0" ], + [ lk : "lk1", fk : "fk1-1" ], + [ lk : "lk1", fk : "fk1-2" ], + [ lk : "lk2", fk : "fk2-0" ], + [ lk : "lk2", fk : "fk2-1" ], + [ lk : "lk2", fk : "fk2-2" ] + } + +Mappings == { f \in [LKS -> FKS] : ~ (\E a, b \in DOMAIN f : f[a] = f[b]) } +Powers == [ LKS -> 0..2 ] VARIABLES - \* @type: Str; - actionKind, - \* @type: Int; - nextVSCId, - \* @type: Int; - nextConsumerId, - \* @type: Set(Int); - initialisingConsumers, - \* @type: Set(Int); - activeConsumers, - \* Maps consumer -> vscId - \* @type: Set(<>); - awaitedVSCIds + \* @type: Action; + action, + \* @type: Mapping; + mapping, + \* @type: Str -> Int; + powers, Init == - /\ actionKind = "Init" - /\ nextVSCId = 0 - /\ nextConsumerId = 0 - /\ initialisingConsumers = {} - /\ activeConsumers = {} - /\ awaitedVSCIds = {} EndBlock == + \E m \in Mappings, p \in Powers : + SetKey == + \E p \in Pairs : ReceiveSlash == @@ -41,7 +58,14 @@ Next == \/ ReceiveSlash \/ ReceiveMaturity -\* When a validator had a positive power on the consumer, it was slashable up until UNBONDING_PERIOD later Inv == +(* +It is always possible to slash a local key as long as the +An FK to LK mapping is always available from the time an update includes the LK until the time the vscid +for + +If a vscid for a given mapping was not matured then the mapping exists +*) + ==== From c27a62bad18cffcee60d4ddae8e9fae138f4afda Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 20 Sep 2022 15:21:13 +0100 Subject: [PATCH 007/127] cp --- .../keeper/prototyping/model/main.tla | 73 +++++++------------ 1 file changed, 28 insertions(+), 45 deletions(-) diff --git a/x/ccv/provider/keeper/prototyping/model/main.tla b/x/ccv/provider/keeper/prototyping/model/main.tla index 4c79c60b6c..fb7ea92b97 100644 --- a/x/ccv/provider/keeper/prototyping/model/main.tla +++ b/x/ccv/provider/keeper/prototyping/model/main.tla @@ -5,67 +5,50 @@ EXTENDS Integers, FiniteSets, Sequences, TLC, Apalache (* -@typeAlias: Action = [ kind : Str, ... 
]; -@typeAlias: Pair = [ lk : Str, fk : Str ]; -@typeAlias: LK = Str; -@typeAlias: FK = Str; -@typeAlias: Mapping = LK -> FK; + @typeAlias: ACTION = [ kind : Str ]; + @typeAlias: LK = Str; + @typeAlias: FK = Str; + @typeAlias: MAPPING = LK -> FK; + @typeAlias: POWER = LK -> Int; *) -LKS = +VARIABLES + \* @type: ACTION; + action, + \* @type: MAPPING; + mapping, + \* @type: POWER; + power + +\* @type: () => Set(LK); +LKS == {"lk0", "lk1", "lk2"} -Pairs == { - [ lk : "lk0", fk : "fk0-0" ], - [ lk : "lk0", fk : "fk0-1" ], - [ lk : "lk0", fk : "fk0-2" ], - [ lk : "lk1", fk : "fk1-0" ], - [ lk : "lk1", fk : "fk1-1" ], - [ lk : "lk1", fk : "fk1-2" ], - [ lk : "lk2", fk : "fk2-0" ], - [ lk : "lk2", fk : "fk2-1" ], - [ lk : "lk2", fk : "fk2-2" ] - } +\* @type: () => Set(FK); +FKS == {"fk0", "fk1", "fk2", "fk3", "fk4", "fk5", "fk6", "fk7", "fk8"} +\* @type: () => Set(MAPPING); Mappings == { f \in [LKS -> FKS] : ~ (\E a, b \in DOMAIN f : f[a] = f[b]) } -Powers == [ LKS -> 0..2 ] -VARIABLES - \* @type: Action; - action, - \* @type: Mapping; - mapping, - \* @type: Str -> Int; - powers, +\* @type: () => Set(POWER); +Powers == [ LKS -> 0..2 ] Init == + /\ action = [kind |-> "none"] + /\ mapping \in Mappings + /\ power \in Powers EndBlock == \E m \in Mappings, p \in Powers : - - -SetKey == - \E p \in Pairs : - -ReceiveSlash == - -ReceiveMaturity == + /\ UNCHANGED action + /\ mapping' = m + /\ power' = p Next == \/ EndBlock - \/ SetKey - \/ ReceiveSlash - \/ ReceiveMaturity + \/ TRUE -Inv == - -(* -It is always possible to slash a local key as long as the -An FK to LK mapping is always available from the time an update includes the LK until the time the vscid -for - -If a vscid for a given mapping was not matured then the mapping exists -*) +Inv == TRUE ==== From 07d8cdb0c43f61b9d2cee48591a78de02c42443c Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 20 Sep 2022 15:28:35 +0100 Subject: [PATCH 008/127] cp --- .../keeper/prototyping/model/main.tla | 28 +++++++++++-------- 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/x/ccv/provider/keeper/prototyping/model/main.tla b/x/ccv/provider/keeper/prototyping/model/main.tla index fb7ea92b97..58f0654121 100644 --- a/x/ccv/provider/keeper/prototyping/model/main.tla +++ b/x/ccv/provider/keeper/prototyping/model/main.tla @@ -13,6 +13,17 @@ EXTENDS Integers, FiniteSets, Sequences, TLC, Apalache *) +CONSTANTS + \* @type: Set(LK); + LKS, + \* @type: Set(FK); + FKS, + \* @type: Set(MAPPING); + Mappings, + \* @type: Set(POWER); + Powers + + VARIABLES \* @type: ACTION; action, @@ -21,18 +32,11 @@ VARIABLES \* @type: POWER; power -\* @type: () => Set(LK); -LKS == {"lk0", "lk1", "lk2"} - -\* @type: () => Set(FK); -FKS == {"fk0", "fk1", "fk2", "fk3", "fk4", "fk5", "fk6", "fk7", "fk8"} - -\* @type: () => Set(MAPPING); -Mappings == { f \in [LKS -> FKS] : ~ (\E a, b \in DOMAIN f : f[a] = f[b]) } - -\* @type: () => Set(POWER); -Powers == [ LKS -> 0..2 ] - +CInit == + /\ LKS = {"lk0", "lk1", "lk2"} + /\ FKS = {"fk0", "fk1", "fk2", "fk3", "fk4", "fk5", "fk6", "fk7", "fk8"} + /\ Mappings = { f \in [LKS -> FKS] : ~ (\E a, b \in DOMAIN f : f[a] = f[b]) } + /\ Powers = [ LKS -> 0..2 ] Init == /\ action = [kind |-> "none"] From 72f48ccf80c9aeaed778d41096e1bcf694070952 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 20 Sep 2022 15:35:25 +0100 Subject: [PATCH 009/127] replication --- .../keeper/prototyping/model/main.tla | 55 +++++-------------- 1 file changed, 13 insertions(+), 42 deletions(-) diff --git a/x/ccv/provider/keeper/prototyping/model/main.tla 
b/x/ccv/provider/keeper/prototyping/model/main.tla index 58f0654121..35c0535065 100644 --- a/x/ccv/provider/keeper/prototyping/model/main.tla +++ b/x/ccv/provider/keeper/prototyping/model/main.tla @@ -1,57 +1,28 @@ ---- MODULE main ---- EXTENDS Integers, FiniteSets, Sequences, TLC, Apalache -\* EXTENDS Integers, FiniteSets, Sequences, TLC (* - @typeAlias: ACTION = [ kind : Str ]; - @typeAlias: LK = Str; - @typeAlias: FK = Str; - @typeAlias: MAPPING = LK -> FK; - @typeAlias: POWER = LK -> Int; + @typeAlias: X = Str; *) CONSTANTS - \* @type: Set(LK); - LKS, - \* @type: Set(FK); - FKS, - \* @type: Set(MAPPING); - Mappings, - \* @type: Set(POWER); - Powers - + \* @type: Set(X); + XS VARIABLES - \* @type: ACTION; - action, - \* @type: MAPPING; - mapping, - \* @type: POWER; - power - -CInit == - /\ LKS = {"lk0", "lk1", "lk2"} - /\ FKS = {"fk0", "fk1", "fk2", "fk3", "fk4", "fk5", "fk6", "fk7", "fk8"} - /\ Mappings = { f \in [LKS -> FKS] : ~ (\E a, b \in DOMAIN f : f[a] = f[b]) } - /\ Powers = [ LKS -> 0..2 ] - -Init == - /\ action = [kind |-> "none"] - /\ mapping \in Mappings - /\ power \in Powers - -EndBlock == - \E m \in Mappings, p \in Powers : - /\ UNCHANGED action - /\ mapping' = m - /\ power' = p - -Next == - \/ EndBlock - \/ TRUE + \* @type: Int; + a + +CInit == XS = {"k", "u", "v"} + +Init == a = 42 + +Go == a' = 42 + +Next == Go Inv == TRUE From dc617489f9152bdaea21dedaa42a73705f53ceb3 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 20 Sep 2022 15:52:46 +0100 Subject: [PATCH 010/127] typecheck --- .../keeper/prototyping/model/main.tla | 55 ++++++++++++++----- 1 file changed, 42 insertions(+), 13 deletions(-) diff --git a/x/ccv/provider/keeper/prototyping/model/main.tla b/x/ccv/provider/keeper/prototyping/model/main.tla index 35c0535065..803fa7d9e9 100644 --- a/x/ccv/provider/keeper/prototyping/model/main.tla +++ b/x/ccv/provider/keeper/prototyping/model/main.tla @@ -4,25 +4,54 @@ EXTENDS Integers, FiniteSets, Sequences, TLC, Apalache (* - @typeAlias: X = Str; + @typeAlias: action = { kind : Str }; + @typeAlias: lk = Str; + @typeAlias: fk = Str; + @typeAlias: mapping = $lk -> $fk; + @typeAlias: power = $lk -> Int; *) +TypeAliases == TRUE + CONSTANTS - \* @type: Set(X); - XS + \* @type: Set($lk); + LKS, + \* @type: Set($fk); + FKS, + \* @type: Set($mapping); + MAPPINGS, + \* @type: Set($power); + POWERS VARIABLES - \* @type: Int; - a - -CInit == XS = {"k", "u", "v"} - -Init == a = 42 - -Go == a' = 42 - -Next == Go + \* @type: $action; + Action, + \* @type: $mapping; + Mapping, + \* @type: $power; + Power + +CInit == + /\ LKS = {"lk0", "lk1", "lk2"} + /\ FKS = {"fk0", "fk1", "fk2", "fk3", "fk4", "fk5", "fk6", "fk7", "fk8"} + /\ MAPPINGS = { f \in [LKS -> FKS] : ~ (\E a, b \in DOMAIN f : f[a] = f[b]) } + /\ POWERS = [ LKS -> 0..2 ] + +Init == + /\ Action = [kind |-> "none"] + /\ Mapping \in MAPPINGS + /\ Power \in POWERS + +EndBlock == + \E m \in MAPPINGS, p \in POWERS : + /\ UNCHANGED Action + /\ Mapping' = m + /\ Power' = p + +Next == + \/ EndBlock + \/ TRUE Inv == TRUE From a7328184f520b6ab7412657816c7a3f7f0e04596 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 20 Sep 2022 16:28:42 +0100 Subject: [PATCH 011/127] enable mappings --- .../keeper/prototyping/model/main.tla | 46 +++++++++++++++++-- 1 file changed, 41 insertions(+), 5 deletions(-) diff --git a/x/ccv/provider/keeper/prototyping/model/main.tla b/x/ccv/provider/keeper/prototyping/model/main.tla index 803fa7d9e9..d64db376d6 100644 --- a/x/ccv/provider/keeper/prototyping/model/main.tla +++ 
b/x/ccv/provider/keeper/prototyping/model/main.tla @@ -1,6 +1,7 @@ ---- MODULE main ---- EXTENDS Integers, FiniteSets, Sequences, TLC, Apalache +\* EXTENDS Integers, FiniteSets, Sequences, TLC (* @@ -30,28 +31,63 @@ VARIABLES \* @type: $mapping; Mapping, \* @type: $power; - Power + Power, + \* @type: Int; + TP, + \* @type: Int; + TC, + \* @type: Int; + TM + CInit == /\ LKS = {"lk0", "lk1", "lk2"} /\ FKS = {"fk0", "fk1", "fk2", "fk3", "fk4", "fk5", "fk6", "fk7", "fk8"} - /\ MAPPINGS = { f \in [LKS -> FKS] : ~ (\E a, b \in DOMAIN f : f[a] = f[b]) } + /\ MAPPINGS = [LKS -> FKS] /\ POWERS = [ LKS -> 0..2 ] Init == + \E m \in MAPPINGS : + /\ \A a, b \in DOMAIN m : a = b \/ m[a] # m[b] /\ Action = [kind |-> "none"] - /\ Mapping \in MAPPINGS + /\ Mapping = m /\ Power \in POWERS + /\ TP = 1 + /\ TC = 0 + /\ TM = 0 EndBlock == \E m \in MAPPINGS, p \in POWERS : + /\ \A a, b \in DOMAIN m : a = b \/ m[a] # m[b] /\ UNCHANGED Action - /\ Mapping' = m + /\ Mapping' = m /\ Power' = p + /\ TP' = TP + 1 + /\ UNCHANGED TC + /\ UNCHANGED TM + +UpdateConsumer == + \E t \in (TC+1)..TP : + /\ UNCHANGED Action + /\ UNCHANGED Mapping + /\ UNCHANGED Power + /\ UNCHANGED TP + /\ TC' = t + /\ UNCHANGED TM + +ReceiveMaturities == + \E t \in (TM+1)..TC : + /\ UNCHANGED Action + /\ UNCHANGED Mapping + /\ UNCHANGED Power + /\ UNCHANGED TP + /\ UNCHANGED TC + /\ TM' = t Next == \/ EndBlock - \/ TRUE + \/ UpdateConsumer + \/ ReceiveMaturities Inv == TRUE From b4b3991515527dfdef682e16059f11286144deee Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 20 Sep 2022 17:25:33 +0100 Subject: [PATCH 012/127] shuffle --- x/ccv/provider/keeper/example_test.go | 88 +++++++++++++++++++ .../keydelegation.go} | 7 +- .../keydelegation/keydelegation_test.go | 57 ++++++++++++ .../prototyping/model/.gitignore | 0 .../prototyping/model/library/Apalache.tla | 0 .../prototyping/model/library/Bags.tla | 0 .../model/library/FiniteSetTheorems.tla | 0 .../library/FiniteSetTheorems_proofs.tla | 0 .../prototyping/model/library/FiniteSets.tla | 0 .../model/library/FunctionTheorems.tla | 0 .../model/library/FunctionTheorems_proofs.tla | 0 .../prototyping/model/library/Functions.tla | 0 .../prototyping/model/library/JectionThm.tla | 0 .../prototyping/model/library/Jections.tla | 0 .../model/library/NaturalsInduction.tla | 0 .../library/NaturalsInduction_proofs.tla | 0 .../prototyping/model/library/RealTime.tla | 0 .../model/library/SequenceTheorems.tla | 0 .../model/library/SequenceTheorems_proofs.tla | 0 .../prototyping/model/library/TLAPS.tla | 0 .../model/library/WellFoundedInduction.tla | 0 .../library/WellFoundedInduction_proofs.tla | 0 .../prototyping/model/library/ref/CCV.tla | 0 .../prototyping/model/library/ref/MC_CCV.tla | 0 .../model/library/ref/typedefs.tla | 0 .../prototyping/model/library/tlcFolds.tla | 0 .../prototyping/model/main.cfg | 0 .../prototyping/model/main.tla | 14 ++- .../prototyping/model/traceUtil.py | 0 .../prototyping/prototype.py | 0 30 files changed, 162 insertions(+), 4 deletions(-) create mode 100644 x/ccv/provider/keeper/example_test.go rename x/ccv/provider/{keeper/key_delegation.go => keydelegation/keydelegation.go} (96%) create mode 100644 x/ccv/provider/keydelegation/keydelegation_test.go rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/.gitignore (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/Apalache.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/Bags.tla (100%) rename x/ccv/provider/{keeper => 
keydelegation}/prototyping/model/library/FiniteSetTheorems.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/FiniteSetTheorems_proofs.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/FiniteSets.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/FunctionTheorems.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/FunctionTheorems_proofs.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/Functions.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/JectionThm.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/Jections.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/NaturalsInduction.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/NaturalsInduction_proofs.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/RealTime.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/SequenceTheorems.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/SequenceTheorems_proofs.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/TLAPS.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/WellFoundedInduction.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/WellFoundedInduction_proofs.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/ref/CCV.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/ref/MC_CCV.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/ref/typedefs.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/library/tlcFolds.tla (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/main.cfg (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/main.tla (85%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/model/traceUtil.py (100%) rename x/ccv/provider/{keeper => keydelegation}/prototyping/prototype.py (100%) diff --git a/x/ccv/provider/keeper/example_test.go b/x/ccv/provider/keeper/example_test.go new file mode 100644 index 0000000000..6c4cb53fe4 --- /dev/null +++ b/x/ccv/provider/keeper/example_test.go @@ -0,0 +1,88 @@ +package keeper_test + +import ( + "testing" + + "pgregory.net/rapid" +) + +// Queue implements integer queue with a fixed maximum size. +type Queue struct { + buf []int + in int + out int +} + +func NewQueue(n int) *Queue { + return &Queue{ + buf: make([]int, n+1), + } +} + +// Precondition: Size() > 0. +func (q *Queue) Get() int { + i := q.buf[q.out] + q.out = (q.out + 1) % len(q.buf) + return i +} + +// Precondition: Size() < n. +func (q *Queue) Put(i int) { + q.buf[q.in] = i + q.in = (q.in + 1) % len(q.buf) +} + +func (q *Queue) Size() int { + return (q.in - q.out) % len(q.buf) +} + +// queueMachine is a description of a rapid state machine for testing Queue +type queueMachine struct { + q *Queue // queue being tested + n int // maximum queue size + state []int // model of the queue +} + +// Init is an action for initializing a queueMachine instance. 
+func (m *queueMachine) Init(t *rapid.T) { + n := rapid.IntRange(1, 1000).Draw(t, "n") + m.q = NewQueue(n) + m.n = n +} + +// Get is a conditional action which removes an item from the queue. +func (m *queueMachine) Get(t *rapid.T) { + if m.q.Size() == 0 { + t.Skip("queue empty") + } + + i := m.q.Get() + if i != m.state[0] { + t.Fatalf("got invalid value: %v vs expected %v", i, m.state[0]) + } + m.state = m.state[1:] +} + +// Put is a conditional action which adds an items to the queue. +func (m *queueMachine) Put(t *rapid.T) { + if m.q.Size() == m.n { + t.Skip("queue full") + } + + i := rapid.Int().Draw(t, "i") + m.q.Put(i) + m.state = append(m.state, i) +} + +// Check runs after every action and verifies that all required invariants hold. +func (m *queueMachine) Check(t *rapid.T) { + if m.q.Size() != len(m.state) { + t.Fatalf("queue size mismatch: %v vs expected %v", m.q.Size(), len(m.state)) + } +} + +// Rename to TestQueue(t *testing.T) to make an actual (failing) test. +func ExampleRun_queue() { + var t *testing.T + rapid.Check(t, rapid.Run[*queueMachine]()) +} diff --git a/x/ccv/provider/keeper/key_delegation.go b/x/ccv/provider/keydelegation/keydelegation.go similarity index 96% rename from x/ccv/provider/keeper/key_delegation.go rename to x/ccv/provider/keydelegation/keydelegation.go index e6e4ce5d08..5099f325ea 100644 --- a/x/ccv/provider/keeper/key_delegation.go +++ b/x/ccv/provider/keydelegation/keydelegation.go @@ -1,4 +1,4 @@ -package keeper +package keydelegation import "errors" @@ -32,6 +32,11 @@ func MakeKeyDelegation() KeyDelegation { } func (m *KeyDelegation) SetForeignKey(lk LK, fk FK) { + if currFk, ok := m.localKeyToCurrentForeignKey[lk]; ok { + if currFk == fk { + return + } + } m.localKeyToCurrentForeignKey[lk] = fk if u, ok := m.localKeyToLastUpdate[lk]; ok { if 0 < u.power { diff --git a/x/ccv/provider/keydelegation/keydelegation_test.go b/x/ccv/provider/keydelegation/keydelegation_test.go new file mode 100644 index 0000000000..9bf4293ac6 --- /dev/null +++ b/x/ccv/provider/keydelegation/keydelegation_test.go @@ -0,0 +1,57 @@ +package keydelegation_test + +import ( + "testing" + + keydelegation "github.com/cosmos/interchain-security/x/ccv/provider/keydelegation" + "pgregory.net/rapid" +) + +// Machine is a description of a rapid state machine for testing Queue +type Machine struct { + kd keydelegation.KeyDelegation // queue being tested + state []int // model of the queue +} + +// Init is an action for initializing a queueMachine instance. +func (m *Machine) Init(t *rapid.T) { + m.kd = keydelegation.MakeKeyDelegation() + m.state = []int{} +} + +// Get is a conditional action which removes an item from the queue. +func (m *Machine) Get(t *rapid.T) { + if m.q.Size() == 0 { + t.Skip("queue empty") + } + + i := m.q.Get() + if i != m.state[0] { + t.Fatalf("got invalid value: %v vs expected %v", i, m.state[0]) + } + m.state = m.state[1:] +} + +// Put is a conditional action which adds an items to the queue. +func (m *Machine) SetForeignKey(t *rapid.T) { + if m.q.Size() == m.n { + t.Skip("queue full") + } + + i := rapid.Int().Draw(t, "i") + m.q.Put(i) + m.state = append(m.state, i) +} + +// Check runs after every action and verifies that all required invariants hold. +func (m *Machine) Check(t *rapid.T) { + if 32 == 42 { + t.Fatalf("error msg") + } +} + +// Rename to TestQueue(t *testing.T) to make an actual (failing) test. 
+func ExampleRun_keydelegation() { + var t *testing.T + rapid.Check(t, rapid.Run[*Machine]()) +} diff --git a/x/ccv/provider/keeper/prototyping/model/.gitignore b/x/ccv/provider/keydelegation/prototyping/model/.gitignore similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/.gitignore rename to x/ccv/provider/keydelegation/prototyping/model/.gitignore diff --git a/x/ccv/provider/keeper/prototyping/model/library/Apalache.tla b/x/ccv/provider/keydelegation/prototyping/model/library/Apalache.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/Apalache.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/Apalache.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/Bags.tla b/x/ccv/provider/keydelegation/prototyping/model/library/Bags.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/Bags.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/Bags.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/FiniteSetTheorems.tla b/x/ccv/provider/keydelegation/prototyping/model/library/FiniteSetTheorems.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/FiniteSetTheorems.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/FiniteSetTheorems.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/FiniteSetTheorems_proofs.tla b/x/ccv/provider/keydelegation/prototyping/model/library/FiniteSetTheorems_proofs.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/FiniteSetTheorems_proofs.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/FiniteSetTheorems_proofs.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/FiniteSets.tla b/x/ccv/provider/keydelegation/prototyping/model/library/FiniteSets.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/FiniteSets.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/FiniteSets.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/FunctionTheorems.tla b/x/ccv/provider/keydelegation/prototyping/model/library/FunctionTheorems.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/FunctionTheorems.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/FunctionTheorems.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/FunctionTheorems_proofs.tla b/x/ccv/provider/keydelegation/prototyping/model/library/FunctionTheorems_proofs.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/FunctionTheorems_proofs.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/FunctionTheorems_proofs.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/Functions.tla b/x/ccv/provider/keydelegation/prototyping/model/library/Functions.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/Functions.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/Functions.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/JectionThm.tla b/x/ccv/provider/keydelegation/prototyping/model/library/JectionThm.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/JectionThm.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/JectionThm.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/Jections.tla 
b/x/ccv/provider/keydelegation/prototyping/model/library/Jections.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/Jections.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/Jections.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/NaturalsInduction.tla b/x/ccv/provider/keydelegation/prototyping/model/library/NaturalsInduction.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/NaturalsInduction.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/NaturalsInduction.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/NaturalsInduction_proofs.tla b/x/ccv/provider/keydelegation/prototyping/model/library/NaturalsInduction_proofs.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/NaturalsInduction_proofs.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/NaturalsInduction_proofs.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/RealTime.tla b/x/ccv/provider/keydelegation/prototyping/model/library/RealTime.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/RealTime.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/RealTime.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/SequenceTheorems.tla b/x/ccv/provider/keydelegation/prototyping/model/library/SequenceTheorems.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/SequenceTheorems.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/SequenceTheorems.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/SequenceTheorems_proofs.tla b/x/ccv/provider/keydelegation/prototyping/model/library/SequenceTheorems_proofs.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/SequenceTheorems_proofs.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/SequenceTheorems_proofs.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/TLAPS.tla b/x/ccv/provider/keydelegation/prototyping/model/library/TLAPS.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/TLAPS.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/TLAPS.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/WellFoundedInduction.tla b/x/ccv/provider/keydelegation/prototyping/model/library/WellFoundedInduction.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/WellFoundedInduction.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/WellFoundedInduction.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/WellFoundedInduction_proofs.tla b/x/ccv/provider/keydelegation/prototyping/model/library/WellFoundedInduction_proofs.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/WellFoundedInduction_proofs.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/WellFoundedInduction_proofs.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/ref/CCV.tla b/x/ccv/provider/keydelegation/prototyping/model/library/ref/CCV.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/ref/CCV.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/ref/CCV.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/ref/MC_CCV.tla b/x/ccv/provider/keydelegation/prototyping/model/library/ref/MC_CCV.tla similarity 
index 100% rename from x/ccv/provider/keeper/prototyping/model/library/ref/MC_CCV.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/ref/MC_CCV.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/ref/typedefs.tla b/x/ccv/provider/keydelegation/prototyping/model/library/ref/typedefs.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/ref/typedefs.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/ref/typedefs.tla diff --git a/x/ccv/provider/keeper/prototyping/model/library/tlcFolds.tla b/x/ccv/provider/keydelegation/prototyping/model/library/tlcFolds.tla similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/library/tlcFolds.tla rename to x/ccv/provider/keydelegation/prototyping/model/library/tlcFolds.tla diff --git a/x/ccv/provider/keeper/prototyping/model/main.cfg b/x/ccv/provider/keydelegation/prototyping/model/main.cfg similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/main.cfg rename to x/ccv/provider/keydelegation/prototyping/model/main.cfg diff --git a/x/ccv/provider/keeper/prototyping/model/main.tla b/x/ccv/provider/keydelegation/prototyping/model/main.tla similarity index 85% rename from x/ccv/provider/keeper/prototyping/model/main.tla rename to x/ccv/provider/keydelegation/prototyping/model/main.tla index d64db376d6..bc36eb0cff 100644 --- a/x/ccv/provider/keeper/prototyping/model/main.tla +++ b/x/ccv/provider/keydelegation/prototyping/model/main.tla @@ -10,6 +10,15 @@ EXTENDS Integers, FiniteSets, Sequences, TLC, Apalache @typeAlias: fk = Str; @typeAlias: mapping = $lk -> $fk; @typeAlias: power = $lk -> Int; + + @typeAlias: state = { + Action : $action, + Mapping : $mapping, + Power : $power, + TP : Int, + TC : Int, + TM : Int, + }; *) @@ -39,7 +48,6 @@ VARIABLES \* @type: Int; TM - CInit == /\ LKS = {"lk0", "lk1", "lk2"} /\ FKS = {"fk0", "fk1", "fk2", "fk3", "fk4", "fk5", "fk6", "fk7", "fk8"} @@ -48,7 +56,7 @@ CInit == Init == \E m \in MAPPINGS : - /\ \A a, b \in DOMAIN m : a = b \/ m[a] # m[b] + /\ \A a, b \in DOMAIN m : m[a] = m[b] => a = b /\ Action = [kind |-> "none"] /\ Mapping = m /\ Power \in POWERS @@ -58,7 +66,7 @@ Init == EndBlock == \E m \in MAPPINGS, p \in POWERS : - /\ \A a, b \in DOMAIN m : a = b \/ m[a] # m[b] + /\ \A a, b \in DOMAIN m : m[a] = m[b] => a = b /\ UNCHANGED Action /\ Mapping' = m /\ Power' = p diff --git a/x/ccv/provider/keeper/prototyping/model/traceUtil.py b/x/ccv/provider/keydelegation/prototyping/model/traceUtil.py similarity index 100% rename from x/ccv/provider/keeper/prototyping/model/traceUtil.py rename to x/ccv/provider/keydelegation/prototyping/model/traceUtil.py diff --git a/x/ccv/provider/keeper/prototyping/prototype.py b/x/ccv/provider/keydelegation/prototyping/prototype.py similarity index 100% rename from x/ccv/provider/keeper/prototyping/prototype.py rename to x/ccv/provider/keydelegation/prototyping/prototype.py From c4ca116823fa4cc63fd3fa01f213593d6bb4309e Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 20 Sep 2022 18:30:44 +0100 Subject: [PATCH 013/127] mv, simplify model --- .../keydelegation/keydelegation_test.go | 57 - .../prototyping/model/traceUtil.py | 16 - .../{keeper => keyguard}/example_test.go | 2 +- .../keydelegation.go => keyguard/keyguard.go} | 33 +- x/ccv/provider/keyguard/keyguard_test.go | 18 + .../prototyping/prototype.py | 0 .../prototyping/tla}/.gitignore | 0 .../prototyping/tla}/library/Apalache.tla | 0 .../prototyping/tla}/library/Bags.tla | 0 .../tla}/library/FiniteSetTheorems.tla | 0 
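
The `Init`/`EndBlock` hunk in main.tla above swaps the pairwise condition `a = b \/ m[a] # m[b]` for `m[a] = m[b] => a = b`; both say the key mapping is injective (no two local keys share a foreign key). A minimal Go sketch of the same check, using a hypothetical helper that is not part of these patches and assuming plain string keys:

package keyguard

// isInjective reports whether no two distinct local keys map to the same
// foreign key, i.e. m[a] == m[b] implies a == b.
func isInjective(m map[string]string) bool {
    seen := make(map[string]bool, len(m))
    for _, fk := range m {
        if seen[fk] {
            return false
        }
        seen[fk] = true
    }
    return true
}

For example, isInjective(map[string]string{"lk0": "fk0", "lk1": "fk0"}) is false, which is exactly the kind of mapping the quantifier over a, b rules out.
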
.../tla}/library/FiniteSetTheorems_proofs.tla | 0 .../prototyping/tla}/library/FiniteSets.tla | 0 .../tla}/library/FunctionTheorems.tla | 0 .../tla}/library/FunctionTheorems_proofs.tla | 0 .../prototyping/tla}/library/Functions.tla | 0 .../prototyping/tla}/library/JectionThm.tla | 0 .../prototyping/tla}/library/Jections.tla | 0 .../tla}/library/NaturalsInduction.tla | 0 .../tla}/library/NaturalsInduction_proofs.tla | 0 .../prototyping/tla}/library/RealTime.tla | 0 .../tla}/library/SequenceTheorems.tla | 0 .../tla}/library/SequenceTheorems_proofs.tla | 0 .../prototyping/tla}/library/TLAPS.tla | 0 .../tla}/library/WellFoundedInduction.tla | 0 .../library/WellFoundedInduction_proofs.tla | 0 .../prototyping/tla}/library/ref/CCV.tla | 0 .../prototyping/tla}/library/ref/MC_CCV.tla | 0 .../prototyping/tla}/library/ref/typedefs.tla | 0 .../prototyping/tla}/library/tlcFolds.tla | 0 .../prototyping/tla}/main.cfg | 0 .../prototyping/tla}/main.tla | 44 +- .../keyguard/prototyping/ts/.eslintignore | 1 + .../keyguard/prototyping/ts/.eslintrc.json | 27 + .../keyguard/prototyping/ts/.gitignore | 31 + .../keyguard/prototyping/ts/.prettierrc | 17 + .../prototyping/ts/__tests__/gen.test.ts | 14 + .../prototyping/ts/__tests__/tsconfig.json | 30 + .../keyguard/prototyping/ts/jest.config.js | 24 + .../keyguard/prototyping/ts/package.json | 50 + .../keyguard/prototyping/ts/src/main.ts | 23 + .../keyguard/prototyping/ts/tsconfig.json | 28 + .../prototyping/ts/tsconfig.release.json | 8 + .../keyguard/prototyping/ts/yarn.lock | 2862 +++++++++++++++++ 43 files changed, 3164 insertions(+), 121 deletions(-) delete mode 100644 x/ccv/provider/keydelegation/keydelegation_test.go delete mode 100644 x/ccv/provider/keydelegation/prototyping/model/traceUtil.py rename x/ccv/provider/{keeper => keyguard}/example_test.go (98%) rename x/ccv/provider/{keydelegation/keydelegation.go => keyguard/keyguard.go} (73%) create mode 100644 x/ccv/provider/keyguard/keyguard_test.go rename x/ccv/provider/{keydelegation => keyguard}/prototyping/prototype.py (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/.gitignore (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/Apalache.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/Bags.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/FiniteSetTheorems.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/FiniteSetTheorems_proofs.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/FiniteSets.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/FunctionTheorems.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/FunctionTheorems_proofs.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/Functions.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/JectionThm.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/Jections.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/NaturalsInduction.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => 
keyguard/prototyping/tla}/library/NaturalsInduction_proofs.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/RealTime.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/SequenceTheorems.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/SequenceTheorems_proofs.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/TLAPS.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/WellFoundedInduction.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/WellFoundedInduction_proofs.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/ref/CCV.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/ref/MC_CCV.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/ref/typedefs.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/library/tlcFolds.tla (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/main.cfg (100%) rename x/ccv/provider/{keydelegation/prototyping/model => keyguard/prototyping/tla}/main.tla (61%) create mode 100644 x/ccv/provider/keyguard/prototyping/ts/.eslintignore create mode 100644 x/ccv/provider/keyguard/prototyping/ts/.eslintrc.json create mode 100644 x/ccv/provider/keyguard/prototyping/ts/.gitignore create mode 100644 x/ccv/provider/keyguard/prototyping/ts/.prettierrc create mode 100644 x/ccv/provider/keyguard/prototyping/ts/__tests__/gen.test.ts create mode 100644 x/ccv/provider/keyguard/prototyping/ts/__tests__/tsconfig.json create mode 100644 x/ccv/provider/keyguard/prototyping/ts/jest.config.js create mode 100644 x/ccv/provider/keyguard/prototyping/ts/package.json create mode 100644 x/ccv/provider/keyguard/prototyping/ts/src/main.ts create mode 100644 x/ccv/provider/keyguard/prototyping/ts/tsconfig.json create mode 100644 x/ccv/provider/keyguard/prototyping/ts/tsconfig.release.json create mode 100644 x/ccv/provider/keyguard/prototyping/ts/yarn.lock diff --git a/x/ccv/provider/keydelegation/keydelegation_test.go b/x/ccv/provider/keydelegation/keydelegation_test.go deleted file mode 100644 index 9bf4293ac6..0000000000 --- a/x/ccv/provider/keydelegation/keydelegation_test.go +++ /dev/null @@ -1,57 +0,0 @@ -package keydelegation_test - -import ( - "testing" - - keydelegation "github.com/cosmos/interchain-security/x/ccv/provider/keydelegation" - "pgregory.net/rapid" -) - -// Machine is a description of a rapid state machine for testing Queue -type Machine struct { - kd keydelegation.KeyDelegation // queue being tested - state []int // model of the queue -} - -// Init is an action for initializing a queueMachine instance. -func (m *Machine) Init(t *rapid.T) { - m.kd = keydelegation.MakeKeyDelegation() - m.state = []int{} -} - -// Get is a conditional action which removes an item from the queue. -func (m *Machine) Get(t *rapid.T) { - if m.q.Size() == 0 { - t.Skip("queue empty") - } - - i := m.q.Get() - if i != m.state[0] { - t.Fatalf("got invalid value: %v vs expected %v", i, m.state[0]) - } - m.state = m.state[1:] -} - -// Put is a conditional action which adds an items to the queue. 
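
The deleted harness above is still wired to rapid's queue example: `Machine` only declares the fields `kd` and `state`, yet the actions call `m.q` and `m.n`, so the file never compiled as written. Below is a minimal, self-contained sketch of a compiling rapid state machine over a key-mapping store; `mappingStore` and `MappingMachine` are hypothetical stand-ins, not the KeyGuard type from these patches, and string keys are an assumption.

package keyguard_test

import (
    "testing"

    "pgregory.net/rapid"
)

// mappingStore is a hypothetical stand-in for an lk -> fk assignment with a
// reverse index; it is not the type introduced by this patch.
type mappingStore struct {
    lkToFk map[string]string
    fkToLk map[string]string
}

// MappingMachine drives the store and compares it against a plain map model.
type MappingMachine struct {
    store mappingStore
    model map[string]string // reference model: lk -> fk
}

func (m *MappingMachine) Init(t *rapid.T) {
    m.store = mappingStore{lkToFk: map[string]string{}, fkToLk: map[string]string{}}
    m.model = map[string]string{}
}

// SetForeignKey assigns fk to lk, evicting any previous owner of fk so the
// assignment stays injective.
func (m *MappingMachine) SetForeignKey(t *rapid.T) {
    lk := rapid.SampledFrom([]string{"lk0", "lk1", "lk2"}).Draw(t, "lk")
    fk := rapid.SampledFrom([]string{"fk0", "fk1", "fk2", "fk3"}).Draw(t, "fk")
    if prevLk, ok := m.store.fkToLk[fk]; ok {
        delete(m.store.lkToFk, prevLk)
        delete(m.model, prevLk)
    }
    if prevFk, ok := m.store.lkToFk[lk]; ok {
        delete(m.store.fkToLk, prevFk)
    }
    m.store.lkToFk[lk] = fk
    m.store.fkToLk[fk] = lk
    m.model[lk] = fk
}

// Check runs after every action: the store must agree with the model and the
// reverse index must be the exact inverse of the forward one.
func (m *MappingMachine) Check(t *rapid.T) {
    if len(m.store.lkToFk) != len(m.model) {
        t.Fatalf("store has %d entries, model has %d", len(m.store.lkToFk), len(m.model))
    }
    for lk, fk := range m.model {
        if m.store.lkToFk[lk] != fk {
            t.Fatalf("lkToFk[%q] = %q, want %q", lk, m.store.lkToFk[lk], fk)
        }
        if m.store.fkToLk[fk] != lk {
            t.Fatalf("fkToLk[%q] = %q, want %q", fk, m.store.fkToLk[fk], lk)
        }
    }
}

func TestMappingModel(t *testing.T) {
    rapid.Check(t, rapid.Run[*MappingMachine]())
}

Because rapid invokes Check after every action, any divergence between the reverse index and the reference model is reported as a minimal shrunken sequence of SetForeignKey calls.
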
-func (m *Machine) SetForeignKey(t *rapid.T) { - if m.q.Size() == m.n { - t.Skip("queue full") - } - - i := rapid.Int().Draw(t, "i") - m.q.Put(i) - m.state = append(m.state, i) -} - -// Check runs after every action and verifies that all required invariants hold. -func (m *Machine) Check(t *rapid.T) { - if 32 == 42 { - t.Fatalf("error msg") - } -} - -// Rename to TestQueue(t *testing.T) to make an actual (failing) test. -func ExampleRun_keydelegation() { - var t *testing.T - rapid.Check(t, rapid.Run[*Machine]()) -} diff --git a/x/ccv/provider/keydelegation/prototyping/model/traceUtil.py b/x/ccv/provider/keydelegation/prototyping/model/traceUtil.py deleted file mode 100644 index 26f3a345d4..0000000000 --- a/x/ccv/provider/keydelegation/prototyping/model/traceUtil.py +++ /dev/null @@ -1,16 +0,0 @@ -import json - -PREFIX_DRIVER = "/Users/danwt/Documents/work/interchain-security/tests/difference/consumerStuttering/driver/" -PREFIX_TRACE_OUPUTS = "/Users/danwt/Documents/work/interchain-security/tests/difference/consumerStuttering/model/_apalache-out/main.tla/" -DIR = "2022-09-06T17-55-50_14502121096210630182" -js = [] -for i in range(10): - fn = f"{PREFIX_TRACE_OUPUTS}{DIR}/example{i+1}.itf.json" - with open(fn, 'r') as fd: - content = fd.read() - j = json.loads(content) - js.append(j) - -fn = f"{PREFIX_DRIVER}traces.json" -with open(fn, 'w') as fd: - fd.write(json.dumps(js, indent=2)) diff --git a/x/ccv/provider/keeper/example_test.go b/x/ccv/provider/keyguard/example_test.go similarity index 98% rename from x/ccv/provider/keeper/example_test.go rename to x/ccv/provider/keyguard/example_test.go index 6c4cb53fe4..bd2df572b4 100644 --- a/x/ccv/provider/keeper/example_test.go +++ b/x/ccv/provider/keyguard/example_test.go @@ -1,4 +1,4 @@ -package keeper_test +package keyguard_test import ( "testing" diff --git a/x/ccv/provider/keydelegation/keydelegation.go b/x/ccv/provider/keyguard/keyguard.go similarity index 73% rename from x/ccv/provider/keydelegation/keydelegation.go rename to x/ccv/provider/keyguard/keyguard.go index 5099f325ea..7468204df3 100644 --- a/x/ccv/provider/keydelegation/keydelegation.go +++ b/x/ccv/provider/keyguard/keyguard.go @@ -1,4 +1,4 @@ -package keydelegation +package keyguard import "errors" @@ -11,18 +11,25 @@ type update struct { power int } -type KeyDelegation struct { - // TODO: how to GC this? +type KeyGuard struct { + // A new key is added when a relevant update is returned by ComputeUpdates + // the key is deleted at earliest after sending an update corresponding + // to a call to staking::DeleteValidator localKeyToLastUpdate map[LK]update - // TODO: how to GC this? 
- localKeyToCurrentForeignKey map[LK]FK - foreignKeyToLocalKey map[FK]LK - foreignKeyToVscidWhenLastSent map[FK]VSCID + // A new key is added on staking::CreateValidator + // the key is deleted at earliest after sending an update corresponding + // to a call to staking::DeleteValidator + localKeyToCurrentForeignKey map[LK]FK + // Prunable state + foreignKeyToLocalKey map[FK]LK + // Prunable state + foreignKeyToVscidWhenLastSent map[FK]VSCID + // Ephemeral state: will be cleared after each call to ComputeUpdates localKeysForWhichUpdateMustBeSent []LK } -func MakeKeyDelegation() KeyDelegation { - return KeyDelegation{ +func MakeKeyGuard() KeyGuard { + return KeyGuard{ localKeyToLastUpdate: map[LK]update{}, localKeyToCurrentForeignKey: map[LK]FK{}, foreignKeyToLocalKey: map[FK]LK{}, @@ -31,7 +38,7 @@ func MakeKeyDelegation() KeyDelegation { } } -func (m *KeyDelegation) SetForeignKey(lk LK, fk FK) { +func (m *KeyGuard) SetForeignKey(lk LK, fk FK) { if currFk, ok := m.localKeyToCurrentForeignKey[lk]; ok { if currFk == fk { return @@ -47,7 +54,7 @@ func (m *KeyDelegation) SetForeignKey(lk LK, fk FK) { } } -func (m *KeyDelegation) GetLocalKey(fk FK) (LK, error) { +func (m *KeyGuard) GetLocalKey(fk FK) (LK, error) { if lk, ok := m.foreignKeyToLocalKey[fk]; ok { return lk, nil } else { @@ -55,7 +62,7 @@ func (m *KeyDelegation) GetLocalKey(fk FK) (LK, error) { } } -func (m *KeyDelegation) Prune(mostRecentlyMaturedVscid VSCID) { +func (m *KeyGuard) Prune(mostRecentlyMaturedVscid VSCID) { toRemove := []FK{} for fk, vscid := range m.foreignKeyToVscidWhenLastSent { if vscid <= mostRecentlyMaturedVscid { @@ -68,7 +75,7 @@ func (m *KeyDelegation) Prune(mostRecentlyMaturedVscid VSCID) { } } -func (m *KeyDelegation) ComputeUpdates(vscid VSCID, localUpdates []update) (foreignUpdates []update) { +func (m *KeyGuard) ComputeUpdates(vscid VSCID, localUpdates []update) (foreignUpdates []update) { foreignUpdates = []update{} // Create any updates for validators whose power did not change diff --git a/x/ccv/provider/keyguard/keyguard_test.go b/x/ccv/provider/keyguard/keyguard_test.go new file mode 100644 index 0000000000..df0a5abc30 --- /dev/null +++ b/x/ccv/provider/keyguard/keyguard_test.go @@ -0,0 +1,18 @@ +package keyguard_test + +import ( + "testing" +) + +type TraceState struct { + TP int + TC int + TM int +} + +type DriverState struct { +} + +func TestKeyDelegation(t *testing.T) { + +} diff --git a/x/ccv/provider/keydelegation/prototyping/prototype.py b/x/ccv/provider/keyguard/prototyping/prototype.py similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/prototype.py rename to x/ccv/provider/keyguard/prototyping/prototype.py diff --git a/x/ccv/provider/keydelegation/prototyping/model/.gitignore b/x/ccv/provider/keyguard/prototyping/tla/.gitignore similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/.gitignore rename to x/ccv/provider/keyguard/prototyping/tla/.gitignore diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/Apalache.tla b/x/ccv/provider/keyguard/prototyping/tla/library/Apalache.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/Apalache.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/Apalache.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/Bags.tla b/x/ccv/provider/keyguard/prototyping/tla/library/Bags.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/Bags.tla rename to 
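
The new keyguard_test.go above only declares `TraceState`; its TP, TC and TM fields mirror the three counters of main.tla, where TP advances in EndBlock, TC catches up to TP in UpdateConsumer, and TM catches up to TC in ReceiveMaturities. A hedged sketch of a driver-side check over a replayed trace (the helper below is hypothetical and not part of these patches; trace states could come from Apalache ITF output such as the files traceUtil.py used to collect):

package keyguard_test

import "testing"

// checkTimeline asserts the ordering that the TLA+ actions preserve:
// counters never decrease and TM <= TC <= TP holds in every state.
func checkTimeline(t *testing.T, trace []TraceState) {
    t.Helper()
    prev := TraceState{TP: 1, TC: 0, TM: 0} // the Init state of main.tla
    for i, s := range trace {
        if !(s.TM <= s.TC && s.TC <= s.TP) {
            t.Fatalf("state %d violates TM <= TC <= TP: %+v", i, s)
        }
        if s.TP < prev.TP || s.TC < prev.TC || s.TM < prev.TM {
            t.Fatalf("state %d decreased a counter: %+v -> %+v", i, prev, s)
        }
        prev = s
    }
}
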
x/ccv/provider/keyguard/prototyping/tla/library/Bags.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/FiniteSetTheorems.tla b/x/ccv/provider/keyguard/prototyping/tla/library/FiniteSetTheorems.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/FiniteSetTheorems.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/FiniteSetTheorems.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/FiniteSetTheorems_proofs.tla b/x/ccv/provider/keyguard/prototyping/tla/library/FiniteSetTheorems_proofs.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/FiniteSetTheorems_proofs.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/FiniteSetTheorems_proofs.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/FiniteSets.tla b/x/ccv/provider/keyguard/prototyping/tla/library/FiniteSets.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/FiniteSets.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/FiniteSets.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/FunctionTheorems.tla b/x/ccv/provider/keyguard/prototyping/tla/library/FunctionTheorems.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/FunctionTheorems.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/FunctionTheorems.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/FunctionTheorems_proofs.tla b/x/ccv/provider/keyguard/prototyping/tla/library/FunctionTheorems_proofs.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/FunctionTheorems_proofs.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/FunctionTheorems_proofs.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/Functions.tla b/x/ccv/provider/keyguard/prototyping/tla/library/Functions.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/Functions.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/Functions.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/JectionThm.tla b/x/ccv/provider/keyguard/prototyping/tla/library/JectionThm.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/JectionThm.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/JectionThm.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/Jections.tla b/x/ccv/provider/keyguard/prototyping/tla/library/Jections.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/Jections.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/Jections.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/NaturalsInduction.tla b/x/ccv/provider/keyguard/prototyping/tla/library/NaturalsInduction.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/NaturalsInduction.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/NaturalsInduction.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/NaturalsInduction_proofs.tla b/x/ccv/provider/keyguard/prototyping/tla/library/NaturalsInduction_proofs.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/NaturalsInduction_proofs.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/NaturalsInduction_proofs.tla diff --git 
a/x/ccv/provider/keydelegation/prototyping/model/library/RealTime.tla b/x/ccv/provider/keyguard/prototyping/tla/library/RealTime.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/RealTime.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/RealTime.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/SequenceTheorems.tla b/x/ccv/provider/keyguard/prototyping/tla/library/SequenceTheorems.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/SequenceTheorems.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/SequenceTheorems.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/SequenceTheorems_proofs.tla b/x/ccv/provider/keyguard/prototyping/tla/library/SequenceTheorems_proofs.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/SequenceTheorems_proofs.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/SequenceTheorems_proofs.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/TLAPS.tla b/x/ccv/provider/keyguard/prototyping/tla/library/TLAPS.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/TLAPS.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/TLAPS.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/WellFoundedInduction.tla b/x/ccv/provider/keyguard/prototyping/tla/library/WellFoundedInduction.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/WellFoundedInduction.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/WellFoundedInduction.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/WellFoundedInduction_proofs.tla b/x/ccv/provider/keyguard/prototyping/tla/library/WellFoundedInduction_proofs.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/WellFoundedInduction_proofs.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/WellFoundedInduction_proofs.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/ref/CCV.tla b/x/ccv/provider/keyguard/prototyping/tla/library/ref/CCV.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/ref/CCV.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/ref/CCV.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/ref/MC_CCV.tla b/x/ccv/provider/keyguard/prototyping/tla/library/ref/MC_CCV.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/ref/MC_CCV.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/ref/MC_CCV.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/ref/typedefs.tla b/x/ccv/provider/keyguard/prototyping/tla/library/ref/typedefs.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/ref/typedefs.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/ref/typedefs.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/library/tlcFolds.tla b/x/ccv/provider/keyguard/prototyping/tla/library/tlcFolds.tla similarity index 100% rename from x/ccv/provider/keydelegation/prototyping/model/library/tlcFolds.tla rename to x/ccv/provider/keyguard/prototyping/tla/library/tlcFolds.tla diff --git a/x/ccv/provider/keydelegation/prototyping/model/main.cfg b/x/ccv/provider/keyguard/prototyping/tla/main.cfg similarity index 100% rename from 
x/ccv/provider/keydelegation/prototyping/model/main.cfg rename to x/ccv/provider/keyguard/prototyping/tla/main.cfg diff --git a/x/ccv/provider/keydelegation/prototyping/model/main.tla b/x/ccv/provider/keyguard/prototyping/tla/main.tla similarity index 61% rename from x/ccv/provider/keydelegation/prototyping/model/main.tla rename to x/ccv/provider/keyguard/prototyping/tla/main.tla index bc36eb0cff..07ad0374bd 100644 --- a/x/ccv/provider/keydelegation/prototyping/model/main.tla +++ b/x/ccv/provider/keyguard/prototyping/tla/main.tla @@ -5,20 +5,10 @@ EXTENDS Integers, FiniteSets, Sequences, TLC, Apalache (* - @typeAlias: action = { kind : Str }; @typeAlias: lk = Str; @typeAlias: fk = Str; @typeAlias: mapping = $lk -> $fk; - @typeAlias: power = $lk -> Int; - - @typeAlias: state = { - Action : $action, - Mapping : $mapping, - Power : $power, - TP : Int, - TC : Int, - TM : Int, - }; + @typeAlias: updates = $lk -> Int; *) @@ -28,19 +18,13 @@ CONSTANTS \* @type: Set($lk); LKS, \* @type: Set($fk); - FKS, - \* @type: Set($mapping); - MAPPINGS, - \* @type: Set($power); - POWERS + FKS VARIABLES - \* @type: $action; - Action, \* @type: $mapping; Mapping, - \* @type: $power; - Power, + \* @type: $updates; + Updates, \* @type: Int; TP, \* @type: Int; @@ -51,43 +35,37 @@ VARIABLES CInit == /\ LKS = {"lk0", "lk1", "lk2"} /\ FKS = {"fk0", "fk1", "fk2", "fk3", "fk4", "fk5", "fk6", "fk7", "fk8"} - /\ MAPPINGS = [LKS -> FKS] - /\ POWERS = [ LKS -> 0..2 ] Init == - \E m \in MAPPINGS : + \E m \in [LKS -> FKS], ss \in SUBSET LKS: /\ \A a, b \in DOMAIN m : m[a] = m[b] => a = b - /\ Action = [kind |-> "none"] /\ Mapping = m - /\ Power \in POWERS + /\ Updates \in [ss -> 0..2] /\ TP = 1 /\ TC = 0 /\ TM = 0 EndBlock == - \E m \in MAPPINGS, p \in POWERS : + \E m \in [LKS -> FKS], ss \in SUBSET LKS: /\ \A a, b \in DOMAIN m : m[a] = m[b] => a = b - /\ UNCHANGED Action /\ Mapping' = m - /\ Power' = p + /\ Updates' \in [ss -> 0..2] /\ TP' = TP + 1 /\ UNCHANGED TC /\ UNCHANGED TM UpdateConsumer == \E t \in (TC+1)..TP : - /\ UNCHANGED Action /\ UNCHANGED Mapping - /\ UNCHANGED Power + /\ UNCHANGED Updates /\ UNCHANGED TP /\ TC' = t /\ UNCHANGED TM ReceiveMaturities == \E t \in (TM+1)..TC : - /\ UNCHANGED Action /\ UNCHANGED Mapping - /\ UNCHANGED Power + /\ UNCHANGED Updates /\ UNCHANGED TP /\ UNCHANGED TC /\ TM' = t @@ -97,6 +75,4 @@ Next == \/ UpdateConsumer \/ ReceiveMaturities -Inv == TRUE - ==== diff --git a/x/ccv/provider/keyguard/prototyping/ts/.eslintignore b/x/ccv/provider/keyguard/prototyping/ts/.eslintignore new file mode 100644 index 0000000000..fc40c5a94d --- /dev/null +++ b/x/ccv/provider/keyguard/prototyping/ts/.eslintignore @@ -0,0 +1 @@ +/**/*.js diff --git a/x/ccv/provider/keyguard/prototyping/ts/.eslintrc.json b/x/ccv/provider/keyguard/prototyping/ts/.eslintrc.json new file mode 100644 index 0000000000..239cad64e9 --- /dev/null +++ b/x/ccv/provider/keyguard/prototyping/ts/.eslintrc.json @@ -0,0 +1,27 @@ +{ + "env": { + "browser": false, + "es6": true, + "node": true + }, + "parser": "@typescript-eslint/parser", + "parserOptions": { + "project": "tsconfig.json", + "sourceType": "module", + "ecmaVersion": 2020 + }, + "plugins": [ + "@typescript-eslint", + "jest" + ], + "extends": [ + "eslint:recommended", + "plugin:@typescript-eslint/eslint-recommended", + "plugin:@typescript-eslint/recommended", + "plugin:jest/recommended", + "prettier" + ], + "rules": { + "@typescript-eslint/no-var-requires": 0 + } +} \ No newline at end of file diff --git a/x/ccv/provider/keyguard/prototyping/ts/.gitignore 
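
In the simplified main.tla shown above, the MAPPINGS and POWERS constants are gone: each step instead draws an injective `m \in [LKS -> FKS]` together with `Updates \in [ss -> 0..2]` for some `ss \in SUBSET LKS`. A test driver mirroring that nondeterminism might draw the same data like this (hypothetical helpers, assuming string keys and math/rand; not part of these patches):

package keyguard_test

import "math/rand"

// randomInjectiveMapping picks a distinct foreign key for every local key,
// mirroring the injective m \in [LKS -> FKS]. It assumes len(fks) >= len(lks).
func randomInjectiveMapping(rng *rand.Rand, lks, fks []string) map[string]string {
    perm := rng.Perm(len(fks))
    m := map[string]string{}
    for i, lk := range lks {
        m[lk] = fks[perm[i]]
    }
    return m
}

// randomUpdates mirrors Updates \in [ss -> 0..2] for ss \in SUBSET LKS: each
// local key is included with probability 1/2 and gets a power in 0..2.
func randomUpdates(rng *rand.Rand, lks []string) map[string]int {
    updates := map[string]int{}
    for _, lk := range lks {
        if rng.Intn(2) == 0 {
            updates[lk] = rng.Intn(3)
        }
    }
    return updates
}

The len(fks) >= len(lks) requirement matches the CInit constants above (3 local keys, 9 foreign keys).
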
b/x/ccv/provider/keyguard/prototyping/ts/.gitignore new file mode 100644 index 0000000000..48c9c9e654 --- /dev/null +++ b/x/ccv/provider/keyguard/prototyping/ts/.gitignore @@ -0,0 +1,31 @@ +# Logs +logs +*.log +npm-debug.log* + +# Dependencies +node_modules/ + +# Coverage +coverage + +# Transpiled files +build/ + +# VS Code +.vscode +!.vscode/tasks.js + +# JetBrains IDEs +.idea/ + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Misc +.DS_Store + +traces/ \ No newline at end of file diff --git a/x/ccv/provider/keyguard/prototyping/ts/.prettierrc b/x/ccv/provider/keyguard/prototyping/ts/.prettierrc new file mode 100644 index 0000000000..f17b0543ee --- /dev/null +++ b/x/ccv/provider/keyguard/prototyping/ts/.prettierrc @@ -0,0 +1,17 @@ +{ + "singleQuote": true, + "trailingComma": "all", + "overrides": [ + { + "files": [ + "*.ts", + "*.mts" + ], + "options": { + "parser": "typescript" + } + } + ], + "tabWidth": 2, + "printWidth": 74 +} \ No newline at end of file diff --git a/x/ccv/provider/keyguard/prototyping/ts/__tests__/gen.test.ts b/x/ccv/provider/keyguard/prototyping/ts/__tests__/gen.test.ts new file mode 100644 index 0000000000..5bc75b7fdf --- /dev/null +++ b/x/ccv/provider/keyguard/prototyping/ts/__tests__/gen.test.ts @@ -0,0 +1,14 @@ +import { gen } from '../src/main.js'; + +/** + * This test is useful to check how much coverage + * trace generation actually gets over the model. + * + * yarn jest --collect-coverage + */ +describe('check properties', () => { + it('_', () => { + gen(120, true); + expect(true).toBeTruthy(); // satisfies linter + }); +}); diff --git a/x/ccv/provider/keyguard/prototyping/ts/__tests__/tsconfig.json b/x/ccv/provider/keyguard/prototyping/ts/__tests__/tsconfig.json new file mode 100644 index 0000000000..734ac11619 --- /dev/null +++ b/x/ccv/provider/keyguard/prototyping/ts/__tests__/tsconfig.json @@ -0,0 +1,30 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "node16", + "lib": [ + "ES2022" + ], + "moduleResolution": "Node16", + "rootDir": "..", + "outDir": "build", + "allowSyntheticDefaultImports": true, + "importHelpers": true, + "alwaysStrict": true, + "sourceMap": true, + "forceConsistentCasingInFileNames": true, + "noFallthroughCasesInSwitch": true, + "noImplicitReturns": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noImplicitAny": true, + "noImplicitThis": true, + "strictNullChecks": true, + "allowJs": true, + "esModuleInterop": true + }, + "include": [ + "src/**/*", + "__tests__/**/*" + ] +} \ No newline at end of file diff --git a/x/ccv/provider/keyguard/prototyping/ts/jest.config.js b/x/ccv/provider/keyguard/prototyping/ts/jest.config.js new file mode 100644 index 0000000000..2d2c1ef1d3 --- /dev/null +++ b/x/ccv/provider/keyguard/prototyping/ts/jest.config.js @@ -0,0 +1,24 @@ +export default { + testEnvironment: 'node', + preset: 'ts-jest/presets/js-with-ts-esm', + globals: { + 'ts-jest': { + useESM: true, + tsconfig: '/__tests__/tsconfig.json', + }, + }, + transformIgnorePatterns: [ + "node_modules/(?!(time-span|convert-hrtime))", + ], + moduleNameMapper: { + '^(\\.{1,2}/.*)\\.(m)?js$': '$1', + }, + testRegex: '(/__tests__/.*|(\\.|/)(test|spec))\\.(m)?ts$', + coverageDirectory: 'coverage', + collectCoverageFrom: [ + 'src/**/*.ts', + 'src/**/*.mts', + '!src/**/*.d.ts', + '!src/**/*.d.mts', + ], +}; diff --git a/x/ccv/provider/keyguard/prototyping/ts/package.json b/x/ccv/provider/keyguard/prototyping/ts/package.json new file mode 100644 index 0000000000..cb129e4ce2 --- /dev/null +++ 
b/x/ccv/provider/keyguard/prototyping/ts/package.json @@ -0,0 +1,50 @@ +{ + "name": "diff-tests-core", + "version": "3.0.1", + "description": "A model for generating difference tests for Interchain Security core protocol components.", + "type": "module", + "engines": { + "node": ">= 16.13 <17" + }, + "devDependencies": { + "@types/jest": "^28.1.4", + "@types/node": "~16", + "@typescript-eslint/eslint-plugin": "~5.26", + "@typescript-eslint/parser": "~5.26", + "eslint": "~8.16", + "eslint-config-prettier": "~8.5", + "eslint-plugin-jest": "~26.2", + "jest": "^28.1.1", + "prettier": "~2.6", + "rimraf": "~3.0", + "source-map-support": "^0.5.21", + "ts-jest": "^28.0.5", + "tsutils": "~3.21", + "typescript": "~4.7" + }, + "scripts": { + "start": "node build/src/main.js", + "clean": "rimraf coverage build tmp", + "prebuild": "npm run lint", + "build": "tsc -p tsconfig.json", + "build:watch": "tsc -w -p tsconfig.json", + "build:release": "npm run clean && tsc -p tsconfig.release.json", + "lint": "eslint . --ext .ts --ext .mts", + "test": "jest --coverage", + "prettier": "prettier --config .prettierrc --write .", + "test:watch": "jest --watch" + }, + "author": "", + "license": "Apache-2.0", + "dependencies": { + "@types/clone-deep": "^4.0.1", + "@types/underscore": "^1.11.4", + "clone-deep": "^4.0.1", + "time-span": "^5.1.0", + "tslib": "~2.4", + "underscore": "^1.13.4" + }, + "volta": { + "node": "16.13.0" + } +} \ No newline at end of file diff --git a/x/ccv/provider/keyguard/prototyping/ts/src/main.ts b/x/ccv/provider/keyguard/prototyping/ts/src/main.ts new file mode 100644 index 0000000000..4fb0c2d54e --- /dev/null +++ b/x/ccv/provider/keyguard/prototyping/ts/src/main.ts @@ -0,0 +1,23 @@ +import * as fs from 'fs'; +import _ from 'underscore'; +import timeSpan from 'time-span'; +import cloneDeep from 'clone-deep'; + +class Model { + constructor() {} + + endBlock = () => { + /** + * EndBlock can cause a complete change in the validator set + * and the powers of the validators + * + */ + }; +} + +function main() { + const m = new Model(); +} + +console.log(`Running main`); +main(); diff --git a/x/ccv/provider/keyguard/prototyping/ts/tsconfig.json b/x/ccv/provider/keyguard/prototyping/ts/tsconfig.json new file mode 100644 index 0000000000..2a6e221b27 --- /dev/null +++ b/x/ccv/provider/keyguard/prototyping/ts/tsconfig.json @@ -0,0 +1,28 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "node16", + "lib": [ + "ES2022" + ], + "moduleResolution": "Node16", + "rootDir": ".", + "outDir": "build", + "allowSyntheticDefaultImports": true, + "importHelpers": true, + "alwaysStrict": true, + "sourceMap": true, + "forceConsistentCasingInFileNames": true, + "noFallthroughCasesInSwitch": true, + "noImplicitReturns": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noImplicitAny": true, + "noImplicitThis": true, + "strictNullChecks": true, + }, + "include": [ + "src/**/*", + "__tests__/**/*" + ] +} \ No newline at end of file diff --git a/x/ccv/provider/keyguard/prototyping/ts/tsconfig.release.json b/x/ccv/provider/keyguard/prototyping/ts/tsconfig.release.json new file mode 100644 index 0000000000..f08638c215 --- /dev/null +++ b/x/ccv/provider/keyguard/prototyping/ts/tsconfig.release.json @@ -0,0 +1,8 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "sourceMap": false, + "removeComments": true + }, + "include": ["src/**/*"] +} diff --git a/x/ccv/provider/keyguard/prototyping/ts/yarn.lock b/x/ccv/provider/keyguard/prototyping/ts/yarn.lock new file mode 100644 index 
0000000000..77abc940b0 --- /dev/null +++ b/x/ccv/provider/keyguard/prototyping/ts/yarn.lock @@ -0,0 +1,2862 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. +# yarn lockfile v1 + + +"@ampproject/remapping@^2.1.0": + version "2.2.0" + resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d" + integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== + dependencies: + "@jridgewell/gen-mapping" "^0.1.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a" + integrity sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q== + dependencies: + "@babel/highlight" "^7.18.6" + +"@babel/compat-data@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.18.6.tgz#8b37d24e88e8e21c499d4328db80577d8882fa53" + integrity sha512-tzulrgDT0QD6U7BJ4TKVk2SDDg7wlP39P9yAx1RfLy7vP/7rsDRlWVfbWxElslu56+r7QOhB2NSDsabYYruoZQ== + +"@babel/core@^7.11.6", "@babel/core@^7.12.3": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.18.6.tgz#54a107a3c298aee3fe5e1947a6464b9b6faca03d" + integrity sha512-cQbWBpxcbbs/IUredIPkHiAGULLV8iwgNRMFzvbhEXISp4f3rUUXE5+TIw6KwUWUR3DwyI6gmBRnmAtYaWehwQ== + dependencies: + "@ampproject/remapping" "^2.1.0" + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.18.6" + "@babel/helper-compilation-targets" "^7.18.6" + "@babel/helper-module-transforms" "^7.18.6" + "@babel/helpers" "^7.18.6" + "@babel/parser" "^7.18.6" + "@babel/template" "^7.18.6" + "@babel/traverse" "^7.18.6" + "@babel/types" "^7.18.6" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.1" + semver "^6.3.0" + +"@babel/generator@^7.18.6", "@babel/generator@^7.7.2": + version "7.18.7" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.18.7.tgz#2aa78da3c05aadfc82dbac16c99552fc802284bd" + integrity sha512-shck+7VLlY72a2w9c3zYWuE1pwOKEiQHV7GTUbSnhyl5eu3i04t30tBY82ZRWrDfo3gkakCFtevExnxbkf2a3A== + dependencies: + "@babel/types" "^7.18.7" + "@jridgewell/gen-mapping" "^0.3.2" + jsesc "^2.5.1" + +"@babel/helper-compilation-targets@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.18.6.tgz#18d35bfb9f83b1293c22c55b3d576c1315b6ed96" + integrity sha512-vFjbfhNCzqdeAtZflUFrG5YIFqGTqsctrtkZ1D/NB0mDW9TwW3GmmUepYY4G9wCET5rY5ugz4OGTcLd614IzQg== + dependencies: + "@babel/compat-data" "^7.18.6" + "@babel/helper-validator-option" "^7.18.6" + browserslist "^4.20.2" + semver "^6.3.0" + +"@babel/helper-environment-visitor@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.6.tgz#b7eee2b5b9d70602e59d1a6cad7dd24de7ca6cd7" + integrity sha512-8n6gSfn2baOY+qlp+VSzsosjCVGFqWKmDF0cCWOybh52Dw3SEyoWR1KrhMJASjLwIEkkAufZ0xvr+SxLHSpy2Q== + +"@babel/helper-function-name@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.18.6.tgz#8334fecb0afba66e6d87a7e8c6bb7fed79926b83" + integrity sha512-0mWMxV1aC97dhjCah5U5Ua7668r5ZmSC2DLfH2EZnf9c3/dHZKiFa5pRLMH5tjSl471tY6496ZWk/kjNONBxhw== + dependencies: + 
"@babel/template" "^7.18.6" + "@babel/types" "^7.18.6" + +"@babel/helper-hoist-variables@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678" + integrity sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-module-imports@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz#1e3ebdbbd08aad1437b428c50204db13c5a3ca6e" + integrity sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-module-transforms@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.18.6.tgz#57e3ca669e273d55c3cda55e6ebf552f37f483c8" + integrity sha512-L//phhB4al5uucwzlimruukHB3jRd5JGClwRMD/ROrVjXfLqovYnvQrK/JK36WYyVwGGO7OD3kMyVTjx+WVPhw== + dependencies: + "@babel/helper-environment-visitor" "^7.18.6" + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-simple-access" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/helper-validator-identifier" "^7.18.6" + "@babel/template" "^7.18.6" + "@babel/traverse" "^7.18.6" + "@babel/types" "^7.18.6" + +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.8.0": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.18.6.tgz#9448974dd4fb1d80fefe72e8a0af37809cd30d6d" + integrity sha512-gvZnm1YAAxh13eJdkb9EWHBnF3eAub3XTLCZEehHT2kWxiKVRL64+ae5Y6Ivne0mVHmMYKT+xWgZO+gQhuLUBg== + +"@babel/helper-simple-access@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.18.6.tgz#d6d8f51f4ac2978068df934b569f08f29788c7ea" + integrity sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-split-export-declaration@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz#7367949bc75b20c6d5a5d4a97bba2824ae8ef075" + integrity sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-validator-identifier@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.18.6.tgz#9c97e30d31b2b8c72a1d08984f2ca9b574d7a076" + integrity sha512-MmetCkz9ej86nJQV+sFCxoGGrUbU3q02kgLciwkrt9QqEB7cP39oKEY0PakknEO0Gu20SskMRi+AYZ3b1TpN9g== + +"@babel/helper-validator-option@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz#bf0d2b5a509b1f336099e4ff36e1a63aa5db4db8" + integrity sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw== + +"@babel/helpers@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.18.6.tgz#4c966140eaa1fcaa3d5a8c09d7db61077d4debfd" + integrity 
sha512-vzSiiqbQOghPngUYt/zWGvK3LAsPhz55vc9XNN0xAl2gV4ieShI2OQli5duxWHD+72PZPTKAcfcZDE1Cwc5zsQ== + dependencies: + "@babel/template" "^7.18.6" + "@babel/traverse" "^7.18.6" + "@babel/types" "^7.18.6" + +"@babel/highlight@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf" + integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== + dependencies: + "@babel/helper-validator-identifier" "^7.18.6" + chalk "^2.0.0" + js-tokens "^4.0.0" + +"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.18.6.tgz#845338edecad65ebffef058d3be851f1d28a63bc" + integrity sha512-uQVSa9jJUe/G/304lXspfWVpKpK4euFLgGiMQFOCpM/bgcAdeoHwi/OQz23O9GK2osz26ZiXRRV9aV+Yl1O8tw== + +"@babel/plugin-syntax-async-generators@^7.8.4": + version "7.8.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" + integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-bigint@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" + integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-class-properties@^7.8.3": + version "7.12.13" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" + integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== + dependencies: + "@babel/helper-plugin-utils" "^7.12.13" + +"@babel/plugin-syntax-import-meta@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" + integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-json-strings@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" + integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-logical-assignment-operators@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" + integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" + integrity 
sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-numeric-separator@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" + integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-object-rest-spread@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-catch-binding@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" + integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-chaining@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" + integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-top-level-await@^7.8.3": + version "7.14.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" + integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== + dependencies: + "@babel/helper-plugin-utils" "^7.14.5" + +"@babel/plugin-syntax-typescript@^7.7.2": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.18.6.tgz#1c09cd25795c7c2b8a4ba9ae49394576d4133285" + integrity sha512-mAWAuq4rvOepWCBid55JuRNvpTNf2UGVgoz4JV0fXEKolsVZDzsa4NqCef758WZJj/GDu0gVGItjKFiClTAmZA== + dependencies: + "@babel/helper-plugin-utils" "^7.18.6" + +"@babel/template@^7.18.6", "@babel/template@^7.3.3": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.18.6.tgz#1283f4993e00b929d6e2d3c72fdc9168a2977a31" + integrity sha512-JoDWzPe+wgBsTTgdnIma3iHNFC7YVJoPssVBDjiHfNlyt4YcunDtcDOUmfVDfCK5MfdsaIoX9PkijPhjH3nYUw== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/parser" "^7.18.6" + "@babel/types" "^7.18.6" + +"@babel/traverse@^7.18.6", "@babel/traverse@^7.7.2": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.18.6.tgz#a228562d2f46e89258efa4ddd0416942e2fd671d" + integrity sha512-zS/OKyqmD7lslOtFqbscH6gMLFYOfG1YPqCKfAW5KrTeolKqvB8UelR49Fpr6y93kYkW2Ik00mT1LOGiAGvizw== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.18.6" + "@babel/helper-environment-visitor" "^7.18.6" + "@babel/helper-function-name" "^7.18.6" + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/parser" 
"^7.18.6" + "@babel/types" "^7.18.6" + debug "^4.1.0" + globals "^11.1.0" + +"@babel/types@^7.0.0", "@babel/types@^7.18.6", "@babel/types@^7.18.7", "@babel/types@^7.3.0", "@babel/types@^7.3.3": + version "7.18.7" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.18.7.tgz#a4a2c910c15040ea52cdd1ddb1614a65c8041726" + integrity sha512-QG3yxTcTIBoAcQmkCs+wAPYZhu7Dk9rXKacINfNbdJDNERTbLQbHGyVG8q/YGMPeCJRIhSY0+fTc5+xuh6WPSQ== + dependencies: + "@babel/helper-validator-identifier" "^7.18.6" + to-fast-properties "^2.0.0" + +"@bcoe/v8-coverage@^0.2.3": + version "0.2.3" + resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" + integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== + +"@eslint/eslintrc@^1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-1.3.0.tgz#29f92c30bb3e771e4a2048c95fa6855392dfac4f" + integrity sha512-UWW0TMTmk2d7hLcWD1/e2g5HDM/HQ3csaLSqXCfqwh4uNDuNqlaKWXmEsL4Cs41Z0KnILNvwbHAah3C2yt06kw== + dependencies: + ajv "^6.12.4" + debug "^4.3.2" + espree "^9.3.2" + globals "^13.15.0" + ignore "^5.2.0" + import-fresh "^3.2.1" + js-yaml "^4.1.0" + minimatch "^3.1.2" + strip-json-comments "^3.1.1" + +"@humanwhocodes/config-array@^0.9.2": + version "0.9.5" + resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.9.5.tgz#2cbaf9a89460da24b5ca6531b8bbfc23e1df50c7" + integrity sha512-ObyMyWxZiCu/yTisA7uzx81s40xR2fD5Cg/2Kq7G02ajkNubJf6BopgDTmDyc3U7sXpNKM8cYOw7s7Tyr+DnCw== + dependencies: + "@humanwhocodes/object-schema" "^1.2.1" + debug "^4.1.1" + minimatch "^3.0.4" + +"@humanwhocodes/object-schema@^1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" + integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== + +"@istanbuljs/load-nyc-config@^1.0.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" + integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== + dependencies: + camelcase "^5.3.1" + find-up "^4.1.0" + get-package-type "^0.1.0" + js-yaml "^3.13.1" + resolve-from "^5.0.0" + +"@istanbuljs/schema@^0.1.2": + version "0.1.3" + resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" + integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== + +"@jest/console@^28.1.1": + version "28.1.1" + resolved "https://registry.yarnpkg.com/@jest/console/-/console-28.1.1.tgz#305f8ca50b6e70413839f54c0e002b60a0f2fd7d" + integrity sha512-0RiUocPVFEm3WRMOStIHbRWllG6iW6E3/gUPnf4lkrVFyXIIDeCe+vlKeYyFOMhB2EPE6FLFCNADSOOQMaqvyA== + dependencies: + "@jest/types" "^28.1.1" + "@types/node" "*" + chalk "^4.0.0" + jest-message-util "^28.1.1" + jest-util "^28.1.1" + slash "^3.0.0" + +"@jest/core@^28.1.2": + version "28.1.2" + resolved "https://registry.yarnpkg.com/@jest/core/-/core-28.1.2.tgz#eac519b9acbd154313854b8823a47b5c645f785a" + integrity sha512-Xo4E+Sb/nZODMGOPt2G3cMmCBqL4/W2Ijwr7/mrXlq4jdJwcFQ/9KrrJZT2adQRk2otVBXXOz1GRQ4Z5iOgvRQ== + dependencies: + "@jest/console" "^28.1.1" + "@jest/reporters" "^28.1.2" + "@jest/test-result" "^28.1.1" + "@jest/transform" "^28.1.2" + "@jest/types" 
"^28.1.1" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + ci-info "^3.2.0" + exit "^0.1.2" + graceful-fs "^4.2.9" + jest-changed-files "^28.0.2" + jest-config "^28.1.2" + jest-haste-map "^28.1.1" + jest-message-util "^28.1.1" + jest-regex-util "^28.0.2" + jest-resolve "^28.1.1" + jest-resolve-dependencies "^28.1.2" + jest-runner "^28.1.2" + jest-runtime "^28.1.2" + jest-snapshot "^28.1.2" + jest-util "^28.1.1" + jest-validate "^28.1.1" + jest-watcher "^28.1.1" + micromatch "^4.0.4" + pretty-format "^28.1.1" + rimraf "^3.0.0" + slash "^3.0.0" + strip-ansi "^6.0.0" + +"@jest/environment@^28.1.2": + version "28.1.2" + resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-28.1.2.tgz#94a052c0c5f9f8c8e6d13ea6da78dbc5d7d9b85b" + integrity sha512-I0CR1RUMmOzd0tRpz10oUfaChBWs+/Hrvn5xYhMEF/ZqrDaaeHwS8yDBqEWCrEnkH2g+WE/6g90oBv3nKpcm8Q== + dependencies: + "@jest/fake-timers" "^28.1.2" + "@jest/types" "^28.1.1" + "@types/node" "*" + jest-mock "^28.1.1" + +"@jest/expect-utils@^28.1.1": + version "28.1.1" + resolved "https://registry.yarnpkg.com/@jest/expect-utils/-/expect-utils-28.1.1.tgz#d84c346025b9f6f3886d02c48a6177e2b0360587" + integrity sha512-n/ghlvdhCdMI/hTcnn4qV57kQuV9OTsZzH1TTCVARANKhl6hXJqLKUkwX69ftMGpsbpt96SsDD8n8LD2d9+FRw== + dependencies: + jest-get-type "^28.0.2" + +"@jest/expect@^28.1.2": + version "28.1.2" + resolved "https://registry.yarnpkg.com/@jest/expect/-/expect-28.1.2.tgz#0b25acedff46e1e1e5606285306c8a399c12534f" + integrity sha512-HBzyZBeFBiOelNbBKN0pilWbbrGvwDUwAqMC46NVJmWm8AVkuE58NbG1s7DR4cxFt4U5cVLxofAoHxgvC5MyOw== + dependencies: + expect "^28.1.1" + jest-snapshot "^28.1.2" + +"@jest/fake-timers@^28.1.2": + version "28.1.2" + resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-28.1.2.tgz#d49e8ee4e02ba85a6e844a52a5e7c59c23e3b76f" + integrity sha512-xSYEI7Y0D5FbZN2LsCUj/EKRR1zfQYmGuAUVh6xTqhx7V5JhjgMcK5Pa0iR6WIk0GXiHDe0Ke4A+yERKE9saqg== + dependencies: + "@jest/types" "^28.1.1" + "@sinonjs/fake-timers" "^9.1.2" + "@types/node" "*" + jest-message-util "^28.1.1" + jest-mock "^28.1.1" + jest-util "^28.1.1" + +"@jest/globals@^28.1.2": + version "28.1.2" + resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-28.1.2.tgz#92fab296e337c7309c25e4202fb724f62249d83f" + integrity sha512-cz0lkJVDOtDaYhvT3Fv2U1B6FtBnV+OpEyJCzTHM1fdoTsU4QNLAt/H4RkiwEUU+dL4g/MFsoTuHeT2pvbo4Hg== + dependencies: + "@jest/environment" "^28.1.2" + "@jest/expect" "^28.1.2" + "@jest/types" "^28.1.1" + +"@jest/reporters@^28.1.2": + version "28.1.2" + resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-28.1.2.tgz#0327be4ce4d0d9ae49e7908656f89669d0c2a260" + integrity sha512-/whGLhiwAqeCTmQEouSigUZJPVl7sW8V26EiboImL+UyXznnr1a03/YZ2BX8OlFw0n+Zlwu+EZAITZtaeRTxyA== + dependencies: + "@bcoe/v8-coverage" "^0.2.3" + "@jest/console" "^28.1.1" + "@jest/test-result" "^28.1.1" + "@jest/transform" "^28.1.2" + "@jest/types" "^28.1.1" + "@jridgewell/trace-mapping" "^0.3.13" + "@types/node" "*" + chalk "^4.0.0" + collect-v8-coverage "^1.0.0" + exit "^0.1.2" + glob "^7.1.3" + graceful-fs "^4.2.9" + istanbul-lib-coverage "^3.0.0" + istanbul-lib-instrument "^5.1.0" + istanbul-lib-report "^3.0.0" + istanbul-lib-source-maps "^4.0.0" + istanbul-reports "^3.1.3" + jest-message-util "^28.1.1" + jest-util "^28.1.1" + jest-worker "^28.1.1" + slash "^3.0.0" + string-length "^4.0.1" + strip-ansi "^6.0.0" + terminal-link "^2.0.0" + v8-to-istanbul "^9.0.1" + +"@jest/schemas@^28.0.2": + version "28.0.2" + resolved 
"https://registry.yarnpkg.com/@jest/schemas/-/schemas-28.0.2.tgz#08c30df6a8d07eafea0aef9fb222c5e26d72e613" + integrity sha512-YVDJZjd4izeTDkij00vHHAymNXQ6WWsdChFRK86qck6Jpr3DCL5W3Is3vslviRlP+bLuMYRLbdp98amMvqudhA== + dependencies: + "@sinclair/typebox" "^0.23.3" + +"@jest/source-map@^28.1.2": + version "28.1.2" + resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-28.1.2.tgz#7fe832b172b497d6663cdff6c13b0a920e139e24" + integrity sha512-cV8Lx3BeStJb8ipPHnqVw/IM2VCMWO3crWZzYodSIkxXnRcXJipCdx1JCK0K5MsJJouZQTH73mzf4vgxRaH9ww== + dependencies: + "@jridgewell/trace-mapping" "^0.3.13" + callsites "^3.0.0" + graceful-fs "^4.2.9" + +"@jest/test-result@^28.1.1": + version "28.1.1" + resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-28.1.1.tgz#c6f18d1bbb01aa88925dd687872a75f8414b317a" + integrity sha512-hPmkugBktqL6rRzwWAtp1JtYT4VHwv8OQ+9lE5Gymj6dHzubI/oJHMUpPOt8NrdVWSrz9S7bHjJUmv2ggFoUNQ== + dependencies: + "@jest/console" "^28.1.1" + "@jest/types" "^28.1.1" + "@types/istanbul-lib-coverage" "^2.0.0" + collect-v8-coverage "^1.0.0" + +"@jest/test-sequencer@^28.1.1": + version "28.1.1" + resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-28.1.1.tgz#f594ee2331df75000afe0d1ae3237630ecec732e" + integrity sha512-nuL+dNSVMcWB7OOtgb0EGH5AjO4UBCt68SLP08rwmC+iRhyuJWS9MtZ/MpipxFwKAlHFftbMsydXqWre8B0+XA== + dependencies: + "@jest/test-result" "^28.1.1" + graceful-fs "^4.2.9" + jest-haste-map "^28.1.1" + slash "^3.0.0" + +"@jest/transform@^28.1.2": + version "28.1.2" + resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-28.1.2.tgz#b367962c53fd53821269bde050ce373e111327c1" + integrity sha512-3o+lKF6iweLeJFHBlMJysdaPbpoMmtbHEFsjzSv37HIq/wWt5ijTeO2Yf7MO5yyczCopD507cNwNLeX8Y/CuIg== + dependencies: + "@babel/core" "^7.11.6" + "@jest/types" "^28.1.1" + "@jridgewell/trace-mapping" "^0.3.13" + babel-plugin-istanbul "^6.1.1" + chalk "^4.0.0" + convert-source-map "^1.4.0" + fast-json-stable-stringify "^2.0.0" + graceful-fs "^4.2.9" + jest-haste-map "^28.1.1" + jest-regex-util "^28.0.2" + jest-util "^28.1.1" + micromatch "^4.0.4" + pirates "^4.0.4" + slash "^3.0.0" + write-file-atomic "^4.0.1" + +"@jest/types@^28.1.1": + version "28.1.1" + resolved "https://registry.yarnpkg.com/@jest/types/-/types-28.1.1.tgz#d059bbc80e6da6eda9f081f293299348bd78ee0b" + integrity sha512-vRXVqSg1VhDnB8bWcmvLzmg0Bt9CRKVgHPXqYwvWMX3TvAjeO+nRuK6+VdTKCtWOvYlmkF/HqNAL/z+N3B53Kw== + dependencies: + "@jest/schemas" "^28.0.2" + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^17.0.8" + chalk "^4.0.0" + +"@jridgewell/gen-mapping@^0.1.0": + version "0.1.1" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" + integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== + dependencies: + "@jridgewell/set-array" "^1.0.0" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@jridgewell/gen-mapping@^0.3.2": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" + integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/resolve-uri@^3.0.3": + version "3.0.8" + resolved 
"https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.0.8.tgz#687cc2bbf243f4e9a868ecf2262318e2658873a1" + integrity sha512-YK5G9LaddzGbcucK4c8h5tWFmMPBvRZ/uyWmN1/SbBdIvqGUdWGkJ5BAaccgs6XbzVLsqbPJrBSFwKv3kT9i7w== + +"@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.0.1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== + +"@jridgewell/sourcemap-codec@^1.4.10": + version "1.4.14" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== + +"@jridgewell/trace-mapping@^0.3.12", "@jridgewell/trace-mapping@^0.3.13", "@jridgewell/trace-mapping@^0.3.9": + version "0.3.14" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz#b231a081d8f66796e475ad588a1ef473112701ed" + integrity sha512-bJWEfQ9lPTvm3SneWwRFVLzrh6nhjwqw7TUFFBEMzwvg7t7PCDenf2lDwqo4NQXzdpgBXyFgDWnQA+2vkruksQ== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== + dependencies: + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" + +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + +"@nodelib/fs.walk@^1.2.3": + version "1.2.8" + resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== + dependencies: + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" + +"@sinclair/typebox@^0.23.3": + version "0.23.5" + resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.23.5.tgz#93f7b9f4e3285a7a9ade7557d9a8d36809cbc47d" + integrity sha512-AFBVi/iT4g20DHoujvMH1aEDn8fGJh4xsRGCP6d8RpLPMqsNPvW01Jcn0QysXTsg++/xj25NmJsGyH9xug/wKg== + +"@sinonjs/commons@^1.7.0": + version "1.8.3" + resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" + integrity sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ== + dependencies: + type-detect "4.0.8" + +"@sinonjs/fake-timers@^9.1.2": + version "9.1.2" + resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-9.1.2.tgz#4eaab737fab77332ab132d396a3c0d364bd0ea8c" + integrity sha512-BPS4ynJW/o92PUR4wgriz2Ud5gpST5vz6GQfMixEDK0Z8ZCUv2M7SkBLykH56T++Xs+8ln9zTGbOvNGIe02/jw== + dependencies: + "@sinonjs/commons" "^1.7.0" + +"@types/babel__core@^7.1.14": + version "7.1.19" + resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.19.tgz#7b497495b7d1b4812bdb9d02804d0576f43ee460" + integrity sha512-WEOTgRsbYkvA/KCsDwVEGkd7WAr1e3g31VHQ8zy5gul/V1qKullU/BU5I68X5v7V3GnB9eotmom4v5a5gjxorw== + 
dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + "@types/babel__generator" "*" + "@types/babel__template" "*" + "@types/babel__traverse" "*" + +"@types/babel__generator@*": + version "7.6.4" + resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.4.tgz#1f20ce4c5b1990b37900b63f050182d28c2439b7" + integrity sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg== + dependencies: + "@babel/types" "^7.0.0" + +"@types/babel__template@*": + version "7.4.1" + resolved "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.4.1.tgz#3d1a48fd9d6c0edfd56f2ff578daed48f36c8969" + integrity sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g== + dependencies: + "@babel/parser" "^7.1.0" + "@babel/types" "^7.0.0" + +"@types/babel__traverse@*", "@types/babel__traverse@^7.0.6": + version "7.17.1" + resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.17.1.tgz#1a0e73e8c28c7e832656db372b779bfd2ef37314" + integrity sha512-kVzjari1s2YVi77D3w1yuvohV2idweYXMCDzqBiVNN63TcDWrIlTVOYpqVrvbbyOE/IyzBoTKF0fdnLPEORFxA== + dependencies: + "@babel/types" "^7.3.0" + +"@types/clone-deep@^4.0.1": + version "4.0.1" + resolved "https://registry.yarnpkg.com/@types/clone-deep/-/clone-deep-4.0.1.tgz#7c488443ab9f571cd343d774551b78e9264ea990" + integrity sha512-bdkCSkyVHsgl3Goe1y16T9k6JuQx7SiDREkq728QjKmTZkGJZuS8R3gGcnGzVuGBP0mssKrzM/GlMOQxtip9cg== + +"@types/graceful-fs@^4.1.3": + version "4.1.5" + resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15" + integrity sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw== + dependencies: + "@types/node" "*" + +"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": + version "2.0.4" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" + integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== + +"@types/istanbul-lib-report@*": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" + integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== + dependencies: + "@types/istanbul-lib-coverage" "*" + +"@types/istanbul-reports@^3.0.0": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" + integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== + dependencies: + "@types/istanbul-lib-report" "*" + +"@types/jest@^28.1.4": + version "28.1.4" + resolved "https://registry.yarnpkg.com/@types/jest/-/jest-28.1.4.tgz#a11ee6c8fd0b52c19c9c18138b78bbcc201dad5a" + integrity sha512-telv6G5N7zRJiLcI3Rs3o+ipZ28EnE+7EvF0pSrt2pZOMnAVI/f+6/LucDxOvcBcTeTL3JMF744BbVQAVBUQRA== + dependencies: + jest-matcher-utils "^28.0.0" + pretty-format "^28.0.0" + +"@types/json-schema@^7.0.9": + version "7.0.11" + resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3" + integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== + 
+"@types/node@*": + version "18.0.0" + resolved "https://registry.yarnpkg.com/@types/node/-/node-18.0.0.tgz#67c7b724e1bcdd7a8821ce0d5ee184d3b4dd525a" + integrity sha512-cHlGmko4gWLVI27cGJntjs/Sj8th9aYwplmZFwmmgYQQvL5NUsgVJG7OddLvNfLqYS31KFN0s3qlaD9qCaxACA== + +"@types/node@~16": + version "16.11.42" + resolved "https://registry.yarnpkg.com/@types/node/-/node-16.11.42.tgz#d2a75c58e9b0902b82dc54bd4c13f8ef12bd1020" + integrity sha512-iwLrPOopPy6V3E+1yHTpJea3bdsNso0b0utLOJJwaa/PLzqBt3GZl3stMcakc/gr89SfcNk2ki3z7Gvue9hYGQ== + +"@types/prettier@^2.1.5": + version "2.6.3" + resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.6.3.tgz#68ada76827b0010d0db071f739314fa429943d0a" + integrity sha512-ymZk3LEC/fsut+/Q5qejp6R9O1rMxz3XaRHDV6kX8MrGAhOSPqVARbDi+EZvInBpw+BnCX3TD240byVkOfQsHg== + +"@types/stack-utils@^2.0.0": + version "2.0.1" + resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c" + integrity sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw== + +"@types/underscore@^1.11.4": + version "1.11.4" + resolved "https://registry.yarnpkg.com/@types/underscore/-/underscore-1.11.4.tgz#62e393f8bc4bd8a06154d110c7d042a93751def3" + integrity sha512-uO4CD2ELOjw8tasUrAhvnn2W4A0ZECOvMjCivJr4gA9pGgjv+qxKWY9GLTMVEK8ej85BxQOocUyE7hImmSQYcg== + +"@types/yargs-parser@*": + version "21.0.0" + resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" + integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== + +"@types/yargs@^17.0.8": + version "17.0.10" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.10.tgz#591522fce85d8739bca7b8bb90d048e4478d186a" + integrity sha512-gmEaFwpj/7f/ROdtIlci1R1VYU1J4j95m8T+Tj3iBgiBFKg1foE/PSl93bBd5T9LDXNPo8UlNN6W0qwD8O5OaA== + dependencies: + "@types/yargs-parser" "*" + +"@typescript-eslint/eslint-plugin@~5.26": + version "5.26.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.26.0.tgz#c1f98ccba9d345e38992975d3ca56ed6260643c2" + integrity sha512-oGCmo0PqnRZZndr+KwvvAUvD3kNE4AfyoGCwOZpoCncSh4MVD06JTE8XQa2u9u+NX5CsyZMBTEc2C72zx38eYA== + dependencies: + "@typescript-eslint/scope-manager" "5.26.0" + "@typescript-eslint/type-utils" "5.26.0" + "@typescript-eslint/utils" "5.26.0" + debug "^4.3.4" + functional-red-black-tree "^1.0.1" + ignore "^5.2.0" + regexpp "^3.2.0" + semver "^7.3.7" + tsutils "^3.21.0" + +"@typescript-eslint/parser@~5.26": + version "5.26.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-5.26.0.tgz#a61b14205fe2ab7533deb4d35e604add9a4ceee2" + integrity sha512-n/IzU87ttzIdnAH5vQ4BBDnLPly7rC5VnjN3m0xBG82HK6rhRxnCb3w/GyWbNDghPd+NktJqB/wl6+YkzZ5T5Q== + dependencies: + "@typescript-eslint/scope-manager" "5.26.0" + "@typescript-eslint/types" "5.26.0" + "@typescript-eslint/typescript-estree" "5.26.0" + debug "^4.3.4" + +"@typescript-eslint/scope-manager@5.26.0": + version "5.26.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.26.0.tgz#44209c7f649d1a120f0717e0e82da856e9871339" + integrity sha512-gVzTJUESuTwiju/7NiTb4c5oqod8xt5GhMbExKsCTp6adU3mya6AGJ4Pl9xC7x2DX9UYFsjImC0mA62BCY22Iw== + dependencies: + "@typescript-eslint/types" "5.26.0" + "@typescript-eslint/visitor-keys" "5.26.0" + +"@typescript-eslint/scope-manager@5.30.0": + version "5.30.0" + resolved 
"https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.30.0.tgz#bf585ee801ab4ad84db2f840174e171a6bb002c7" + integrity sha512-3TZxvlQcK5fhTBw5solQucWSJvonXf5yua5nx8OqK94hxdrT7/6W3/CS42MLd/f1BmlmmbGEgQcTHHCktUX5bQ== + dependencies: + "@typescript-eslint/types" "5.30.0" + "@typescript-eslint/visitor-keys" "5.30.0" + +"@typescript-eslint/type-utils@5.26.0": + version "5.26.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-5.26.0.tgz#937dee97702361744a3815c58991acf078230013" + integrity sha512-7ccbUVWGLmcRDSA1+ADkDBl5fP87EJt0fnijsMFTVHXKGduYMgienC/i3QwoVhDADUAPoytgjbZbCOMj4TY55A== + dependencies: + "@typescript-eslint/utils" "5.26.0" + debug "^4.3.4" + tsutils "^3.21.0" + +"@typescript-eslint/types@5.26.0": + version "5.26.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.26.0.tgz#cb204bb154d3c103d9cc4d225f311b08219469f3" + integrity sha512-8794JZFE1RN4XaExLWLI2oSXsVImNkl79PzTOOWt9h0UHROwJedNOD2IJyfL0NbddFllcktGIO2aOu10avQQyA== + +"@typescript-eslint/types@5.30.0": + version "5.30.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.30.0.tgz#db7d81d585a3da3801432a9c1d2fafbff125e110" + integrity sha512-vfqcBrsRNWw/LBXyncMF/KrUTYYzzygCSsVqlZ1qGu1QtGs6vMkt3US0VNSQ05grXi5Yadp3qv5XZdYLjpp8ag== + +"@typescript-eslint/typescript-estree@5.26.0": + version "5.26.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.26.0.tgz#16cbceedb0011c2ed4f607255f3ee1e6e43b88c3" + integrity sha512-EyGpw6eQDsfD6jIqmXP3rU5oHScZ51tL/cZgFbFBvWuCwrIptl+oueUZzSmLtxFuSOQ9vDcJIs+279gnJkfd1w== + dependencies: + "@typescript-eslint/types" "5.26.0" + "@typescript-eslint/visitor-keys" "5.26.0" + debug "^4.3.4" + globby "^11.1.0" + is-glob "^4.0.3" + semver "^7.3.7" + tsutils "^3.21.0" + +"@typescript-eslint/typescript-estree@5.30.0": + version "5.30.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.30.0.tgz#4565ee8a6d2ac368996e20b2344ea0eab1a8f0bb" + integrity sha512-hDEawogreZB4n1zoqcrrtg/wPyyiCxmhPLpZ6kmWfKF5M5G0clRLaEexpuWr31fZ42F96SlD/5xCt1bT5Qm4Nw== + dependencies: + "@typescript-eslint/types" "5.30.0" + "@typescript-eslint/visitor-keys" "5.30.0" + debug "^4.3.4" + globby "^11.1.0" + is-glob "^4.0.3" + semver "^7.3.7" + tsutils "^3.21.0" + +"@typescript-eslint/utils@5.26.0": + version "5.26.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.26.0.tgz#896b8480eb124096e99c8b240460bb4298afcfb4" + integrity sha512-PJFwcTq2Pt4AMOKfe3zQOdez6InIDOjUJJD3v3LyEtxHGVVRK3Vo7Dd923t/4M9hSH2q2CLvcTdxlLPjcIk3eg== + dependencies: + "@types/json-schema" "^7.0.9" + "@typescript-eslint/scope-manager" "5.26.0" + "@typescript-eslint/types" "5.26.0" + "@typescript-eslint/typescript-estree" "5.26.0" + eslint-scope "^5.1.1" + eslint-utils "^3.0.0" + +"@typescript-eslint/utils@^5.10.0": + version "5.30.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.30.0.tgz#1dac771fead5eab40d31860716de219356f5f754" + integrity sha512-0bIgOgZflLKIcZsWvfklsaQTM3ZUbmtH0rJ1hKyV3raoUYyeZwcjQ8ZUJTzS7KnhNcsVT1Rxs7zeeMHEhGlltw== + dependencies: + "@types/json-schema" "^7.0.9" + "@typescript-eslint/scope-manager" "5.30.0" + "@typescript-eslint/types" "5.30.0" + "@typescript-eslint/typescript-estree" "5.30.0" + eslint-scope "^5.1.1" + eslint-utils "^3.0.0" + +"@typescript-eslint/visitor-keys@5.26.0": + version "5.26.0" + resolved 
"https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.26.0.tgz#7195f756e367f789c0e83035297c45b417b57f57" + integrity sha512-wei+ffqHanYDOQgg/fS6Hcar6wAWv0CUPQ3TZzOWd2BLfgP539rb49bwua8WRAs7R6kOSLn82rfEu2ro6Llt8Q== + dependencies: + "@typescript-eslint/types" "5.26.0" + eslint-visitor-keys "^3.3.0" + +"@typescript-eslint/visitor-keys@5.30.0": + version "5.30.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.30.0.tgz#07721d23daca2ec4c2da7f1e660d41cd78bacac3" + integrity sha512-6WcIeRk2DQ3pHKxU1Ni0qMXJkjO/zLjBymlYBy/53qxe7yjEFSvzKLDToJjURUhSl2Fzhkl4SMXQoETauF74cw== + dependencies: + "@typescript-eslint/types" "5.30.0" + eslint-visitor-keys "^3.3.0" + +acorn-jsx@^5.3.2: + version "5.3.2" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== + +acorn@^8.7.1: + version "8.7.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.1.tgz#0197122c843d1bf6d0a5e83220a788f278f63c30" + integrity sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A== + +ajv@^6.10.0, ajv@^6.12.4: + version "6.12.6" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ansi-escapes@^4.2.1: + version "4.3.2" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" + integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== + dependencies: + type-fest "^0.21.3" + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== + dependencies: + color-convert "^1.9.0" + +ansi-styles@^4.0.0, ansi-styles@^4.1.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +ansi-styles@^5.0.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" + integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== + +anymatch@^3.0.3: + version "3.1.2" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" + integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== + dependencies: + normalize-path "^3.0.0" + picomatch "^2.0.4" + +argparse@^1.0.7: + version "1.0.10" + resolved 
"https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== + dependencies: + sprintf-js "~1.0.2" + +argparse@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== + +array-union@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== + +babel-jest@^28.1.2: + version "28.1.2" + resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-28.1.2.tgz#2b37fb81439f14d34d8b2cc4a4bd7efabf9acbfe" + integrity sha512-pfmoo6sh4L/+5/G2OOfQrGJgvH7fTa1oChnuYH2G/6gA+JwDvO8PELwvwnofKBMNrQsam0Wy/Rw+QSrBNewq2Q== + dependencies: + "@jest/transform" "^28.1.2" + "@types/babel__core" "^7.1.14" + babel-plugin-istanbul "^6.1.1" + babel-preset-jest "^28.1.1" + chalk "^4.0.0" + graceful-fs "^4.2.9" + slash "^3.0.0" + +babel-plugin-istanbul@^6.1.1: + version "6.1.1" + resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" + integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@istanbuljs/load-nyc-config" "^1.0.0" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-instrument "^5.0.4" + test-exclude "^6.0.0" + +babel-plugin-jest-hoist@^28.1.1: + version "28.1.1" + resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-28.1.1.tgz#5e055cdcc47894f28341f87f5e35aad2df680b11" + integrity sha512-NovGCy5Hn25uMJSAU8FaHqzs13cFoOI4lhIujiepssjCKRsAo3TA734RDWSGxuFTsUJXerYOqQQodlxgmtqbzw== + dependencies: + "@babel/template" "^7.3.3" + "@babel/types" "^7.3.3" + "@types/babel__core" "^7.1.14" + "@types/babel__traverse" "^7.0.6" + +babel-preset-current-node-syntax@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" + integrity sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ== + dependencies: + "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/plugin-syntax-bigint" "^7.8.3" + "@babel/plugin-syntax-class-properties" "^7.8.3" + "@babel/plugin-syntax-import-meta" "^7.8.3" + "@babel/plugin-syntax-json-strings" "^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" + "@babel/plugin-syntax-numeric-separator" "^7.8.3" + "@babel/plugin-syntax-object-rest-spread" "^7.8.3" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" + "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/plugin-syntax-top-level-await" "^7.8.3" + +babel-preset-jest@^28.1.1: + version "28.1.1" + resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-28.1.1.tgz#5b6e5e69f963eb2d70f739c607b8f723c0ee75e4" + integrity sha512-FCq9Oud0ReTeWtcneYf/48981aTfXYuB9gbU4rBNNJVBSQ6ssv7E6v/qvbBxtOWwZFXjLZwpg+W3q7J6vhH25g== + dependencies: + babel-plugin-jest-hoist "^28.1.1" + babel-preset-current-node-syntax "^1.0.0" + 
+balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +braces@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + dependencies: + fill-range "^7.0.1" + +browserslist@^4.20.2: + version "4.21.1" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.1.tgz#c9b9b0a54c7607e8dc3e01a0d311727188011a00" + integrity sha512-Nq8MFCSrnJXSc88yliwlzQe3qNe3VntIjhsArW9IJOEPSHNx23FalwApUVbzAWABLhYJJ7y8AynWI/XM8OdfjQ== + dependencies: + caniuse-lite "^1.0.30001359" + electron-to-chromium "^1.4.172" + node-releases "^2.0.5" + update-browserslist-db "^1.0.4" + +bs-logger@0.x: + version "0.2.6" + resolved "https://registry.yarnpkg.com/bs-logger/-/bs-logger-0.2.6.tgz#eb7d365307a72cf974cc6cda76b68354ad336bd8" + integrity sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog== + dependencies: + fast-json-stable-stringify "2.x" + +bser@2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" + integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== + dependencies: + node-int64 "^0.4.0" + +buffer-from@^1.0.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" + integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== + +callsites@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +camelcase@^5.3.1: + version "5.3.1" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== + +camelcase@^6.2.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" + integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== + +caniuse-lite@^1.0.30001359: + version "1.0.30001361" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001361.tgz#ba2adb2527566fb96f3ac7c67698ae7fc495a28d" + integrity sha512-ybhCrjNtkFji1/Wto6SSJKkWk6kZgVQsDq5QI83SafsF6FXv2JB4df9eEdH6g8sdGgqTXrFLjAxqBGgYoU3azQ== + +chalk@^2.0.0: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" 
+ escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chalk@^4.0.0: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +char-regex@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" + integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== + +ci-info@^3.2.0: + version "3.3.2" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.3.2.tgz#6d2967ffa407466481c6c90b6e16b3098f080128" + integrity sha512-xmDt/QIAdeZ9+nfdPsaBCpMvHNLFiLdjj59qjqn+6iPe6YmHGQ35sBnQ8uslRBXFmXkiZQOJRjvQeoGppoTjjg== + +cjs-module-lexer@^1.0.0: + version "1.2.2" + resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz#9f84ba3244a512f3a54e5277e8eef4c489864e40" + integrity sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA== + +cliui@^7.0.2: + version "7.0.4" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" + integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.0" + wrap-ansi "^7.0.0" + +clone-deep@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" + integrity sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ== + dependencies: + is-plain-object "^2.0.4" + kind-of "^6.0.2" + shallow-clone "^3.0.0" + +co@^4.6.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" + integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ== + +collect-v8-coverage@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" + integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== + +color-convert@^1.9.0: + version "1.9.3" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + 
+concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== + +convert-hrtime@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/convert-hrtime/-/convert-hrtime-5.0.0.tgz#f2131236d4598b95de856926a67100a0a97e9fa3" + integrity sha512-lOETlkIeYSJWcbbcvjRKGxVMXJR+8+OQb/mTPbA4ObPMytYIsUbuOE0Jzy60hjARYszq1id0j8KgVhC+WGZVTg== + +convert-source-map@^1.4.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" + integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== + dependencies: + safe-buffer "~5.1.1" + +cross-spawn@^7.0.2, cross-spawn@^7.0.3: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.4: + version "4.3.4" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + +dedent@^0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" + integrity sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA== + +deep-is@^0.1.3: + version "0.1.4" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== + +deepmerge@^4.2.2: + version "4.2.2" + resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" + integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== + +detect-newline@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" + integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== + +diff-sequences@^28.1.1: + version "28.1.1" + resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-28.1.1.tgz#9989dc731266dc2903457a70e996f3a041913ac6" + integrity sha512-FU0iFaH/E23a+a718l8Qa/19bF9p06kgE0KipMOMadwa3SjnaElKzPaUC0vnibs6/B/9ni97s61mcejk8W1fQw== + +dir-glob@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + dependencies: + path-type "^4.0.0" + +doctrine@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== + dependencies: + esutils "^2.0.2" + 
+electron-to-chromium@^1.4.172: + version "1.4.176" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.176.tgz#61ab2a1de3b5072ee31881a937c08ac6780d1cfa" + integrity sha512-92JdgyRlcNDwuy75MjuFSb3clt6DGJ2IXSpg0MCjKd3JV9eSmuUAIyWiGAp/EtT0z2D4rqbYqThQLV90maH3Zw== + +emittery@^0.10.2: + version "0.10.2" + resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.10.2.tgz#902eec8aedb8c41938c46e9385e9db7e03182933" + integrity sha512-aITqOwnLanpHLNXZJENbOgjUBeHocD+xsSJmNrjovKBW5HbSpW3d1pEls7GFQPUWXiwG9+0P4GtHfEqC/4M0Iw== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +error-ex@^1.3.1: + version "1.3.2" + resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" + integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== + dependencies: + is-arrayish "^0.2.1" + +escalade@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== + +escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== + +escape-string-regexp@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== + +escape-string-regexp@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== + +eslint-config-prettier@~8.5: + version "8.5.0" + resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-8.5.0.tgz#5a81680ec934beca02c7b1a61cf8ca34b66feab1" + integrity sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q== + +eslint-plugin-jest@~26.2: + version "26.2.2" + resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-26.2.2.tgz#74e000544259f1ef0462a609a3fc9e5da3768f6c" + integrity sha512-etSFZ8VIFX470aA6kTqDPhIq7YWe0tjBcboFNV3WeiC18PJ/AVonGhuTwlmuz2fBkH8FJHA7JQ4k7GsQIj1Gew== + dependencies: + "@typescript-eslint/utils" "^5.10.0" + +eslint-scope@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" + integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== + dependencies: + esrecurse "^4.3.0" + estraverse "^4.1.1" + +eslint-scope@^7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.1.1.tgz#fff34894c2f65e5226d3041ac480b4513a163642" + integrity sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw== + dependencies: + esrecurse "^4.3.0" + 
estraverse "^5.2.0" + +eslint-utils@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-3.0.0.tgz#8aebaface7345bb33559db0a1f13a1d2d48c3672" + integrity sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA== + dependencies: + eslint-visitor-keys "^2.0.0" + +eslint-visitor-keys@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" + integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== + +eslint-visitor-keys@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826" + integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA== + +eslint@~8.16: + version "8.16.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.16.0.tgz#6d936e2d524599f2a86c708483b4c372c5d3bbae" + integrity sha512-MBndsoXY/PeVTDJeWsYj7kLZ5hQpJOfMYLsF6LicLHQWbRDG19lK5jOix4DPl8yY4SUFcE3txy86OzFLWT+yoA== + dependencies: + "@eslint/eslintrc" "^1.3.0" + "@humanwhocodes/config-array" "^0.9.2" + ajv "^6.10.0" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.3.2" + doctrine "^3.0.0" + escape-string-regexp "^4.0.0" + eslint-scope "^7.1.1" + eslint-utils "^3.0.0" + eslint-visitor-keys "^3.3.0" + espree "^9.3.2" + esquery "^1.4.0" + esutils "^2.0.2" + fast-deep-equal "^3.1.3" + file-entry-cache "^6.0.1" + functional-red-black-tree "^1.0.1" + glob-parent "^6.0.1" + globals "^13.15.0" + ignore "^5.2.0" + import-fresh "^3.0.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + js-yaml "^4.1.0" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.4.1" + lodash.merge "^4.6.2" + minimatch "^3.1.2" + natural-compare "^1.4.0" + optionator "^0.9.1" + regexpp "^3.2.0" + strip-ansi "^6.0.1" + strip-json-comments "^3.1.0" + text-table "^0.2.0" + v8-compile-cache "^2.0.3" + +espree@^9.3.2: + version "9.3.2" + resolved "https://registry.yarnpkg.com/espree/-/espree-9.3.2.tgz#f58f77bd334731182801ced3380a8cc859091596" + integrity sha512-D211tC7ZwouTIuY5x9XnS0E9sWNChB7IYKX/Xp5eQj3nFXhqmiUDB9q27y76oFl8jTg3pXcQx/bpxMfs3CIZbA== + dependencies: + acorn "^8.7.1" + acorn-jsx "^5.3.2" + eslint-visitor-keys "^3.3.0" + +esprima@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +esquery@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" + integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== + dependencies: + estraverse "^5.1.0" + +esrecurse@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + dependencies: + estraverse "^5.2.0" + +estraverse@^4.1.1: + version "4.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== + +estraverse@^5.1.0, estraverse@^5.2.0: + version "5.3.0" 
+ resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== + +esutils@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +execa@^5.0.0: + version "5.1.1" + resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" + integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== + dependencies: + cross-spawn "^7.0.3" + get-stream "^6.0.0" + human-signals "^2.1.0" + is-stream "^2.0.0" + merge-stream "^2.0.0" + npm-run-path "^4.0.1" + onetime "^5.1.2" + signal-exit "^3.0.3" + strip-final-newline "^2.0.0" + +exit@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" + integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== + +expect@^28.1.1: + version "28.1.1" + resolved "https://registry.yarnpkg.com/expect/-/expect-28.1.1.tgz#ca6fff65f6517cf7220c2e805a49c19aea30b420" + integrity sha512-/AANEwGL0tWBwzLNOvO0yUdy2D52jVdNXppOqswC49sxMN2cPWsGCQdzuIf9tj6hHoBQzNvx75JUYuQAckPo3w== + dependencies: + "@jest/expect-utils" "^28.1.1" + jest-get-type "^28.0.2" + jest-matcher-utils "^28.1.1" + jest-message-util "^28.1.1" + jest-util "^28.1.1" + +fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-glob@^3.2.9: + version "3.2.11" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.11.tgz#a1172ad95ceb8a16e20caa5c5e56480e5129c1d9" + integrity sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" + +fast-json-stable-stringify@2.x, fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== + +fastq@^1.6.0: + version "1.13.0" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" + integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== + dependencies: + reusify "^1.0.4" + +fb-watchman@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.1.tgz#fc84fb39d2709cf3ff6d743706157bb5708a8a85" + integrity sha512-DkPJKQeY6kKwmuMretBhr7G6Vodr7bFwDYTXIkfG1gjvNpaxBTQV3PbXg6bR1c1UP4jPOX0jHUbbHANL9vRjVg== + dependencies: + bser "2.1.1" + 
+file-entry-cache@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== + dependencies: + flat-cache "^3.0.4" + +fill-range@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== + dependencies: + to-regex-range "^5.0.1" + +find-up@^4.0.0, find-up@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + dependencies: + locate-path "^5.0.0" + path-exists "^4.0.0" + +flat-cache@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" + integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + dependencies: + flatted "^3.1.0" + rimraf "^3.0.2" + +flatted@^3.1.0: + version "3.2.6" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.6.tgz#022e9218c637f9f3fc9c35ab9c9193f05add60b2" + integrity sha512-0sQoMh9s0BYsm+12Huy/rkKxVu4R1+r96YX5cG44rHV0pQ6iC3Q+mkoMFaGWObMFYQxCVT+ssG1ksneA2MI9KQ== + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== + +fsevents@^2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" + integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +functional-red-black-tree@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" + integrity sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g== + +gensync@^1.0.0-beta.2: + version "1.0.0-beta.2" + resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + +get-caller-file@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== + +get-package-type@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== + +get-stream@^6.0.0: + version "6.0.1" + resolved 
"https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== + +glob-parent@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob-parent@^6.0.1: + version "6.0.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== + dependencies: + is-glob "^4.0.3" + +glob@^7.1.3, glob@^7.1.4: + version "7.2.3" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" + integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.1.1" + once "^1.3.0" + path-is-absolute "^1.0.0" + +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +globals@^13.15.0: + version "13.15.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-13.15.0.tgz#38113218c907d2f7e98658af246cef8b77e90bac" + integrity sha512-bpzcOlgDhMG070Av0Vy5Owklpv1I6+j96GhUI7Rh7IzDCKLzboflLrrfqMu8NquDbiR4EOQk7XzJwqVJxicxog== + dependencies: + type-fest "^0.20.2" + +globby@^11.1.0: + version "11.1.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.2.9" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^3.0.0" + +graceful-fs@^4.2.9: + version "4.2.10" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" + integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== + +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +html-escaper@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== + +human-signals@^2.1.0: + version 
"2.1.0" + resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" + integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== + +ignore@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" + integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== + +import-fresh@^3.0.0, import-fresh@^3.2.1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + dependencies: + parent-module "^1.0.0" + resolve-from "^4.0.0" + +import-local@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" + integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== + dependencies: + pkg-dir "^4.2.0" + resolve-cwd "^3.0.0" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== + +is-core-module@^2.9.0: + version "2.9.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.9.0.tgz#e1c34429cd51c6dd9e09e0799e396e27b19a9c69" + integrity sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A== + dependencies: + has "^1.0.3" + +is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-generator-fn@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" + integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== + +is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3: + version "4.0.3" + resolved 
"https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== + dependencies: + is-extglob "^2.1.1" + +is-number@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-plain-object@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" + integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== + dependencies: + isobject "^3.0.1" + +is-stream@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== + +isobject@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" + integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== + +istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" + integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== + +istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.0.tgz#31d18bdd127f825dd02ea7bfdfd906f8ab840e9f" + integrity sha512-6Lthe1hqXHBNsqvgDzGO6l03XNeu3CrG4RqQ1KM9+l5+jNGpEJfIELx1NS3SEHmJQA8np/u+E4EPRKRiu6m19A== + dependencies: + "@babel/core" "^7.12.3" + "@babel/parser" "^7.14.7" + "@istanbuljs/schema" "^0.1.2" + istanbul-lib-coverage "^3.2.0" + semver "^6.3.0" + +istanbul-lib-report@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" + integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== + dependencies: + istanbul-lib-coverage "^3.0.0" + make-dir "^3.0.0" + supports-color "^7.1.0" + +istanbul-lib-source-maps@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" + integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== + dependencies: + debug "^4.1.1" + istanbul-lib-coverage "^3.0.0" + source-map "^0.6.1" + +istanbul-reports@^3.1.3: + version "3.1.4" + resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.4.tgz#1b6f068ecbc6c331040aab5741991273e609e40c" + integrity sha512-r1/DshN4KSE7xWEknZLLLLDn5CJybV3nw01VTkp6D5jzLuELlcbudfj/eSQFvrKsJuTVCGnePO7ho82Nw9zzfw== + dependencies: + html-escaper "^2.0.0" + istanbul-lib-report "^3.0.0" + 
+jest-changed-files@^28.0.2: + version "28.0.2" + resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-28.0.2.tgz#7d7810660a5bd043af9e9cfbe4d58adb05e91531" + integrity sha512-QX9u+5I2s54ZnGoMEjiM2WeBvJR2J7w/8ZUmH2um/WLAuGAYFQcsVXY9+1YL6k0H/AGUdH8pXUAv6erDqEsvIA== + dependencies: + execa "^5.0.0" + throat "^6.0.1" + +jest-circus@^28.1.2: + version "28.1.2" + resolved "https://registry.yarnpkg.com/jest-circus/-/jest-circus-28.1.2.tgz#0d5a5623eccb244efe87d1edc365696e4fcf80ce" + integrity sha512-E2vdPIJG5/69EMpslFhaA46WkcrN74LI5V/cSJ59L7uS8UNoXbzTxmwhpi9XrIL3zqvMt5T0pl5k2l2u2GwBNQ== + dependencies: + "@jest/environment" "^28.1.2" + "@jest/expect" "^28.1.2" + "@jest/test-result" "^28.1.1" + "@jest/types" "^28.1.1" + "@types/node" "*" + chalk "^4.0.0" + co "^4.6.0" + dedent "^0.7.0" + is-generator-fn "^2.0.0" + jest-each "^28.1.1" + jest-matcher-utils "^28.1.1" + jest-message-util "^28.1.1" + jest-runtime "^28.1.2" + jest-snapshot "^28.1.2" + jest-util "^28.1.1" + pretty-format "^28.1.1" + slash "^3.0.0" + stack-utils "^2.0.3" + throat "^6.0.1" + +jest-cli@^28.1.2: + version "28.1.2" + resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-28.1.2.tgz#b89012e5bad14135e71b1628b85475d3773a1bbc" + integrity sha512-l6eoi5Do/IJUXAFL9qRmDiFpBeEJAnjJb1dcd9i/VWfVWbp3mJhuH50dNtX67Ali4Ecvt4eBkWb4hXhPHkAZTw== + dependencies: + "@jest/core" "^28.1.2" + "@jest/test-result" "^28.1.1" + "@jest/types" "^28.1.1" + chalk "^4.0.0" + exit "^0.1.2" + graceful-fs "^4.2.9" + import-local "^3.0.2" + jest-config "^28.1.2" + jest-util "^28.1.1" + jest-validate "^28.1.1" + prompts "^2.0.1" + yargs "^17.3.1" + +jest-config@^28.1.2: + version "28.1.2" + resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-28.1.2.tgz#ba00ad30caf62286c86e7c1099e915218a0ac8c6" + integrity sha512-g6EfeRqddVbjPVBVY4JWpUY4IvQoFRIZcv4V36QkqzE0IGhEC/VkugFeBMAeUE7PRgC8KJF0yvJNDeQRbamEVA== + dependencies: + "@babel/core" "^7.11.6" + "@jest/test-sequencer" "^28.1.1" + "@jest/types" "^28.1.1" + babel-jest "^28.1.2" + chalk "^4.0.0" + ci-info "^3.2.0" + deepmerge "^4.2.2" + glob "^7.1.3" + graceful-fs "^4.2.9" + jest-circus "^28.1.2" + jest-environment-node "^28.1.2" + jest-get-type "^28.0.2" + jest-regex-util "^28.0.2" + jest-resolve "^28.1.1" + jest-runner "^28.1.2" + jest-util "^28.1.1" + jest-validate "^28.1.1" + micromatch "^4.0.4" + parse-json "^5.2.0" + pretty-format "^28.1.1" + slash "^3.0.0" + strip-json-comments "^3.1.1" + +jest-diff@^28.1.1: + version "28.1.1" + resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-28.1.1.tgz#1a3eedfd81ae79810931c63a1d0f201b9120106c" + integrity sha512-/MUUxeR2fHbqHoMMiffe/Afm+U8U4olFRJ0hiVG2lZatPJcnGxx292ustVu7bULhjV65IYMxRdploAKLbcrsyg== + dependencies: + chalk "^4.0.0" + diff-sequences "^28.1.1" + jest-get-type "^28.0.2" + pretty-format "^28.1.1" + +jest-docblock@^28.1.1: + version "28.1.1" + resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-28.1.1.tgz#6f515c3bf841516d82ecd57a62eed9204c2f42a8" + integrity sha512-3wayBVNiOYx0cwAbl9rwm5kKFP8yHH3d/fkEaL02NPTkDojPtheGB7HZSFY4wzX+DxyrvhXz0KSCVksmCknCuA== + dependencies: + detect-newline "^3.0.0" + +jest-each@^28.1.1: + version "28.1.1" + resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-28.1.1.tgz#ba5238dacf4f31d9fe23ddc2c44c01e7c23885c4" + integrity sha512-A042rqh17ZvEhRceDMi784ppoXR7MWGDEKTXEZXb4svt0eShMZvijGxzKsx+yIjeE8QYmHPrnHiTSQVhN4nqaw== + dependencies: + "@jest/types" "^28.1.1" + chalk "^4.0.0" + jest-get-type "^28.0.2" + jest-util "^28.1.1" + pretty-format "^28.1.1" + 
+jest-environment-node@^28.1.2: + version "28.1.2" + resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-28.1.2.tgz#3e2eb47f6d173b0648d5f7c717cb1c26651d5c8a" + integrity sha512-oYsZz9Qw27XKmOgTtnl0jW7VplJkN2oeof+SwAwKFQacq3CLlG9u4kTGuuLWfvu3J7bVutWlrbEQMOCL/jughw== + dependencies: + "@jest/environment" "^28.1.2" + "@jest/fake-timers" "^28.1.2" + "@jest/types" "^28.1.1" + "@types/node" "*" + jest-mock "^28.1.1" + jest-util "^28.1.1" + +jest-get-type@^28.0.2: + version "28.0.2" + resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-28.0.2.tgz#34622e628e4fdcd793d46db8a242227901fcf203" + integrity sha512-ioj2w9/DxSYHfOm5lJKCdcAmPJzQXmbM/Url3rhlghrPvT3tt+7a/+oXc9azkKmLvoiXjtV83bEWqi+vs5nlPA== + +jest-haste-map@^28.1.1: + version "28.1.1" + resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-28.1.1.tgz#471685f1acd365a9394745bb97c8fc16289adca3" + integrity sha512-ZrRSE2o3Ezh7sb1KmeLEZRZ4mgufbrMwolcFHNRSjKZhpLa8TdooXOOFlSwoUzlbVs1t0l7upVRW2K7RWGHzbQ== + dependencies: + "@jest/types" "^28.1.1" + "@types/graceful-fs" "^4.1.3" + "@types/node" "*" + anymatch "^3.0.3" + fb-watchman "^2.0.0" + graceful-fs "^4.2.9" + jest-regex-util "^28.0.2" + jest-util "^28.1.1" + jest-worker "^28.1.1" + micromatch "^4.0.4" + walker "^1.0.8" + optionalDependencies: + fsevents "^2.3.2" + +jest-leak-detector@^28.1.1: + version "28.1.1" + resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-28.1.1.tgz#537f37afd610a4b3f4cab15e06baf60484548efb" + integrity sha512-4jvs8V8kLbAaotE+wFR7vfUGf603cwYtFf1/PYEsyX2BAjSzj8hQSVTP6OWzseTl0xL6dyHuKs2JAks7Pfubmw== + dependencies: + jest-get-type "^28.0.2" + pretty-format "^28.1.1" + +jest-matcher-utils@^28.0.0, jest-matcher-utils@^28.1.1: + version "28.1.1" + resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-28.1.1.tgz#a7c4653c2b782ec96796eb3088060720f1e29304" + integrity sha512-NPJPRWrbmR2nAJ+1nmnfcKKzSwgfaciCCrYZzVnNoxVoyusYWIjkBMNvu0RHJe7dNj4hH3uZOPZsQA+xAYWqsw== + dependencies: + chalk "^4.0.0" + jest-diff "^28.1.1" + jest-get-type "^28.0.2" + pretty-format "^28.1.1" + +jest-message-util@^28.1.1: + version "28.1.1" + resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-28.1.1.tgz#60aa0b475cfc08c8a9363ed2fb9108514dd9ab89" + integrity sha512-xoDOOT66fLfmTRiqkoLIU7v42mal/SqwDKvfmfiWAdJMSJiU+ozgluO7KbvoAgiwIrrGZsV7viETjc8GNrA/IQ== + dependencies: + "@babel/code-frame" "^7.12.13" + "@jest/types" "^28.1.1" + "@types/stack-utils" "^2.0.0" + chalk "^4.0.0" + graceful-fs "^4.2.9" + micromatch "^4.0.4" + pretty-format "^28.1.1" + slash "^3.0.0" + stack-utils "^2.0.3" + +jest-mock@^28.1.1: + version "28.1.1" + resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-28.1.1.tgz#37903d269427fa1ef5b2447be874e1c62a39a371" + integrity sha512-bDCb0FjfsmKweAvE09dZT59IMkzgN0fYBH6t5S45NoJfd2DHkS3ySG2K+hucortryhO3fVuXdlxWcbtIuV/Skw== + dependencies: + "@jest/types" "^28.1.1" + "@types/node" "*" + +jest-pnp-resolver@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c" + integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== + +jest-regex-util@^28.0.2: + version "28.0.2" + resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-28.0.2.tgz#afdc377a3b25fb6e80825adcf76c854e5bf47ead" + integrity sha512-4s0IgyNIy0y9FK+cjoVYoxamT7Zeo7MhzqRGx7YDYmaQn1wucY9rotiGkBzzcMXTtjrCAP/f7f+E0F7+fxPNdw== + 
+jest-resolve-dependencies@^28.1.2: + version "28.1.2" + resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-28.1.2.tgz#ca528858e0c6642d5a1dda8fc7cda10230c275bc" + integrity sha512-OXw4vbOZuyRTBi3tapWBqdyodU+T33ww5cPZORuTWkg+Y8lmsxQlVu3MWtJh6NMlKRTHQetF96yGPv01Ye7Mbg== + dependencies: + jest-regex-util "^28.0.2" + jest-snapshot "^28.1.2" + +jest-resolve@^28.1.1: + version "28.1.1" + resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-28.1.1.tgz#bc2eaf384abdcc1aaf3ba7c50d1adf01e59095e5" + integrity sha512-/d1UbyUkf9nvsgdBildLe6LAD4DalgkgZcKd0nZ8XUGPyA/7fsnaQIlKVnDiuUXv/IeZhPEDrRJubVSulxrShA== + dependencies: + chalk "^4.0.0" + graceful-fs "^4.2.9" + jest-haste-map "^28.1.1" + jest-pnp-resolver "^1.2.2" + jest-util "^28.1.1" + jest-validate "^28.1.1" + resolve "^1.20.0" + resolve.exports "^1.1.0" + slash "^3.0.0" + +jest-runner@^28.1.2: + version "28.1.2" + resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-28.1.2.tgz#f293409592a62234285a71237e38499a3554e350" + integrity sha512-6/k3DlAsAEr5VcptCMdhtRhOoYClZQmxnVMZvZ/quvPGRpN7OBQYPIC32tWSgOnbgqLXNs5RAniC+nkdFZpD4A== + dependencies: + "@jest/console" "^28.1.1" + "@jest/environment" "^28.1.2" + "@jest/test-result" "^28.1.1" + "@jest/transform" "^28.1.2" + "@jest/types" "^28.1.1" + "@types/node" "*" + chalk "^4.0.0" + emittery "^0.10.2" + graceful-fs "^4.2.9" + jest-docblock "^28.1.1" + jest-environment-node "^28.1.2" + jest-haste-map "^28.1.1" + jest-leak-detector "^28.1.1" + jest-message-util "^28.1.1" + jest-resolve "^28.1.1" + jest-runtime "^28.1.2" + jest-util "^28.1.1" + jest-watcher "^28.1.1" + jest-worker "^28.1.1" + source-map-support "0.5.13" + throat "^6.0.1" + +jest-runtime@^28.1.2: + version "28.1.2" + resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-28.1.2.tgz#d68f34f814a848555a345ceda23289f14d59a688" + integrity sha512-i4w93OsWzLOeMXSi9epmakb2+3z0AchZtUQVF1hesBmcQQy4vtaql5YdVe9KexdJaVRyPDw8DoBR0j3lYsZVYw== + dependencies: + "@jest/environment" "^28.1.2" + "@jest/fake-timers" "^28.1.2" + "@jest/globals" "^28.1.2" + "@jest/source-map" "^28.1.2" + "@jest/test-result" "^28.1.1" + "@jest/transform" "^28.1.2" + "@jest/types" "^28.1.1" + chalk "^4.0.0" + cjs-module-lexer "^1.0.0" + collect-v8-coverage "^1.0.0" + execa "^5.0.0" + glob "^7.1.3" + graceful-fs "^4.2.9" + jest-haste-map "^28.1.1" + jest-message-util "^28.1.1" + jest-mock "^28.1.1" + jest-regex-util "^28.0.2" + jest-resolve "^28.1.1" + jest-snapshot "^28.1.2" + jest-util "^28.1.1" + slash "^3.0.0" + strip-bom "^4.0.0" + +jest-snapshot@^28.1.2: + version "28.1.2" + resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-28.1.2.tgz#93d31b87b11b384f5946fe0767541496135f8d52" + integrity sha512-wzrieFttZYfLvrCVRJxX+jwML2YTArOUqFpCoSVy1QUapx+LlV9uLbV/mMEhYj4t7aMeE9aSQFHSvV/oNoDAMA== + dependencies: + "@babel/core" "^7.11.6" + "@babel/generator" "^7.7.2" + "@babel/plugin-syntax-typescript" "^7.7.2" + "@babel/traverse" "^7.7.2" + "@babel/types" "^7.3.3" + "@jest/expect-utils" "^28.1.1" + "@jest/transform" "^28.1.2" + "@jest/types" "^28.1.1" + "@types/babel__traverse" "^7.0.6" + "@types/prettier" "^2.1.5" + babel-preset-current-node-syntax "^1.0.0" + chalk "^4.0.0" + expect "^28.1.1" + graceful-fs "^4.2.9" + jest-diff "^28.1.1" + jest-get-type "^28.0.2" + jest-haste-map "^28.1.1" + jest-matcher-utils "^28.1.1" + jest-message-util "^28.1.1" + jest-util "^28.1.1" + natural-compare "^1.4.0" + pretty-format "^28.1.1" + semver "^7.3.5" + +jest-util@^28.0.0, jest-util@^28.1.1: + version 
"28.1.1" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-28.1.1.tgz#ff39e436a1aca397c0ab998db5a51ae2b7080d05" + integrity sha512-FktOu7ca1DZSyhPAxgxB6hfh2+9zMoJ7aEQA759Z6p45NuO8mWcqujH+UdHlCm/V6JTWwDztM2ITCzU1ijJAfw== + dependencies: + "@jest/types" "^28.1.1" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-validate@^28.1.1: + version "28.1.1" + resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-28.1.1.tgz#59b7b339b3c85b5144bd0c06ad3600f503a4acc8" + integrity sha512-Kpf6gcClqFCIZ4ti5++XemYJWUPCFUW+N2gknn+KgnDf549iLul3cBuKVe1YcWRlaF8tZV8eJCap0eECOEE3Ug== + dependencies: + "@jest/types" "^28.1.1" + camelcase "^6.2.0" + chalk "^4.0.0" + jest-get-type "^28.0.2" + leven "^3.1.0" + pretty-format "^28.1.1" + +jest-watcher@^28.1.1: + version "28.1.1" + resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-28.1.1.tgz#533597fb3bfefd52b5cd115cd916cffd237fb60c" + integrity sha512-RQIpeZ8EIJMxbQrXpJQYIIlubBnB9imEHsxxE41f54ZwcqWLysL/A0ZcdMirf+XsMn3xfphVQVV4EW0/p7i7Ug== + dependencies: + "@jest/test-result" "^28.1.1" + "@jest/types" "^28.1.1" + "@types/node" "*" + ansi-escapes "^4.2.1" + chalk "^4.0.0" + emittery "^0.10.2" + jest-util "^28.1.1" + string-length "^4.0.1" + +jest-worker@^28.1.1: + version "28.1.1" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-28.1.1.tgz#3480c73247171dfd01eda77200f0063ab6a3bf28" + integrity sha512-Au7slXB08C6h+xbJPp7VIb6U0XX5Kc9uel/WFc6/rcTzGiaVCBRngBExSYuXSLFPULPSYU3cJ3ybS988lNFQhQ== + dependencies: + "@types/node" "*" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jest@^28.1.1: + version "28.1.2" + resolved "https://registry.yarnpkg.com/jest/-/jest-28.1.2.tgz#451ff24081ce31ca00b07b60c61add13aa96f8eb" + integrity sha512-Tuf05DwLeCh2cfWCQbcz9UxldoDyiR1E9Igaei5khjonKncYdc6LDfynKCEWozK0oLE3GD+xKAo2u8x/0s6GOg== + dependencies: + "@jest/core" "^28.1.2" + "@jest/types" "^28.1.1" + import-local "^3.0.2" + jest-cli "^28.1.2" + +js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-yaml@^3.13.1: + version "3.14.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +js-yaml@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== + dependencies: + argparse "^2.0.1" + +jsesc@^2.5.1: + version "2.5.2" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== + +json-parse-even-better-errors@^2.3.0: + version "2.3.1" + resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" + integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved 
"https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== + +json5@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" + integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA== + +kind-of@^6.0.2: + version "6.0.3" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" + integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== + +kleur@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" + integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== + +leven@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" + integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== + +levn@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== + dependencies: + prelude-ls "^1.2.1" + type-check "~0.4.0" + +lines-and-columns@^1.1.6: + version "1.2.4" + resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" + integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== + +locate-path@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" + integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== + dependencies: + p-locate "^4.1.0" + +lodash.memoize@4.x: + version "4.1.2" + resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" + integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag== + +lodash.merge@^4.6.2: + version "4.6.2" + resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== + +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +make-dir@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" + integrity 
sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== + dependencies: + semver "^6.0.0" + +make-error@1.x: + version "1.3.6" + resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" + integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== + +makeerror@1.0.12: + version "1.0.12" + resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" + integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== + dependencies: + tmpl "1.0.5" + +merge-stream@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" + integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== + +merge2@^1.3.0, merge2@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +micromatch@^4.0.4: + version "4.0.5" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" + integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== + dependencies: + braces "^3.0.2" + picomatch "^2.3.1" + +mimic-fn@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== + +minimatch@^3.0.4, minimatch@^3.1.1, minimatch@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== + +node-int64@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" + integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== + +node-releases@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.5.tgz#280ed5bc3eba0d96ce44897d8aee478bfb3d9666" + integrity sha512-U9h1NLROZTq9uE1SNffn6WuPDg8icmi3ns4rEl/oTfIle4iLjTliCzgTsbaIFMq/Xn078/lfY/BL0GWZ+psK4Q== + +normalize-path@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +npm-run-path@^4.0.1: + version "4.0.1" + resolved 
"https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" + integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== + dependencies: + path-key "^3.0.0" + +once@^1.3.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +onetime@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" + integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== + dependencies: + mimic-fn "^2.1.0" + +optionator@^0.9.1: + version "0.9.1" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" + integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== + dependencies: + deep-is "^0.1.3" + fast-levenshtein "^2.0.6" + levn "^0.4.1" + prelude-ls "^1.2.1" + type-check "^0.4.0" + word-wrap "^1.2.3" + +p-limit@^2.2.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" + integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== + dependencies: + p-try "^2.0.0" + +p-locate@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" + integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== + dependencies: + p-limit "^2.2.0" + +p-try@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== + +parent-module@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + callsites "^3.0.0" + +parse-json@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" + integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== + dependencies: + "@babel/code-frame" "^7.0.0" + error-ex "^1.3.1" + json-parse-even-better-errors "^2.3.0" + lines-and-columns "^1.1.6" + +path-exists@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== + +path-key@^3.0.0, path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity 
sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-type@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + +picocolors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== + +picomatch@^2.0.4, picomatch@^2.2.3, picomatch@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + +pirates@^4.0.4: + version "4.0.5" + resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" + integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== + +pkg-dir@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== + dependencies: + find-up "^4.0.0" + +prelude-ls@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== + +prettier@~2.6: + version "2.6.2" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.6.2.tgz#e26d71a18a74c3d0f0597f55f01fb6c06c206032" + integrity sha512-PkUpF+qoXTqhOeWL9fu7As8LXsIUZ1WYaJiY/a7McAQzxjk82OF0tibkFXVCDImZtWxbvojFjerkiLb0/q8mew== + +pretty-format@^28.0.0, pretty-format@^28.1.1: + version "28.1.1" + resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-28.1.1.tgz#f731530394e0f7fcd95aba6b43c50e02d86b95cb" + integrity sha512-wwJbVTGFHeucr5Jw2bQ9P+VYHyLdAqedFLEkdQUVaBF/eiidDwH5OpilINq4mEfhbCjLnirt6HTTDhv1HaTIQw== + dependencies: + "@jest/schemas" "^28.0.2" + ansi-regex "^5.0.1" + ansi-styles "^5.0.0" + react-is "^18.0.0" + +prompts@^2.0.1: + version "2.4.2" + resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" + integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== + dependencies: + kleur "^3.0.3" + sisteransi "^1.0.5" + +punycode@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +queue-microtask@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + +react-is@^18.0.0: + version "18.2.0" 
+ resolved "https://registry.yarnpkg.com/react-is/-/react-is-18.2.0.tgz#199431eeaaa2e09f86427efbb4f1473edb47609b" + integrity sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w== + +regexpp@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" + integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== + +require-directory@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== + +resolve-cwd@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" + integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== + dependencies: + resolve-from "^5.0.0" + +resolve-from@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve-from@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== + +resolve.exports@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-1.1.0.tgz#5ce842b94b05146c0e03076985d1d0e7e48c90c9" + integrity sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ== + +resolve@^1.20.0: + version "1.22.1" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" + integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== + dependencies: + is-core-module "^2.9.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +reusify@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +rimraf@^3.0.0, rimraf@^3.0.2, rimraf@~3.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + +run-parallel@^1.1.9: + version "1.2.0" + resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + +safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +semver@7.x, semver@^7.3.5, semver@^7.3.7: + version "7.3.7" + resolved 
"https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" + integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== + dependencies: + lru-cache "^6.0.0" + +semver@^6.0.0, semver@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + +shallow-clone@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/shallow-clone/-/shallow-clone-3.0.1.tgz#8f2981ad92531f55035b01fb230769a40e02efa3" + integrity sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA== + dependencies: + kind-of "^6.0.2" + +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +signal-exit@^3.0.3, signal-exit@^3.0.7: + version "3.0.7" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== + +sisteransi@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" + integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== + +slash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +source-map-support@0.5.13: + version "0.5.13" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.13.tgz#31b24a9c2e73c2de85066c0feb7d44767ed52932" + integrity sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map-support@^0.5.21: + version "0.5.21" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" + integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map@^0.6.0, source-map@^0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== + +stack-utils@^2.0.3: + version "2.0.5" + resolved 
"https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.5.tgz#d25265fca995154659dbbfba3b49254778d2fdd5" + integrity sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA== + dependencies: + escape-string-regexp "^2.0.0" + +string-length@^4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" + integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== + dependencies: + char-regex "^1.0.2" + strip-ansi "^6.0.0" + +string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-bom@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" + integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== + +strip-final-newline@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" + integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== + +strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + +supports-color@^5.3.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +supports-color@^7.0.0, supports-color@^7.1.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-color@^8.0.0: + version "8.1.1" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" + +supports-hyperlinks@^2.0.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.2.0.tgz#4f77b42488765891774b70c79babd87f9bd594bb" + integrity sha512-6sXEzV5+I5j8Bmq9/vUphGRM/RJNT9SCURJLjwfOg51heRtguGWDzcaBlgAzKhQa0EVNpPEKzQuBwZ8S8WaCeQ== + dependencies: + has-flag "^4.0.0" + supports-color "^7.0.0" + 
+supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +terminal-link@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994" + integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== + dependencies: + ansi-escapes "^4.2.1" + supports-hyperlinks "^2.0.0" + +test-exclude@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== + dependencies: + "@istanbuljs/schema" "^0.1.2" + glob "^7.1.4" + minimatch "^3.0.4" + +text-table@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== + +throat@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/throat/-/throat-6.0.1.tgz#d514fedad95740c12c2d7fc70ea863eb51ade375" + integrity sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w== + +time-span@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/time-span/-/time-span-5.1.0.tgz#80c76cf5a0ca28e0842d3f10a4e99034ce94b90d" + integrity sha512-75voc/9G4rDIJleOo4jPvN4/YC4GRZrY8yy1uU4lwrB3XEQbWve8zXoO5No4eFrGcTAMYyoY67p8jRQdtA1HbA== + dependencies: + convert-hrtime "^5.0.0" + +tmpl@1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" + integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== + +to-regex-range@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +ts-jest@^28.0.5: + version "28.0.5" + resolved "https://registry.yarnpkg.com/ts-jest/-/ts-jest-28.0.5.tgz#31776f768fba6dfc8c061d488840ed0c8eeac8b9" + integrity sha512-Sx9FyP9pCY7pUzQpy4FgRZf2bhHY3za576HMKJFs+OnQ9jS96Du5vNsDKkyedQkik+sEabbKAnCliv9BEsHZgQ== + dependencies: + bs-logger "0.x" + fast-json-stable-stringify "2.x" + jest-util "^28.0.0" + json5 "^2.2.1" + lodash.memoize "4.x" + make-error "1.x" + semver "7.x" + yargs-parser "^21.0.1" + +tslib@^1.8.1: + version "1.14.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== + +tslib@~2.4: + version "2.4.0" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" + integrity 
sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== + +tsutils@^3.21.0, tsutils@~3.21: + version "3.21.0" + resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" + integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== + dependencies: + tslib "^1.8.1" + +type-check@^0.4.0, type-check@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== + dependencies: + prelude-ls "^1.2.1" + +type-detect@4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== + +type-fest@^0.20.2: + version "0.20.2" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== + +type-fest@^0.21.3: + version "0.21.3" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" + integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== + +typescript@~4.7: + version "4.7.4" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.7.4.tgz#1a88596d1cf47d59507a1bcdfb5b9dfe4d488235" + integrity sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ== + +underscore@^1.13.4: + version "1.13.4" + resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.13.4.tgz#7886b46bbdf07f768e0052f1828e1dcab40c0dee" + integrity sha512-BQFnUDuAQ4Yf/cYY5LNrK9NCJFKriaRbD9uR1fTeXnBeoa97W0i41qkZfGO9pSo8I5KzjAcSY2XYtdf0oKd7KQ== + +update-browserslist-db@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.4.tgz#dbfc5a789caa26b1db8990796c2c8ebbce304824" + integrity sha512-jnmO2BEGUjsMOe/Fg9u0oczOe/ppIDZPebzccl1yDWGLFP16Pa1/RM5wEoKYPG2zstNcDuAStejyxsOuKINdGA== + dependencies: + escalade "^3.1.1" + picocolors "^1.0.0" + +uri-js@^4.2.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +v8-compile-cache@^2.0.3: + version "2.3.0" + resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" + integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== + +v8-to-istanbul@^9.0.1: + version "9.0.1" + resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-9.0.1.tgz#b6f994b0b5d4ef255e17a0d17dc444a9f5132fa4" + integrity sha512-74Y4LqY74kLE6IFyIjPtkSTWzUZmj8tdHT9Ii/26dvQ6K9Dl2NbEfj0XgU2sHCtKgt5VupqhlO/5aWuqS+IY1w== + dependencies: + "@jridgewell/trace-mapping" "^0.3.12" + "@types/istanbul-lib-coverage" "^2.0.1" + convert-source-map "^1.6.0" + +walker@^1.0.8: + version "1.0.8" + resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" + integrity 
sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== + dependencies: + makeerror "1.0.12" + +which@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +word-wrap@^1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" + integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + +write-file-atomic@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-4.0.1.tgz#9faa33a964c1c85ff6f849b80b42a88c2c537c8f" + integrity sha512-nSKUxgAbyioruk6hU87QzVbY279oYT6uiwgDoujth2ju4mJ+TZau7SQBhtbTmUyuNYTuXnSyRn66FV0+eCgcrQ== + dependencies: + imurmurhash "^0.1.4" + signal-exit "^3.0.7" + +y18n@^5.0.5: + version "5.0.8" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== + +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + +yargs-parser@^21.0.0, yargs-parser@^21.0.1: + version "21.0.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.0.1.tgz#0267f286c877a4f0f728fceb6f8a3e4cb95c6e35" + integrity sha512-9BK1jFpLzJROCI5TzwZL/TU4gqjK5xiHV/RfWLOahrjAko/e4DJkRDZQXfvqAsiZzzYhgAzbgz6lg48jcm4GLg== + +yargs@^17.3.1: + version "17.5.1" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.5.1.tgz#e109900cab6fcb7fd44b1d8249166feb0b36e58e" + integrity sha512-t6YAJcxDkNX7NFYiVtKvWUz8l+PaKTLiL63mJYWR2GnHq2gjEWISzsLp9wg3aY36dY1j+gfIEL3pIF+XlJJfbA== + dependencies: + cliui "^7.0.2" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.3" + y18n "^5.0.5" + yargs-parser "^21.0.0" From 8f29c6070e37642fd750ac63bdcf95dd4ced4d8d Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 20 Sep 2022 18:48:59 +0100 Subject: [PATCH 014/127] cp test --- x/ccv/provider/keyguard/keyguard_test.go | 54 +++++++++++++++++++++--- 1 file changed, 48 insertions(+), 6 deletions(-) diff --git a/x/ccv/provider/keyguard/keyguard_test.go b/x/ccv/provider/keyguard/keyguard_test.go index df0a5abc30..d9d8132fc1 100644 --- a/x/ccv/provider/keyguard/keyguard_test.go +++ b/x/ccv/provider/keyguard/keyguard_test.go @@ -1,18 +1,60 @@ -package keyguard_test +package keyguard import ( "testing" ) type TraceState struct { - TP int - TC int - TM int + Mapping map[LK]FK + LocalUpdates []update + TP int + TC int + TM 
int } -type DriverState struct { +type Driver struct { + lastTP int + lastTC int + lastTM int } -func TestKeyDelegation(t *testing.T) { +func (d *Driver) runTrace(t *testing.T, trace []TraceState) { + kg := KeyGuard{} + // TODO: + + // These are the critical properties + // 1. All validator sets on consumer are a validator set for provider for an earlier + // time, mapped through the effective mapping at that time. + // 2. It is always possible to fetch a local key, given a foreign key, if the foreign + // key is still known to the consumer + + // My thinking now is that I can test by doing the following + // If the trace TP increases than there is a new mapping and local updates + // the local updates aggregate to create a local validator set + // record that validator set, and the relevant mapping to time T=TP + // If TC increases to time T, can check the ACTUAL validator set in C + // It should be be possible to query kg for every validator foreign key + // in any intermediate val set in [TM+1, TP] + // It should not be possible to query kg for any validator that does not appear + // in any intermediate vla set in [0, TM] + for _, s := range trace { + if d.lastTP < s.TP { + // TODO: impl all endblock shenanigans + } + if d.lastTC < s.TC { + // TODO: do 'slash' checks + } + if d.lastTM < s.TM { + // prune and do slash checks + } + } +} + +func TestKeyDelegation(t *testing.T) { + traces := [][]TraceState{} + for _, trace := range traces { + d := Driver{} + d.runTrace(t, trace) + } } From 4475b4d9ba99d9260dadbe2a9ecb8b43afd8259a Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 10:07:49 +0100 Subject: [PATCH 015/127] prototype some test gen --- x/ccv/provider/keyguard/keyguard.go | 8 +- x/ccv/provider/keyguard/keyguard_test.go | 96 ++++++++++++++++++++++++ 2 files changed, 102 insertions(+), 2 deletions(-) diff --git a/x/ccv/provider/keyguard/keyguard.go b/x/ccv/provider/keyguard/keyguard.go index 7468204df3..ed8ec9a414 100644 --- a/x/ccv/provider/keyguard/keyguard.go +++ b/x/ccv/provider/keyguard/keyguard.go @@ -11,14 +11,18 @@ type update struct { power int } +// TODO: I need to integrate this into the keyStore +// TODO: I need to integrate this into the system +// TODO: I need to integrate with staking Create/Destroy validator + type KeyGuard struct { // A new key is added when a relevant update is returned by ComputeUpdates // the key is deleted at earliest after sending an update corresponding - // to a call to staking::DeleteValidator + // to a call to staking::DeleteValidator TODO: impl this localKeyToLastUpdate map[LK]update // A new key is added on staking::CreateValidator // the key is deleted at earliest after sending an update corresponding - // to a call to staking::DeleteValidator + // to a call to staking::DeleteValidator TODO: impl this localKeyToCurrentForeignKey map[LK]FK // Prunable state foreignKeyToLocalKey map[FK]LK diff --git a/x/ccv/provider/keyguard/keyguard_test.go b/x/ccv/provider/keyguard/keyguard_test.go index d9d8132fc1..42e7323b87 100644 --- a/x/ccv/provider/keyguard/keyguard_test.go +++ b/x/ccv/provider/keyguard/keyguard_test.go @@ -1,6 +1,7 @@ package keyguard import ( + "math/rand" "testing" ) @@ -51,6 +52,101 @@ func (d *Driver) runTrace(t *testing.T, trace []TraceState) { } } +func getTrace() []TraceState { + + NUM_VALS := 3 + NUM_FKS := 9 + + mapping := func() map[LK]FK { + // Create a mapping of nums [0, NUM_VALS] mapped injectively to [0, NUM_FKS] + ret := map[LK]FK{} + good := func() bool { + for i := 0; i < NUM_VALS-1; i++ { + if ret[i] 
== ret[i+1] { + return false + } + } + return true + } + for !good() { + for i := 0; i < NUM_VALS; i++ { + ret[i] = rand.Intn(NUM_FKS) + } + } + return ret + } + + localUpdates := func() []update { + ret := []update{} + include := rand.Intn(NUM_VALS + 1) + for _, i := range rand.Perm(NUM_VALS)[0:include] { + ret = append(ret, update{key: i, power: rand.Intn(3)}) + } + return ret + } + + ret := []TraceState{ + TraceState{ + Mapping: mapping(), + LocalUpdates: localUpdates(), + TP: 0, + TC: 0, + TM: 0, + }, + } + + for i := 0; i < 100; i++ { + choice := rand.Intn(3) + if choice == 0 { + ret = append(ret, TraceState{ + Mapping: mapping(), + LocalUpdates: localUpdates(), + TP: ret[i].TP + 1, + TC: ret[i].TC, + TM: ret[i].TM, + }) + } + if choice == 1 { + curr := ret[i].TC + limInclusive := ret[i].TP + if curr < limInclusive { + // add in [1, limInclusive - curr] + // rand in [0, limInclusive - curr - 1] + // bound is [0, limInclusive - curr) + newTC := rand.Intn(limInclusive-curr) + curr + 1 + if newTC <= curr || limInclusive < curr { + panic("bad choice 1") + } + ret = append(ret, TraceState{ + Mapping: ret[i].Mapping, + LocalUpdates: ret[i].LocalUpdates, + TP: ret[i].TP, + TC: newTC, + TM: ret[i].TM, + }) + } + } + if choice == 2 { + curr := ret[i].TM + limInclusive := ret[i].TC + if curr < limInclusive { + newTM := rand.Intn(limInclusive-curr) + curr + 1 + if newTM <= curr || limInclusive < curr { + panic("bad choice 2") + } + ret = append(ret, TraceState{ + Mapping: ret[i].Mapping, + LocalUpdates: ret[i].LocalUpdates, + TP: ret[i].TP, + TC: ret[i].TC, + TM: newTM, + }) + } + } + } + return ret +} + func TestKeyDelegation(t *testing.T) { traces := [][]TraceState{} for _, trace := range traces { From ec541912a68d76f0c42164171bdb7e4e86748ca0 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 10:07:53 +0100 Subject: [PATCH 016/127] model update --- x/ccv/provider/keyguard/prototyping/tla/main.tla | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/x/ccv/provider/keyguard/prototyping/tla/main.tla b/x/ccv/provider/keyguard/prototyping/tla/main.tla index 07ad0374bd..8b1033f77a 100644 --- a/x/ccv/provider/keyguard/prototyping/tla/main.tla +++ b/x/ccv/provider/keyguard/prototyping/tla/main.tla @@ -1,7 +1,6 @@ ---- MODULE main ---- EXTENDS Integers, FiniteSets, Sequences, TLC, Apalache -\* EXTENDS Integers, FiniteSets, Sequences, TLC (* @@ -14,11 +13,8 @@ EXTENDS Integers, FiniteSets, Sequences, TLC, Apalache TypeAliases == TRUE -CONSTANTS - \* @type: Set($lk); - LKS, - \* @type: Set($fk); - FKS +LKS == {"lk0", "lk1", "lk2"} +FKS == {"fk0", "fk1", "fk2", "fk3", "fk4", "fk5", "fk6", "fk7", "fk8"} VARIABLES \* @type: $mapping; @@ -32,10 +28,6 @@ VARIABLES \* @type: Int; TM -CInit == - /\ LKS = {"lk0", "lk1", "lk2"} - /\ FKS = {"fk0", "fk1", "fk2", "fk3", "fk4", "fk5", "fk6", "fk7", "fk8"} - Init == \E m \in [LKS -> FKS], ss \in SUBSET LKS: /\ \A a, b \in DOMAIN m : m[a] = m[b] => a = b @@ -75,4 +67,6 @@ Next == \/ UpdateConsumer \/ ReceiveMaturities +View == <> + ==== From 95157a95c6b37ad44fe80bbe27240393a8a8c9b6 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 10:09:21 +0100 Subject: [PATCH 017/127] pre del comment --- x/ccv/provider/keyguard/keyguard_test.go | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/x/ccv/provider/keyguard/keyguard_test.go b/x/ccv/provider/keyguard/keyguard_test.go index 42e7323b87..9de58f7b4b 100644 --- a/x/ccv/provider/keyguard/keyguard_test.go +++ b/x/ccv/provider/keyguard/keyguard_test.go @@ -147,6 +147,12 @@ func 
getTrace() []TraceState { return ret } +func TestPrototype(t *testing.T) { + trace := getTrace() + d := Driver{} + d.runTrace(t, trace) +} + func TestKeyDelegation(t *testing.T) { traces := [][]TraceState{} for _, trace := range traces { From 7a57f121eb668eb4fe2e629be3e7770e334f45f0 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 10:16:24 +0100 Subject: [PATCH 018/127] pre-tweak and del comment --- x/ccv/provider/keyguard/keyguard_test.go | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/x/ccv/provider/keyguard/keyguard_test.go b/x/ccv/provider/keyguard/keyguard_test.go index 9de58f7b4b..c1b1aa7b83 100644 --- a/x/ccv/provider/keyguard/keyguard_test.go +++ b/x/ccv/provider/keyguard/keyguard_test.go @@ -58,19 +58,22 @@ func getTrace() []TraceState { NUM_FKS := 9 mapping := func() map[LK]FK { + // TODO: currently I don't generate partial mappings but I might want to // Create a mapping of nums [0, NUM_VALS] mapped injectively to [0, NUM_FKS] ret := map[LK]FK{} good := func() bool { - for i := 0; i < NUM_VALS-1; i++ { - if ret[i] == ret[i+1] { + seen := map[FK]bool{} + for _, fk := range ret { + if _, ok := seen[fk]; ok { return false } + seen[fk] = true } return true } for !good() { - for i := 0; i < NUM_VALS; i++ { - ret[i] = rand.Intn(NUM_FKS) + for lk := 0; lk < NUM_VALS; lk++ { + ret[lk] = rand.Intn(NUM_FKS) } } return ret @@ -78,9 +81,10 @@ func getTrace() []TraceState { localUpdates := func() []update { ret := []update{} + // include 0 to all validators include := rand.Intn(NUM_VALS + 1) - for _, i := range rand.Perm(NUM_VALS)[0:include] { - ret = append(ret, update{key: i, power: rand.Intn(3)}) + for _, lk := range rand.Perm(NUM_VALS)[0:include] { + ret = append(ret, update{key: lk, power: rand.Intn(3)}) } return ret } From e6bb6235a8c0a286688f362721d7df53a8091a94 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 10:40:01 +0100 Subject: [PATCH 019/127] CP test --- x/ccv/provider/keyguard/keyguard_test.go | 88 +++++++++++++++++------- 1 file changed, 63 insertions(+), 25 deletions(-) diff --git a/x/ccv/provider/keyguard/keyguard_test.go b/x/ccv/provider/keyguard/keyguard_test.go index c1b1aa7b83..7991224007 100644 --- a/x/ccv/provider/keyguard/keyguard_test.go +++ b/x/ccv/provider/keyguard/keyguard_test.go @@ -14,41 +14,58 @@ type TraceState struct { } type Driver struct { - lastTP int - lastTC int - lastTM int + lastTP int + lastTC int + lastTM int + valSets []map[LK]int + mappings []map[LK]FK + foreignUpdates [][]update } func (d *Driver) runTrace(t *testing.T, trace []TraceState) { kg := KeyGuard{} - // TODO: - - // These are the critical properties - // 1. All validator sets on consumer are a validator set for provider for an earlier - // time, mapped through the effective mapping at that time. - // 2. 
It is always possible to fetch a local key, given a foreign key, if the foreign - // key is still known to the consumer - - // My thinking now is that I can test by doing the following - // If the trace TP increases than there is a new mapping and local updates - // the local updates aggregate to create a local validator set - // record that validator set, and the relevant mapping to time T=TP - // If TC increases to time T, can check the ACTUAL validator set in C - // It should be be possible to query kg for every validator foreign key - // in any intermediate val set in [TM+1, TP] - // It should not be possible to query kg for any validator that does not appear - // in any intermediate vla set in [0, TM] - for _, s := range trace { + + d.lastTP = 0 + d.lastTC = 0 + d.lastTM = 0 + d.valSets = []map[LK]int{} + d.mappings = []map[LK]FK{} + d.foreignUpdates = [][]update{} + + init := trace[0] + d.mappings = append(d.mappings, init.Mapping) + for lk, fk := range init.Mapping { + kg.SetForeignKey(lk, fk) + } + d.foreignUpdates = append(d.foreignUpdates, kg.ComputeUpdates(init.TP, init.LocalUpdates)) + d.valSets = append(d.valSets, map[LK]int{}) + for _, u := range init.LocalUpdates { + d.valSets[0][u.key] = u.power + } + + for _, s := range trace[1:] { if d.lastTP < s.TP { // TODO: impl all endblock shenanigans - + d.lastTP = s.TP + d.mappings = append(d.mappings, s.Mapping) + for lk, fk := range s.Mapping { + kg.SetForeignKey(lk, fk) + } + d.foreignUpdates = append(d.foreignUpdates, kg.ComputeUpdates(s.TP, s.LocalUpdates)) } if d.lastTC < s.TC { + for i := d.lastTC + 1; i <= s.TC; i++ { + // TODO: forward d.foreignUpdates[i] to the consumer logic + } + // use foreign updates ini range // TODO: do 'slash' checks } if d.lastTM < s.TM { - // prune and do slash checks + // TODO: careful of meaning of TM because + // it is initialised to 0 but actually 0 has not matured + // TODO: prune up to TM } + // TODO: check properties } } @@ -90,7 +107,7 @@ func getTrace() []TraceState { } ret := []TraceState{ - TraceState{ + { Mapping: mapping(), LocalUpdates: localUpdates(), TP: 0, @@ -152,7 +169,10 @@ func getTrace() []TraceState { } func TestPrototype(t *testing.T) { - trace := getTrace() + trace := []TraceState{} + for len(trace) < 2 { + trace = getTrace() + } d := Driver{} d.runTrace(t, trace) } @@ -164,3 +184,21 @@ func TestKeyDelegation(t *testing.T) { d.runTrace(t, trace) } } + +// TODO: + +// These are the critical properties +// 1. All validator sets on consumer are a validator set for provider for an earlier +// time, mapped through the effective mapping at that time. +// 2. 
It is always possible to fetch a local key, given a foreign key, if the foreign +// key is still known to the consumer + +// My thinking now is that I can test by doing the following +// If the trace TP increases than there is a new mapping and local updates +// the local updates aggregate to create a local validator set +// record that validator set, and the relevant mapping to time T=TP +// If TC increases to time T, can check the ACTUAL validator set in C +// It should be be possible to query kg for every validator foreign key +// in any intermediate val set in [TM+1, TP] +// It should not be possible to query kg for any validator that does not appear +// in any intermediate vla set in [0, TM] From ec727e7d5824fdca28bc0d73bddd84163d5cff3e Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 10:50:38 +0100 Subject: [PATCH 020/127] CP keyguard test with fakeConsumer --- x/ccv/provider/keyguard/keyguard_test.go | 21 +++++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/x/ccv/provider/keyguard/keyguard_test.go b/x/ccv/provider/keyguard/keyguard_test.go index 7991224007..26afab41e7 100644 --- a/x/ccv/provider/keyguard/keyguard_test.go +++ b/x/ccv/provider/keyguard/keyguard_test.go @@ -20,6 +20,20 @@ type Driver struct { valSets []map[LK]int mappings []map[LK]FK foreignUpdates [][]update + fakeConsumer FakeConsumer +} + +type FakeConsumer struct { + valSet map[FK]int +} + +func (f *FakeConsumer) processUpdates(updates []update) { + for _, u := range updates { + delete(f.valSet, u.key) + if 0 < u.power { + f.valSet[u.key] = u.power + } + } } func (d *Driver) runTrace(t *testing.T, trace []TraceState) { @@ -31,6 +45,8 @@ func (d *Driver) runTrace(t *testing.T, trace []TraceState) { d.valSets = []map[LK]int{} d.mappings = []map[LK]FK{} d.foreignUpdates = [][]update{} + d.fakeConsumer = FakeConsumer{} + d.fakeConsumer.valSet = map[FK]int{} init := trace[0] d.mappings = append(d.mappings, init.Mapping) @@ -45,7 +61,6 @@ func (d *Driver) runTrace(t *testing.T, trace []TraceState) { for _, s := range trace[1:] { if d.lastTP < s.TP { - // TODO: impl all endblock shenanigans d.lastTP = s.TP d.mappings = append(d.mappings, s.Mapping) for lk, fk := range s.Mapping { @@ -55,10 +70,8 @@ func (d *Driver) runTrace(t *testing.T, trace []TraceState) { } if d.lastTC < s.TC { for i := d.lastTC + 1; i <= s.TC; i++ { - // TODO: forward d.foreignUpdates[i] to the consumer logic + d.fakeConsumer.processUpdates(d.foreignUpdates[i]) } - // use foreign updates ini range - // TODO: do 'slash' checks } if d.lastTM < s.TM { // TODO: careful of meaning of TM because From 0b048dc1ea5d1e4d2eda55c9320706ec0e6cc02e Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 11:06:05 +0100 Subject: [PATCH 021/127] Add property check, start testing --- x/ccv/provider/keyguard/keyguard_test.go | 43 +++++++++++++++++++++--- 1 file changed, 38 insertions(+), 5 deletions(-) diff --git a/x/ccv/provider/keyguard/keyguard_test.go b/x/ccv/provider/keyguard/keyguard_test.go index 26afab41e7..5b75e8d6e3 100644 --- a/x/ccv/provider/keyguard/keyguard_test.go +++ b/x/ccv/provider/keyguard/keyguard_test.go @@ -58,30 +58,63 @@ func (d *Driver) runTrace(t *testing.T, trace []TraceState) { for _, u := range init.LocalUpdates { d.valSets[0][u.key] = u.power } + kg.Prune(0) for _, s := range trace[1:] { if d.lastTP < s.TP { - d.lastTP = s.TP d.mappings = append(d.mappings, s.Mapping) for lk, fk := range s.Mapping { kg.SetForeignKey(lk, fk) } d.foreignUpdates = append(d.foreignUpdates, kg.ComputeUpdates(s.TP, 
s.LocalUpdates)) + d.lastTP = s.TP } if d.lastTC < s.TC { for i := d.lastTC + 1; i <= s.TC; i++ { d.fakeConsumer.processUpdates(d.foreignUpdates[i]) } + d.lastTC = s.TC } if d.lastTM < s.TM { - // TODO: careful of meaning of TM because - // it is initialised to 0 but actually 0 has not matured - // TODO: prune up to TM + // TODO: check this because TM is initialised to 0 but 0 has not actually matured + // TODO: I think one solution IS TO ACTUALLY prune 0 in init + kg.Prune(s.TM) + d.lastTM = s.TM } + d.checkProperties(t) // TODO: check properties } } +func (d *Driver) checkProperties(t *testing.T) { + // Check that the valSet on the fake consumer is the valSet + // on the provider at time TC via inverse mapping + foreignSet := d.fakeConsumer.valSet + localSet := d.valSets[d.lastTC] + mapping := d.mappings[d.lastTC] + inverseMapping := map[FK]LK{} + for lk, fk := range mapping { + inverseMapping[fk] = lk + } + foreignSetAsLocal := map[LK]int{} + for fk, power := range foreignSet { + foreignSetAsLocal[inverseMapping[fk]] = power + } + for lk, actual := range foreignSetAsLocal { + expect := localSet[lk] + if expect != actual { + t.Fatalf("[A]") + } + } + for lk, expect := range localSet { + actual := foreignSetAsLocal[lk] + if expect != actual { + t.Fatalf("[B]") + } + } + +} + func getTrace() []TraceState { NUM_VALS := 3 @@ -103,7 +136,7 @@ func getTrace() []TraceState { } for !good() { for lk := 0; lk < NUM_VALS; lk++ { - ret[lk] = rand.Intn(NUM_FKS) + ret[lk] = rand.Intn(NUM_FKS) + 100 } } return ret From dceae78412f60e2cbd429c95892c60c40adedbd2 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 11:13:42 +0100 Subject: [PATCH 022/127] pre rework valsets --- x/ccv/provider/keyguard/keyguard_test.go | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/x/ccv/provider/keyguard/keyguard_test.go b/x/ccv/provider/keyguard/keyguard_test.go index 5b75e8d6e3..65a8bf5002 100644 --- a/x/ccv/provider/keyguard/keyguard_test.go +++ b/x/ccv/provider/keyguard/keyguard_test.go @@ -14,6 +14,7 @@ type TraceState struct { } type Driver struct { + trace []TraceState lastTP int lastTC int lastTM int @@ -36,8 +37,8 @@ func (f *FakeConsumer) processUpdates(updates []update) { } } -func (d *Driver) runTrace(t *testing.T, trace []TraceState) { - kg := KeyGuard{} +func (d *Driver) runTrace(t *testing.T) { + kg := MakeKeyGuard() d.lastTP = 0 d.lastTC = 0 @@ -48,7 +49,7 @@ func (d *Driver) runTrace(t *testing.T, trace []TraceState) { d.fakeConsumer = FakeConsumer{} d.fakeConsumer.valSet = map[FK]int{} - init := trace[0] + init := d.trace[0] d.mappings = append(d.mappings, init.Mapping) for lk, fk := range init.Mapping { kg.SetForeignKey(lk, fk) @@ -60,12 +61,15 @@ func (d *Driver) runTrace(t *testing.T, trace []TraceState) { } kg.Prune(0) - for _, s := range trace[1:] { + for _, s := range d.trace[1:] { if d.lastTP < s.TP { d.mappings = append(d.mappings, s.Mapping) for lk, fk := range s.Mapping { kg.SetForeignKey(lk, fk) } + for _, u := range s.LocalUpdates { + d.valSets + } d.foreignUpdates = append(d.foreignUpdates, kg.ComputeUpdates(s.TP, s.LocalUpdates)) d.lastTP = s.TP } @@ -162,7 +166,8 @@ func getTrace() []TraceState { }, } - for i := 0; i < 100; i++ { + i := 0 + for i < 100 { choice := rand.Intn(3) if choice == 0 { ret = append(ret, TraceState{ @@ -172,6 +177,7 @@ func getTrace() []TraceState { TC: ret[i].TC, TM: ret[i].TM, }) + i++ } if choice == 1 { curr := ret[i].TC @@ -191,6 +197,7 @@ func getTrace() []TraceState { TC: newTC, TM: ret[i].TM, }) + i++ } } if 
choice == 2 { @@ -208,6 +215,7 @@ func getTrace() []TraceState { TC: ret[i].TC, TM: newTM, }) + i++ } } } @@ -220,14 +228,16 @@ func TestPrototype(t *testing.T) { trace = getTrace() } d := Driver{} - d.runTrace(t, trace) + d.trace = trace + d.runTrace(t) } func TestKeyDelegation(t *testing.T) { traces := [][]TraceState{} for _, trace := range traces { d := Driver{} - d.runTrace(t, trace) + d.trace = trace + d.runTrace(t) } } From d7e527a6b6baf9b9b122e885ace028d99ec4d7c0 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 11:28:40 +0100 Subject: [PATCH 023/127] Refactor test --- x/ccv/provider/keyguard/keyguard_test.go | 84 +++++++++++++----------- 1 file changed, 47 insertions(+), 37 deletions(-) diff --git a/x/ccv/provider/keyguard/keyguard_test.go b/x/ccv/provider/keyguard/keyguard_test.go index 65a8bf5002..2d00bff44d 100644 --- a/x/ccv/provider/keyguard/keyguard_test.go +++ b/x/ccv/provider/keyguard/keyguard_test.go @@ -3,6 +3,8 @@ package keyguard import ( "math/rand" "testing" + + "github.com/stretchr/testify/require" ) type TraceState struct { @@ -14,25 +16,31 @@ type TraceState struct { } type Driver struct { - trace []TraceState - lastTP int - lastTC int - lastTM int - valSets []map[LK]int - mappings []map[LK]FK - foreignUpdates [][]update - fakeConsumer FakeConsumer + trace []TraceState + lastTP int + lastTC int + lastTM int + // indexed by TP + mappings []map[LK]FK + foreignUpdates [][]update + providerValSets []ValSet + // corresponds to TC + consumerValSet ValSet +} + +type ValSet struct { + keyToPower map[int]int } -type FakeConsumer struct { - valSet map[FK]int +func MakeValSet() ValSet { + return ValSet{keyToPower: map[int]int{}} } -func (f *FakeConsumer) processUpdates(updates []update) { +func (vs *ValSet) processUpdates(updates []update) { for _, u := range updates { - delete(f.valSet, u.key) + delete(vs.keyToPower, u.key) if 0 < u.power { - f.valSet[u.key] = u.power + vs.keyToPower[u.key] = u.power } } } @@ -43,11 +51,10 @@ func (d *Driver) runTrace(t *testing.T) { d.lastTP = 0 d.lastTC = 0 d.lastTM = 0 - d.valSets = []map[LK]int{} d.mappings = []map[LK]FK{} d.foreignUpdates = [][]update{} - d.fakeConsumer = FakeConsumer{} - d.fakeConsumer.valSet = map[FK]int{} + d.providerValSets = []ValSet{} + d.consumerValSet = MakeValSet() init := d.trace[0] d.mappings = append(d.mappings, init.Mapping) @@ -55,27 +62,35 @@ func (d *Driver) runTrace(t *testing.T) { kg.SetForeignKey(lk, fk) } d.foreignUpdates = append(d.foreignUpdates, kg.ComputeUpdates(init.TP, init.LocalUpdates)) - d.valSets = append(d.valSets, map[LK]int{}) - for _, u := range init.LocalUpdates { - d.valSets[0][u.key] = u.power - } - kg.Prune(0) + d.providerValSets = append(d.providerValSets, MakeValSet()) + d.providerValSets[init.TP].processUpdates(init.LocalUpdates) + kg.Prune(init.TM) - for _, s := range d.trace[1:] { + require.Len(t, d.mappings, 1) + require.Len(t, d.foreignUpdates, 1) + require.Len(t, d.providerValSets, 1) + + for i, s := range d.trace { + if i < 1 { + continue + } if d.lastTP < s.TP { d.mappings = append(d.mappings, s.Mapping) + d.providerValSets = append(d.providerValSets, MakeValSet()) + for lk, power := range d.providerValSets[i-1].keyToPower { + d.providerValSets[i].keyToPower[lk] = power + } + d.providerValSets[i].processUpdates(s.LocalUpdates) + d.lastTP = s.TP + for lk, fk := range s.Mapping { kg.SetForeignKey(lk, fk) } - for _, u := range s.LocalUpdates { - d.valSets - } d.foreignUpdates = append(d.foreignUpdates, kg.ComputeUpdates(s.TP, s.LocalUpdates)) - d.lastTP = s.TP } if 
d.lastTC < s.TC { - for i := d.lastTC + 1; i <= s.TC; i++ { - d.fakeConsumer.processUpdates(d.foreignUpdates[i]) + for j := d.lastTC + 1; j <= s.TC; j++ { + d.consumerValSet.processUpdates(d.foreignUpdates[j]) } d.lastTC = s.TC } @@ -86,15 +101,14 @@ func (d *Driver) runTrace(t *testing.T) { d.lastTM = s.TM } d.checkProperties(t) - // TODO: check properties } } func (d *Driver) checkProperties(t *testing.T) { // Check that the valSet on the fake consumer is the valSet // on the provider at time TC via inverse mapping - foreignSet := d.fakeConsumer.valSet - localSet := d.valSets[d.lastTC] + foreignSet := d.consumerValSet.keyToPower + localSet := d.providerValSets[d.lastTC].keyToPower mapping := d.mappings[d.lastTC] inverseMapping := map[FK]LK{} for lk, fk := range mapping { @@ -106,15 +120,11 @@ func (d *Driver) checkProperties(t *testing.T) { } for lk, actual := range foreignSetAsLocal { expect := localSet[lk] - if expect != actual { - t.Fatalf("[A]") - } + require.Equal(t, expect, actual) } for lk, expect := range localSet { actual := foreignSetAsLocal[lk] - if expect != actual { - t.Fatalf("[B]") - } + require.Equal(t, expect, actual) } } From 7f3e81211008fad8fed6c7efe563b2abab7a65a9 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 11:36:09 +0100 Subject: [PATCH 024/127] cp --- x/ccv/provider/keyguard/keyguard_test.go | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/x/ccv/provider/keyguard/keyguard_test.go b/x/ccv/provider/keyguard/keyguard_test.go index 2d00bff44d..ebf942eed6 100644 --- a/x/ccv/provider/keyguard/keyguard_test.go +++ b/x/ccv/provider/keyguard/keyguard_test.go @@ -61,9 +61,12 @@ func (d *Driver) runTrace(t *testing.T) { for lk, fk := range init.Mapping { kg.SetForeignKey(lk, fk) } - d.foreignUpdates = append(d.foreignUpdates, kg.ComputeUpdates(init.TP, init.LocalUpdates)) + // Set the initial provider set d.providerValSets = append(d.providerValSets, MakeValSet()) d.providerValSets[init.TP].processUpdates(init.LocalUpdates) + // Set the initial consumer set + d.foreignUpdates = append(d.foreignUpdates, kg.ComputeUpdates(init.TP, init.LocalUpdates)) + d.consumerValSet.processUpdates(d.foreignUpdates[init.TC]) kg.Prune(init.TM) require.Len(t, d.mappings, 1) @@ -127,10 +130,12 @@ func (d *Driver) checkProperties(t *testing.T) { require.Equal(t, expect, actual) } + // TODO: check pruning and reverse queries } func getTrace() []TraceState { + TRACE_LEN := 2 NUM_VALS := 3 NUM_FKS := 9 @@ -177,7 +182,7 @@ func getTrace() []TraceState { } i := 0 - for i < 100 { + for i < TRACE_LEN { choice := rand.Intn(3) if choice == 0 { ret = append(ret, TraceState{ From 420cd07f531a0b51494e4e356dacfcd26505f3d8 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 11:37:24 +0100 Subject: [PATCH 025/127] rn --- x/ccv/provider/keyguard/keyguard_test.go | 42 ++++++++++++------------ 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/x/ccv/provider/keyguard/keyguard_test.go b/x/ccv/provider/keyguard/keyguard_test.go index ebf942eed6..708ea66440 100644 --- a/x/ccv/provider/keyguard/keyguard_test.go +++ b/x/ccv/provider/keyguard/keyguard_test.go @@ -21,11 +21,11 @@ type Driver struct { lastTC int lastTM int // indexed by TP - mappings []map[LK]FK - foreignUpdates [][]update - providerValSets []ValSet + mappings []map[LK]FK + foreignUpdates [][]update + localValSets []ValSet // corresponds to TC - consumerValSet ValSet + foreignValSet ValSet } type ValSet struct { @@ -53,25 +53,25 @@ func (d *Driver) runTrace(t *testing.T) { d.lastTM = 0 
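	// The slices mappings, foreignUpdates and localValSets below are indexed by
	// provider time TP, foreignValSet mirrors the consumer's view at time TC,
	// and TM is the most recently matured vscid handed to Prune.
	//
	// Rough illustration with made-up values: if at TP = 1 the trace carries
	// Mapping {0: 7} and LocalUpdates [{key: 0, power: 2}], and validator 0 had
	// no earlier entry, the driver records localValSets[1] = {0: 2} and expects
	// ComputeUpdates to return foreignUpdates[1] = [{key: 7, power: 2}]; once
	// TC reaches 1, foreignValSet becomes {7: 2}, which is localValSets[1]
	// mapped through mappings[1], exactly what checkProperties verifies via the
	// inverse mapping.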
d.mappings = []map[LK]FK{} d.foreignUpdates = [][]update{} - d.providerValSets = []ValSet{} - d.consumerValSet = MakeValSet() + d.localValSets = []ValSet{} + d.foreignValSet = MakeValSet() init := d.trace[0] d.mappings = append(d.mappings, init.Mapping) for lk, fk := range init.Mapping { kg.SetForeignKey(lk, fk) } - // Set the initial provider set - d.providerValSets = append(d.providerValSets, MakeValSet()) - d.providerValSets[init.TP].processUpdates(init.LocalUpdates) - // Set the initial consumer set + // Set the initial local set + d.localValSets = append(d.localValSets, MakeValSet()) + d.localValSets[init.TP].processUpdates(init.LocalUpdates) + // Set the initial foreign set d.foreignUpdates = append(d.foreignUpdates, kg.ComputeUpdates(init.TP, init.LocalUpdates)) - d.consumerValSet.processUpdates(d.foreignUpdates[init.TC]) + d.foreignValSet.processUpdates(d.foreignUpdates[init.TC]) kg.Prune(init.TM) require.Len(t, d.mappings, 1) require.Len(t, d.foreignUpdates, 1) - require.Len(t, d.providerValSets, 1) + require.Len(t, d.localValSets, 1) for i, s := range d.trace { if i < 1 { @@ -79,11 +79,11 @@ func (d *Driver) runTrace(t *testing.T) { } if d.lastTP < s.TP { d.mappings = append(d.mappings, s.Mapping) - d.providerValSets = append(d.providerValSets, MakeValSet()) - for lk, power := range d.providerValSets[i-1].keyToPower { - d.providerValSets[i].keyToPower[lk] = power + d.localValSets = append(d.localValSets, MakeValSet()) + for lk, power := range d.localValSets[i-1].keyToPower { + d.localValSets[i].keyToPower[lk] = power } - d.providerValSets[i].processUpdates(s.LocalUpdates) + d.localValSets[i].processUpdates(s.LocalUpdates) d.lastTP = s.TP for lk, fk := range s.Mapping { @@ -93,7 +93,7 @@ func (d *Driver) runTrace(t *testing.T) { } if d.lastTC < s.TC { for j := d.lastTC + 1; j <= s.TC; j++ { - d.consumerValSet.processUpdates(d.foreignUpdates[j]) + d.foreignValSet.processUpdates(d.foreignUpdates[j]) } d.lastTC = s.TC } @@ -108,10 +108,10 @@ func (d *Driver) runTrace(t *testing.T) { } func (d *Driver) checkProperties(t *testing.T) { - // Check that the valSet on the fake consumer is the valSet - // on the provider at time TC via inverse mapping - foreignSet := d.consumerValSet.keyToPower - localSet := d.providerValSets[d.lastTC].keyToPower + // Check that the foreign ValSet is equal to the local ValSet + // at time TC via inverse mapping + foreignSet := d.foreignValSet.keyToPower + localSet := d.localValSets[d.lastTC].keyToPower mapping := d.mappings[d.lastTC] inverseMapping := map[FK]LK{} for lk, fk := range mapping { From 549d9afd4c7482272580627cbaa65c284c2ea371 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 12:33:36 +0100 Subject: [PATCH 026/127] CP --- x/ccv/provider/keyguard/keyguard.go | 84 ++++++++++++------------ x/ccv/provider/keyguard/keyguard_test.go | 76 +++++++++++---------- 2 files changed, 84 insertions(+), 76 deletions(-) diff --git a/x/ccv/provider/keyguard/keyguard.go b/x/ccv/provider/keyguard/keyguard.go index ed8ec9a414..8c0d5a3d39 100644 --- a/x/ccv/provider/keyguard/keyguard.go +++ b/x/ccv/provider/keyguard/keyguard.go @@ -19,97 +19,99 @@ type KeyGuard struct { // A new key is added when a relevant update is returned by ComputeUpdates // the key is deleted at earliest after sending an update corresponding // to a call to staking::DeleteValidator TODO: impl this - localKeyToLastUpdate map[LK]update + localToLastUpdate map[LK]update // A new key is added on staking::CreateValidator // the key is deleted at earliest after sending an update 
corresponding // to a call to staking::DeleteValidator TODO: impl this - localKeyToCurrentForeignKey map[LK]FK + localToForeign map[LK]FK // Prunable state - foreignKeyToLocalKey map[FK]LK + foreignToLocal map[FK]LK // Prunable state - foreignKeyToVscidWhenLastSent map[FK]VSCID + foreignToGreatestVSCID map[FK]VSCID // Ephemeral state: will be cleared after each call to ComputeUpdates - localKeysForWhichUpdateMustBeSent []LK + localsWhichMustUpdate []LK } func MakeKeyGuard() KeyGuard { return KeyGuard{ - localKeyToLastUpdate: map[LK]update{}, - localKeyToCurrentForeignKey: map[LK]FK{}, - foreignKeyToLocalKey: map[FK]LK{}, - foreignKeyToVscidWhenLastSent: map[FK]VSCID{}, - localKeysForWhichUpdateMustBeSent: []LK{}, + localToLastUpdate: map[LK]update{}, + localToForeign: map[LK]FK{}, + foreignToLocal: map[FK]LK{}, + foreignToGreatestVSCID: map[FK]VSCID{}, + localsWhichMustUpdate: []LK{}, } } -func (m *KeyGuard) SetForeignKey(lk LK, fk FK) { - if currFk, ok := m.localKeyToCurrentForeignKey[lk]; ok { +func (m *KeyGuard) SetLocalToForeign(lk LK, fk FK) { + if currFk, ok := m.localToForeign[lk]; ok { if currFk == fk { return } } - m.localKeyToCurrentForeignKey[lk] = fk - if u, ok := m.localKeyToLastUpdate[lk]; ok { + m.localToForeign[lk] = fk + // If an update was created for lk + if u, ok := m.localToLastUpdate[lk]; ok { + // If that update was not a deletion if 0 < u.power { - // If last update had positive power then the consumer is aware of the old key - // so a deletion update must be sent. - m.localKeysForWhichUpdateMustBeSent = append(m.localKeysForWhichUpdateMustBeSent, lk) + // We must create an update + m.localsWhichMustUpdate = append(m.localsWhichMustUpdate, lk) } } } -func (m *KeyGuard) GetLocalKey(fk FK) (LK, error) { - if lk, ok := m.foreignKeyToLocalKey[fk]; ok { +func (m *KeyGuard) GetLocal(fk FK) (LK, error) { + if lk, ok := m.foreignToLocal[fk]; ok { return lk, nil } else { - return -1, errors.New("nope") - } -} - -func (m *KeyGuard) Prune(mostRecentlyMaturedVscid VSCID) { - toRemove := []FK{} - for fk, vscid := range m.foreignKeyToVscidWhenLastSent { - if vscid <= mostRecentlyMaturedVscid { - toRemove = append(toRemove, fk) - } - } - for _, fk := range toRemove { - delete(m.foreignKeyToVscidWhenLastSent, fk) - delete(m.foreignKeyToLocalKey, fk) + return -1, errors.New("Nope") } } func (m *KeyGuard) ComputeUpdates(vscid VSCID, localUpdates []update) (foreignUpdates []update) { + foreignUpdates = []update{} // Create any updates for validators whose power did not change - for _, lk := range m.localKeysForWhichUpdateMustBeSent { - currKey := m.localKeyToCurrentForeignKey[lk] - u := m.localKeyToLastUpdate[lk] + for _, lk := range m.localsWhichMustUpdate { + currKey := m.localToForeign[lk] + u := m.localToLastUpdate[lk] // Create an update which will delete the validator for the old key foreignUpdates = append(foreignUpdates, update{key: u.key, power: 0}) // Create an update which will add the validator for the new key foreignUpdates = append(foreignUpdates, update{key: currKey, power: u.power}) } - m.localKeysForWhichUpdateMustBeSent = []LK{} + m.localsWhichMustUpdate = []LK{} // Create any updates for validators whose powers did change for _, u := range localUpdates { // Check if the consumer has an old key - if lastU, ok := m.localKeyToLastUpdate[u.key]; ok { + if lastU, ok := m.localToLastUpdate[u.key]; ok { // Create an update which will delete the validator for the old key foreignUpdates = append(foreignUpdates, update{key: lastU.key, power: 0}) } - currKey := 
m.localKeyToCurrentForeignKey[u.key] + currKey := m.localToForeign[u.key] // Create an update which will add/update the validator for the current key foreignUpdates = append(foreignUpdates, update{key: currKey, power: u.power}) } // Update internal bookkeeping for _, u := range foreignUpdates { - m.foreignKeyToVscidWhenLastSent[u.key] = vscid - m.localKeyToLastUpdate[m.foreignKeyToLocalKey[u.key]] = u + m.foreignToGreatestVSCID[u.key] = vscid + m.localToLastUpdate[m.foreignToLocal[u.key]] = u } return foreignUpdates } + +func (m *KeyGuard) Prune(mostRecentlyMaturedVscid VSCID) { + toRemove := []FK{} + for fk, vscid := range m.foreignToGreatestVSCID { + if vscid <= mostRecentlyMaturedVscid { + toRemove = append(toRemove, fk) + } + } + for _, fk := range toRemove { + delete(m.foreignToGreatestVSCID, fk) + delete(m.foreignToLocal, fk) + } +} diff --git a/x/ccv/provider/keyguard/keyguard_test.go b/x/ccv/provider/keyguard/keyguard_test.go index 708ea66440..855879d4eb 100644 --- a/x/ccv/provider/keyguard/keyguard_test.go +++ b/x/ccv/provider/keyguard/keyguard_test.go @@ -59,7 +59,7 @@ func (d *Driver) runTrace(t *testing.T) { init := d.trace[0] d.mappings = append(d.mappings, init.Mapping) for lk, fk := range init.Mapping { - kg.SetForeignKey(lk, fk) + kg.SetLocalToForeign(lk, fk) } // Set the initial local set d.localValSets = append(d.localValSets, MakeValSet()) @@ -87,7 +87,7 @@ func (d *Driver) runTrace(t *testing.T) { d.lastTP = s.TP for lk, fk := range s.Mapping { - kg.SetForeignKey(lk, fk) + kg.SetLocalToForeign(lk, fk) } d.foreignUpdates = append(d.foreignUpdates, kg.ComputeUpdates(s.TP, s.LocalUpdates)) } @@ -133,9 +133,9 @@ func (d *Driver) checkProperties(t *testing.T) { // TODO: check pruning and reverse queries } -func getTrace() []TraceState { +func getTrace(t *testing.T) []TraceState { - TRACE_LEN := 2 + TRACE_LEN := 3 NUM_VALS := 3 NUM_FKS := 9 @@ -144,6 +144,9 @@ func getTrace() []TraceState { // Create a mapping of nums [0, NUM_VALS] mapped injectively to [0, NUM_FKS] ret := map[LK]FK{} good := func() bool { + if len(ret) != NUM_VALS { + return false + } seen := map[FK]bool{} for _, fk := range ret { if _, ok := seen[fk]; ok { @@ -181,70 +184,73 @@ func getTrace() []TraceState { }, } - i := 0 + i := 1 for i < TRACE_LEN { choice := rand.Intn(3) + last := ret[len(ret)-1] + good := false if choice == 0 { ret = append(ret, TraceState{ Mapping: mapping(), LocalUpdates: localUpdates(), - TP: ret[i].TP + 1, - TC: ret[i].TC, - TM: ret[i].TM, + TP: last.TP + 1, + TC: last.TC, + TM: last.TM, }) - i++ + good = true } if choice == 1 { - curr := ret[i].TC - limInclusive := ret[i].TP + curr := last.TC + limInclusive := last.TP if curr < limInclusive { // add in [1, limInclusive - curr] // rand in [0, limInclusive - curr - 1] // bound is [0, limInclusive - curr) newTC := rand.Intn(limInclusive-curr) + curr + 1 - if newTC <= curr || limInclusive < curr { - panic("bad choice 1") - } + require.True(t, curr < newTC && curr <= limInclusive) ret = append(ret, TraceState{ - Mapping: ret[i].Mapping, - LocalUpdates: ret[i].LocalUpdates, - TP: ret[i].TP, + Mapping: nil, + LocalUpdates: nil, + TP: last.TP, TC: newTC, - TM: ret[i].TM, + TM: last.TM, }) - i++ + good = true } } if choice == 2 { - curr := ret[i].TM - limInclusive := ret[i].TC + curr := last.TM + limInclusive := last.TC if curr < limInclusive { newTM := rand.Intn(limInclusive-curr) + curr + 1 - if newTM <= curr || limInclusive < curr { - panic("bad choice 2") - } + require.True(t, curr < newTM && curr <= limInclusive) ret = append(ret, 
TraceState{ - Mapping: ret[i].Mapping, - LocalUpdates: ret[i].LocalUpdates, - TP: ret[i].TP, - TC: ret[i].TC, + Mapping: nil, + LocalUpdates: nil, + TP: last.TP, + TC: last.TC, TM: newTM, }) - i++ + good = true } } + if good { + i++ + } } return ret } func TestPrototype(t *testing.T) { - trace := []TraceState{} - for len(trace) < 2 { - trace = getTrace() + for i := 0; i < 1000; i++ { + trace := []TraceState{} + for len(trace) < 2 { + trace = getTrace(t) + } + d := Driver{} + d.trace = trace + d.runTrace(t) } - d := Driver{} - d.trace = trace - d.runTrace(t) } func TestKeyDelegation(t *testing.T) { From 411928c089cfc730eb492a5f0300b62c52e30d4a Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 12:39:46 +0100 Subject: [PATCH 027/127] cp keyguard --- x/ccv/provider/keyguard/keyguard.go | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/x/ccv/provider/keyguard/keyguard.go b/x/ccv/provider/keyguard/keyguard.go index 8c0d5a3d39..2fcadea56a 100644 --- a/x/ccv/provider/keyguard/keyguard.go +++ b/x/ccv/provider/keyguard/keyguard.go @@ -43,11 +43,6 @@ func MakeKeyGuard() KeyGuard { } func (m *KeyGuard) SetLocalToForeign(lk LK, fk FK) { - if currFk, ok := m.localToForeign[lk]; ok { - if currFk == fk { - return - } - } m.localToForeign[lk] = fk // If an update was created for lk if u, ok := m.localToLastUpdate[lk]; ok { @@ -67,18 +62,19 @@ func (m *KeyGuard) GetLocal(fk FK) (LK, error) { } } -func (m *KeyGuard) ComputeUpdates(vscid VSCID, localUpdates []update) (foreignUpdates []update) { +func (m *KeyGuard) ComputeUpdates(vscid VSCID, localUpdates []update) []update { - foreignUpdates = []update{} + foreignUpdates := map[FK]int{} - // Create any updates for validators whose power did not change + // Create updates for any locals whose foreign key changed + // NOTE: this includes the case of updating to the same foreign key for _, lk := range m.localsWhichMustUpdate { - currKey := m.localToForeign[lk] + fk := m.localToForeign[lk] u := m.localToLastUpdate[lk] // Create an update which will delete the validator for the old key - foreignUpdates = append(foreignUpdates, update{key: u.key, power: 0}) + foreignUpdates[u.key] = 0 // Create an update which will add the validator for the new key - foreignUpdates = append(foreignUpdates, update{key: currKey, power: u.power}) + foreignUpdates[fk] = u.power } m.localsWhichMustUpdate = []LK{} From 67c136cd588964460a3d87bc43317d6704baa42d Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 12:48:05 +0100 Subject: [PATCH 028/127] CP --- x/ccv/provider/keyguard/keyguard.go | 44 ++++++++++++++--------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/x/ccv/provider/keyguard/keyguard.go b/x/ccv/provider/keyguard/keyguard.go index 2fcadea56a..b32ccc8fc7 100644 --- a/x/ccv/provider/keyguard/keyguard.go +++ b/x/ccv/provider/keyguard/keyguard.go @@ -19,7 +19,7 @@ type KeyGuard struct { // A new key is added when a relevant update is returned by ComputeUpdates // the key is deleted at earliest after sending an update corresponding // to a call to staking::DeleteValidator TODO: impl this - localToLastUpdate map[LK]update + localToLastForeignUpdate map[LK]update // A new key is added on staking::CreateValidator // the key is deleted at earliest after sending an update corresponding // to a call to staking::DeleteValidator TODO: impl this @@ -34,18 +34,18 @@ type KeyGuard struct { func MakeKeyGuard() KeyGuard { return KeyGuard{ - localToLastUpdate: map[LK]update{}, - localToForeign: map[LK]FK{}, - 
foreignToLocal: map[FK]LK{}, - foreignToGreatestVSCID: map[FK]VSCID{}, - localsWhichMustUpdate: []LK{}, + localToLastForeignUpdate: map[LK]update{}, + localToForeign: map[LK]FK{}, + foreignToLocal: map[FK]LK{}, + foreignToGreatestVSCID: map[FK]VSCID{}, + localsWhichMustUpdate: []LK{}, } } func (m *KeyGuard) SetLocalToForeign(lk LK, fk FK) { m.localToForeign[lk] = fk // If an update was created for lk - if u, ok := m.localToLastUpdate[lk]; ok { + if u, ok := m.localToLastForeignUpdate[lk]; ok { // If that update was not a deletion if 0 < u.power { // We must create an update @@ -70,33 +70,33 @@ func (m *KeyGuard) ComputeUpdates(vscid VSCID, localUpdates []update) []update { // NOTE: this includes the case of updating to the same foreign key for _, lk := range m.localsWhichMustUpdate { fk := m.localToForeign[lk] - u := m.localToLastUpdate[lk] + lastForeignUpdate := m.localToLastForeignUpdate[lk] // Create an update which will delete the validator for the old key - foreignUpdates[u.key] = 0 + foreignUpdates[lastForeignUpdate.key] = 0 // Create an update which will add the validator for the new key - foreignUpdates[fk] = u.power + foreignUpdates[fk] = lastForeignUpdate.power } m.localsWhichMustUpdate = []LK{} - // Create any updates for validators whose powers did change - for _, u := range localUpdates { - // Check if the consumer has an old key - if lastU, ok := m.localToLastUpdate[u.key]; ok { - // Create an update which will delete the validator for the old key - foreignUpdates = append(foreignUpdates, update{key: lastU.key, power: 0}) + for _, localUpdate := range localUpdates { + if lastForeignUpdate, ok := m.localToLastForeignUpdate[localUpdate.key]; ok { + // If an update for the local key existed, send a deletion + foreignUpdates[lastForeignUpdate.key] = 0 } - currKey := m.localToForeign[u.key] + fk := m.localToForeign[localUpdate.key] // Create an update which will add/update the validator for the current key - foreignUpdates = append(foreignUpdates, update{key: currKey, power: u.power}) + foreignUpdates[fk] = localUpdate.power } + ret := []update{} // Update internal bookkeeping - for _, u := range foreignUpdates { - m.foreignToGreatestVSCID[u.key] = vscid - m.localToLastUpdate[m.foreignToLocal[u.key]] = u + for fk, power := range foreignUpdates { + m.foreignToGreatestVSCID[fk] = vscid + m.localToLastForeignUpdate[m.foreignToLocal[fk]] = update{key: fk, power: power} + ret = append(ret, update{key: fk, power: power}) } - return foreignUpdates + return ret } func (m *KeyGuard) Prune(mostRecentlyMaturedVscid VSCID) { From cf31fbcd5a73828ab33c67f9b28f1eebed46724e Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 13:06:29 +0100 Subject: [PATCH 029/127] CP --- x/ccv/provider/keyguard/keyguard.go | 47 +++++++++++++++-------------- 1 file changed, 25 insertions(+), 22 deletions(-) diff --git a/x/ccv/provider/keyguard/keyguard.go b/x/ccv/provider/keyguard/keyguard.go index b32ccc8fc7..d4bdf0911f 100644 --- a/x/ccv/provider/keyguard/keyguard.go +++ b/x/ccv/provider/keyguard/keyguard.go @@ -19,38 +19,34 @@ type KeyGuard struct { // A new key is added when a relevant update is returned by ComputeUpdates // the key is deleted at earliest after sending an update corresponding // to a call to staking::DeleteValidator TODO: impl this - localToLastForeignUpdate map[LK]update + localToLastPositiveForeignUpdate map[LK]update // A new key is added on staking::CreateValidator // the key is deleted at earliest after sending an update corresponding // to a call to staking::DeleteValidator 
TODO: impl this localToForeign map[LK]FK + // Ephemeral state: will be cleared after each call to ComputeUpdates + localsWhichMustUpdate []LK // Prunable state foreignToLocal map[FK]LK // Prunable state foreignToGreatestVSCID map[FK]VSCID - // Ephemeral state: will be cleared after each call to ComputeUpdates - localsWhichMustUpdate []LK } func MakeKeyGuard() KeyGuard { return KeyGuard{ - localToLastForeignUpdate: map[LK]update{}, - localToForeign: map[LK]FK{}, - foreignToLocal: map[FK]LK{}, - foreignToGreatestVSCID: map[FK]VSCID{}, - localsWhichMustUpdate: []LK{}, + localToLastPositiveForeignUpdate: map[LK]update{}, + localToForeign: map[LK]FK{}, + foreignToLocal: map[FK]LK{}, + foreignToGreatestVSCID: map[FK]VSCID{}, + localsWhichMustUpdate: []LK{}, } } func (m *KeyGuard) SetLocalToForeign(lk LK, fk FK) { m.localToForeign[lk] = fk - // If an update was created for lk - if u, ok := m.localToLastForeignUpdate[lk]; ok { - // If that update was not a deletion - if 0 < u.power { - // We must create an update - m.localsWhichMustUpdate = append(m.localsWhichMustUpdate, lk) - } + if _, ok := m.localToLastPositiveForeignUpdate[lk]; ok { + // We must create an update + m.localsWhichMustUpdate = append(m.localsWhichMustUpdate, lk) } } @@ -70,29 +66,36 @@ func (m *KeyGuard) ComputeUpdates(vscid VSCID, localUpdates []update) []update { // NOTE: this includes the case of updating to the same foreign key for _, lk := range m.localsWhichMustUpdate { fk := m.localToForeign[lk] - lastForeignUpdate := m.localToLastForeignUpdate[lk] + fu := m.localToLastPositiveForeignUpdate[lk] // Create an update which will delete the validator for the old key - foreignUpdates[lastForeignUpdate.key] = 0 + foreignUpdates[fu.key] = 0 + delete(m.localToLastPositiveForeignUpdate, lk) // Create an update which will add the validator for the new key - foreignUpdates[fk] = lastForeignUpdate.power + foreignUpdates[fk] = fu.power + m.localToLastPositiveForeignUpdate[lk] = update{key: fk, power: fu.power} } + m.localsWhichMustUpdate = []LK{} for _, localUpdate := range localUpdates { - if lastForeignUpdate, ok := m.localToLastForeignUpdate[localUpdate.key]; ok { + if fu, ok := m.localToLastPositiveForeignUpdate[localUpdate.key]; ok { // If an update for the local key existed, send a deletion - foreignUpdates[lastForeignUpdate.key] = 0 + foreignUpdates[fu.key] = 0 } fk := m.localToForeign[localUpdate.key] - // Create an update which will add/update the validator for the current key + // Create an update which will add or update the validator for the current key foreignUpdates[fk] = localUpdate.power + if 0 < localUpdate.power { + m.localToLastPositiveForeignUpdate[localUpdate.key] = update{key: fk, power: localUpdate.power} + } else { + delete(m.localToLastPositiveForeignUpdate, localUpdate.key) + } } ret := []update{} // Update internal bookkeeping for fk, power := range foreignUpdates { m.foreignToGreatestVSCID[fk] = vscid - m.localToLastForeignUpdate[m.foreignToLocal[fk]] = update{key: fk, power: power} ret = append(ret, update{key: fk, power: power}) } From 68b439c2ef121258bca4c0fb687d985fe4e6f5ea Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 15:29:19 +0100 Subject: [PATCH 030/127] cp --- x/ccv/provider/keyguard/keyguard.go | 85 ++++++++++++++---------- x/ccv/provider/keyguard/keyguard_test.go | 2 +- 2 files changed, 51 insertions(+), 36 deletions(-) diff --git a/x/ccv/provider/keyguard/keyguard.go b/x/ccv/provider/keyguard/keyguard.go index d4bdf0911f..99deff2b48 100644 --- a/x/ccv/provider/keyguard/keyguard.go 
+++ b/x/ccv/provider/keyguard/keyguard.go @@ -24,8 +24,6 @@ type KeyGuard struct { // the key is deleted at earliest after sending an update corresponding // to a call to staking::DeleteValidator TODO: impl this localToForeign map[LK]FK - // Ephemeral state: will be cleared after each call to ComputeUpdates - localsWhichMustUpdate []LK // Prunable state foreignToLocal map[FK]LK // Prunable state @@ -38,16 +36,11 @@ func MakeKeyGuard() KeyGuard { localToForeign: map[LK]FK{}, foreignToLocal: map[FK]LK{}, foreignToGreatestVSCID: map[FK]VSCID{}, - localsWhichMustUpdate: []LK{}, } } func (m *KeyGuard) SetLocalToForeign(lk LK, fk FK) { m.localToForeign[lk] = fk - if _, ok := m.localToLastPositiveForeignUpdate[lk]; ok { - // We must create an update - m.localsWhichMustUpdate = append(m.localsWhichMustUpdate, lk) - } } func (m *KeyGuard) GetLocal(fk FK) (LK, error) { @@ -58,44 +51,66 @@ func (m *KeyGuard) GetLocal(fk FK) (LK, error) { } } -func (m *KeyGuard) ComputeUpdates(vscid VSCID, localUpdates []update) []update { +func (m *KeyGuard) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { - foreignUpdates := map[FK]int{} + lks := []LK{} - // Create updates for any locals whose foreign key changed - // NOTE: this includes the case of updating to the same foreign key - for _, lk := range m.localsWhichMustUpdate { - fk := m.localToForeign[lk] - fu := m.localToLastPositiveForeignUpdate[lk] - // Create an update which will delete the validator for the old key - foreignUpdates[fu.key] = 0 - delete(m.localToLastPositiveForeignUpdate, lk) - // Create an update which will add the validator for the new key - foreignUpdates[fk] = fu.power - m.localToLastPositiveForeignUpdate[lk] = update{key: fk, power: fu.power} + // Key changes + for lk, newFk := range m.localToForeign { + if u, ok := m.localToLastPositiveForeignUpdate[lk]; ok { + oldFk := u.key + if oldFk != newFk { + lks = append(lks, lk) + } + } + } + // Power changes + for lk := range localUpdates { + lks = append(lks, lk) } - m.localsWhichMustUpdate = []LK{} + foreignUpdates := map[FK]int{} - for _, localUpdate := range localUpdates { - if fu, ok := m.localToLastPositiveForeignUpdate[localUpdate.key]; ok { - // If an update for the local key existed, send a deletion - foreignUpdates[fu.key] = 0 + // Iterate each lk for which the fk changed, or there is a power update + for _, lk := range lks { + power := 0 + if last, ok := m.localToLastPositiveForeignUpdate[lk]; ok { + foreignUpdates[last.key] = 0 + power = last.power + delete(m.localToLastPositiveForeignUpdate, lk) + } + if newPower, ok := localUpdates[lk]; ok { + if 0 < newPower { + power = newPower + } } - fk := m.localToForeign[localUpdate.key] - // Create an update which will add or update the validator for the current key - foreignUpdates[fk] = localUpdate.power - if 0 < localUpdate.power { - m.localToLastPositiveForeignUpdate[localUpdate.key] = update{key: fk, power: localUpdate.power} - } else { - delete(m.localToLastPositiveForeignUpdate, localUpdate.key) + if 0 < power { + fk := m.localToForeign[lk] + foreignUpdates[fk] = power + m.localToLastPositiveForeignUpdate[lk] = update{key: fk, power: power} } } - ret := []update{} - // Update internal bookkeeping - for fk, power := range foreignUpdates { + for fk := range foreignUpdates { m.foreignToGreatestVSCID[fk] = vscid + } + + return foreignUpdates +} + +func (m *KeyGuard) ComputeUpdates(vscid VSCID, localUpdates []update) []update { + + local := map[LK]int{} + + for _, u := range localUpdates { + local[u.key] = u.power + } + + foreign 
:= m.inner(vscid, local) + + ret := []update{} + + for fk, power := range foreign { ret = append(ret, update{key: fk, power: power}) } diff --git a/x/ccv/provider/keyguard/keyguard_test.go b/x/ccv/provider/keyguard/keyguard_test.go index 855879d4eb..84c428c499 100644 --- a/x/ccv/provider/keyguard/keyguard_test.go +++ b/x/ccv/provider/keyguard/keyguard_test.go @@ -158,7 +158,7 @@ func getTrace(t *testing.T) []TraceState { } for !good() { for lk := 0; lk < NUM_VALS; lk++ { - ret[lk] = rand.Intn(NUM_FKS) + 100 + ret[lk] = -rand.Intn(NUM_FKS) } } return ret From 17ee7f8cbec29db42ef8164fa13c164e21f5a298 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 15:29:24 +0100 Subject: [PATCH 031/127] gi --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index e5f8e1cb87..d100931091 100644 --- a/.gitignore +++ b/.gitignore @@ -6,3 +6,4 @@ docs/tla/states/ *.out vendor/ .vscode +x/ccv/provider/keyguard/__debug_bin From 82c127bddd05e5237342437b10fa596e55ad3efc Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 16:35:48 +0100 Subject: [PATCH 032/127] cp --- x/ccv/provider/keyguard/keyguard.go | 9 +++++--- x/ccv/provider/keyguard/keyguard_test.go | 28 ++++++++++++++---------- 2 files changed, 23 insertions(+), 14 deletions(-) diff --git a/x/ccv/provider/keyguard/keyguard.go b/x/ccv/provider/keyguard/keyguard.go index 99deff2b48..1fbaad9333 100644 --- a/x/ccv/provider/keyguard/keyguard.go +++ b/x/ccv/provider/keyguard/keyguard.go @@ -74,16 +74,19 @@ func (m *KeyGuard) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // Iterate each lk for which the fk changed, or there is a power update for _, lk := range lks { power := 0 + // If a positive update was sent, undo it. + // Store power for possible redo. if last, ok := m.localToLastPositiveForeignUpdate[lk]; ok { foreignUpdates[last.key] = 0 power = last.power delete(m.localToLastPositiveForeignUpdate, lk) } + // If there is a power update if newPower, ok := localUpdates[lk]; ok { - if 0 < newPower { - power = newPower - } + power = newPower } + // If power is 0, already deleted a few lines above + // If power is positive, we are updating or redoing if 0 < power { fk := m.localToForeign[lk] foreignUpdates[fk] = power diff --git a/x/ccv/provider/keyguard/keyguard_test.go b/x/ccv/provider/keyguard/keyguard_test.go index 84c428c499..f65cf0ef49 100644 --- a/x/ccv/provider/keyguard/keyguard_test.go +++ b/x/ccv/provider/keyguard/keyguard_test.go @@ -1,6 +1,7 @@ package keyguard import ( + "fmt" "math/rand" "testing" @@ -16,6 +17,7 @@ type TraceState struct { } type Driver struct { + t *testing.T trace []TraceState lastTP int lastTC int @@ -45,7 +47,7 @@ func (vs *ValSet) processUpdates(updates []update) { } } -func (d *Driver) runTrace(t *testing.T) { +func (d *Driver) runTrace() { kg := MakeKeyGuard() d.lastTP = 0 @@ -69,9 +71,9 @@ func (d *Driver) runTrace(t *testing.T) { d.foreignValSet.processUpdates(d.foreignUpdates[init.TC]) kg.Prune(init.TM) - require.Len(t, d.mappings, 1) - require.Len(t, d.foreignUpdates, 1) - require.Len(t, d.localValSets, 1) + require.Len(d.t, d.mappings, 1) + require.Len(d.t, d.foreignUpdates, 1) + require.Len(d.t, d.localValSets, 1) for i, s := range d.trace { if i < 1 { @@ -85,7 +87,6 @@ func (d *Driver) runTrace(t *testing.T) { } d.localValSets[i].processUpdates(s.LocalUpdates) d.lastTP = s.TP - for lk, fk := range s.Mapping { kg.SetLocalToForeign(lk, fk) } @@ -103,11 +104,11 @@ func (d *Driver) runTrace(t *testing.T) { kg.Prune(s.TM) d.lastTM = s.TM } - 
d.checkProperties(t) + d.checkProperties() } } -func (d *Driver) checkProperties(t *testing.T) { +func (d *Driver) checkProperties() { // Check that the foreign ValSet is equal to the local ValSet // at time TC via inverse mapping foreignSet := d.foreignValSet.keyToPower @@ -123,11 +124,11 @@ func (d *Driver) checkProperties(t *testing.T) { } for lk, actual := range foreignSetAsLocal { expect := localSet[lk] - require.Equal(t, expect, actual) + require.Equal(d.t, expect, actual) } for lk, expect := range localSet { actual := foreignSetAsLocal[lk] - require.Equal(t, expect, actual) + require.Equal(d.t, expect, actual) } // TODO: check pruning and reverse queries @@ -242,14 +243,18 @@ func getTrace(t *testing.T) []TraceState { } func TestPrototype(t *testing.T) { + + rand.Seed(40) for i := 0; i < 1000; i++ { + fmt.Println("i: ", i) trace := []TraceState{} for len(trace) < 2 { trace = getTrace(t) } d := Driver{} d.trace = trace - d.runTrace(t) + d.t = t + d.runTrace() } } @@ -258,7 +263,8 @@ func TestKeyDelegation(t *testing.T) { for _, trace := range traces { d := Driver{} d.trace = trace - d.runTrace(t) + d.t = t + d.runTrace() } } From 6302dda4e477a102f2b406ee8bf7595c4fdc95b4 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 16:55:46 +0100 Subject: [PATCH 033/127] cp --- x/ccv/provider/keyguard/keyguard.go | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/x/ccv/provider/keyguard/keyguard.go b/x/ccv/provider/keyguard/keyguard.go index 1fbaad9333..d2fe5c7a17 100644 --- a/x/ccv/provider/keyguard/keyguard.go +++ b/x/ccv/provider/keyguard/keyguard.go @@ -71,6 +71,13 @@ func (m *KeyGuard) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { foreignUpdates := map[FK]int{} + // need to go over every local key with a key change or a power update + // add a deletion for all of these, that have a last positive update + // need to go over every local key with a key change or a power update + // if new power update is 0, do nothing + // if new power update is positive, use it + // else: use old power update, which must be positve + // Iterate each lk for which the fk changed, or there is a power update for _, lk := range lks { power := 0 From 252bbb4a9b3595e2158fdda6dcf647c25f19d848 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 17:03:35 +0100 Subject: [PATCH 034/127] Pass basic test --- x/ccv/provider/keyguard/keyguard.go | 41 +++++++++++++++--------- x/ccv/provider/keyguard/keyguard_test.go | 2 -- 2 files changed, 25 insertions(+), 18 deletions(-) diff --git a/x/ccv/provider/keyguard/keyguard.go b/x/ccv/provider/keyguard/keyguard.go index d2fe5c7a17..b400a8007b 100644 --- a/x/ccv/provider/keyguard/keyguard.go +++ b/x/ccv/provider/keyguard/keyguard.go @@ -71,36 +71,45 @@ func (m *KeyGuard) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { foreignUpdates := map[FK]int{} - // need to go over every local key with a key change or a power update - // add a deletion for all of these, that have a last positive update - // need to go over every local key with a key change or a power update - // if new power update is 0, do nothing - // if new power update is positive, use it - // else: use old power update, which must be positve - - // Iterate each lk for which the fk changed, or there is a power update + // Make a temporary copy + localToLastPositiveForeignUpdate := map[LK]update{} + for lk, u := range m.localToLastPositiveForeignUpdate { + localToLastPositiveForeignUpdate[lk] = u + } + + // Iterate all local keys for which either the foreign key changed or there + 
// has been a power update. for _, lk := range lks { - power := 0 - // If a positive update was sent, undo it. - // Store power for possible redo. if last, ok := m.localToLastPositiveForeignUpdate[lk]; ok { + // If the key has previously been shipped in an update + // delete it. foreignUpdates[last.key] = 0 + delete(localToLastPositiveForeignUpdate, lk) + } + } + + // Iterate all local keys for which either the foreign key changed or there + // has been a power update. + for _, lk := range lks { + power := 0 + if last, ok := m.localToLastPositiveForeignUpdate[lk]; ok { + // If there was a positive power before, use it. power = last.power - delete(m.localToLastPositiveForeignUpdate, lk) } - // If there is a power update + // If there is a new power use it. if newPower, ok := localUpdates[lk]; ok { power = newPower } - // If power is 0, already deleted a few lines above - // If power is positive, we are updating or redoing + // Only ship positive powers. if 0 < power { fk := m.localToForeign[lk] foreignUpdates[fk] = power - m.localToLastPositiveForeignUpdate[lk] = update{key: fk, power: power} + localToLastPositiveForeignUpdate[lk] = update{key: fk, power: power} } } + m.localToLastPositiveForeignUpdate = localToLastPositiveForeignUpdate + for fk := range foreignUpdates { m.foreignToGreatestVSCID[fk] = vscid } diff --git a/x/ccv/provider/keyguard/keyguard_test.go b/x/ccv/provider/keyguard/keyguard_test.go index f65cf0ef49..8f88a0a61b 100644 --- a/x/ccv/provider/keyguard/keyguard_test.go +++ b/x/ccv/provider/keyguard/keyguard_test.go @@ -1,7 +1,6 @@ package keyguard import ( - "fmt" "math/rand" "testing" @@ -246,7 +245,6 @@ func TestPrototype(t *testing.T) { rand.Seed(40) for i := 0; i < 1000; i++ { - fmt.Println("i: ", i) trace := []TraceState{} for len(trace) < 2 { trace = getTrace(t) From 01c416c07511c938c8f1c510d8c95f5d1b12c8d9 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 17:10:49 +0100 Subject: [PATCH 035/127] checkpoint, passing a bunch --- x/ccv/provider/keyguard/.gitignore | 6 + x/ccv/provider/keyguard/keyguard_test.go | 33 +- .../keyguard/{prototyping/tla => }/main.cfg | 0 .../keyguard/{prototyping/tla => }/main.tla | 0 .../keyguard/prototyping/prototype.py | 85 - .../keyguard/prototyping/tla/.gitignore | 4 - .../prototyping/tla/library/Apalache.tla | 155 - .../keyguard/prototyping/tla/library/Bags.tla | 591 ---- .../tla/library/FiniteSetTheorems.tla | 385 --- .../tla/library/FiniteSetTheorems_proofs.tla | 848 ----- .../prototyping/tla/library/FiniteSets.tla | 23 - .../tla/library/FunctionTheorems.tla | 575 ---- .../tla/library/FunctionTheorems_proofs.tla | 947 ------ .../prototyping/tla/library/Functions.tla | 63 - .../prototyping/tla/library/JectionThm.tla | 1130 ------- .../prototyping/tla/library/Jections.tla | 48 - .../tla/library/NaturalsInduction.tla | 210 -- .../tla/library/NaturalsInduction_proofs.tla | 454 --- .../prototyping/tla/library/RealTime.tla | 22 - .../tla/library/SequenceTheorems.tla | 636 ---- .../tla/library/SequenceTheorems_proofs.tla | 1446 --------- .../prototyping/tla/library/TLAPS.tla | 411 --- .../tla/library/WellFoundedInduction.tla | 328 -- .../library/WellFoundedInduction_proofs.tla | 738 ----- .../prototyping/tla/library/ref/CCV.tla | 550 ---- .../prototyping/tla/library/ref/MC_CCV.tla | 62 - .../prototyping/tla/library/ref/typedefs.tla | 12 - .../prototyping/tla/library/tlcFolds.tla | 27 - .../keyguard/prototyping/ts/.eslintignore | 1 - .../keyguard/prototyping/ts/.eslintrc.json | 27 - .../keyguard/prototyping/ts/.gitignore | 31 - 
.../keyguard/prototyping/ts/.prettierrc | 17 - .../prototyping/ts/__tests__/gen.test.ts | 14 - .../prototyping/ts/__tests__/tsconfig.json | 30 - .../keyguard/prototyping/ts/jest.config.js | 24 - .../keyguard/prototyping/ts/package.json | 50 - .../keyguard/prototyping/ts/src/main.ts | 23 - .../keyguard/prototyping/ts/tsconfig.json | 28 - .../prototyping/ts/tsconfig.release.json | 8 - .../keyguard/prototyping/ts/yarn.lock | 2862 ----------------- 40 files changed, 12 insertions(+), 12892 deletions(-) create mode 100644 x/ccv/provider/keyguard/.gitignore rename x/ccv/provider/keyguard/{prototyping/tla => }/main.cfg (100%) rename x/ccv/provider/keyguard/{prototyping/tla => }/main.tla (100%) delete mode 100644 x/ccv/provider/keyguard/prototyping/prototype.py delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/.gitignore delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/library/Apalache.tla delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/library/Bags.tla delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/library/FiniteSetTheorems.tla delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/library/FiniteSetTheorems_proofs.tla delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/library/FiniteSets.tla delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/library/FunctionTheorems.tla delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/library/FunctionTheorems_proofs.tla delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/library/Functions.tla delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/library/JectionThm.tla delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/library/Jections.tla delete mode 100755 x/ccv/provider/keyguard/prototyping/tla/library/NaturalsInduction.tla delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/library/NaturalsInduction_proofs.tla delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/library/RealTime.tla delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/library/SequenceTheorems.tla delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/library/SequenceTheorems_proofs.tla delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/library/TLAPS.tla delete mode 100755 x/ccv/provider/keyguard/prototyping/tla/library/WellFoundedInduction.tla delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/library/WellFoundedInduction_proofs.tla delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/library/ref/CCV.tla delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/library/ref/MC_CCV.tla delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/library/ref/typedefs.tla delete mode 100644 x/ccv/provider/keyguard/prototyping/tla/library/tlcFolds.tla delete mode 100644 x/ccv/provider/keyguard/prototyping/ts/.eslintignore delete mode 100644 x/ccv/provider/keyguard/prototyping/ts/.eslintrc.json delete mode 100644 x/ccv/provider/keyguard/prototyping/ts/.gitignore delete mode 100644 x/ccv/provider/keyguard/prototyping/ts/.prettierrc delete mode 100644 x/ccv/provider/keyguard/prototyping/ts/__tests__/gen.test.ts delete mode 100644 x/ccv/provider/keyguard/prototyping/ts/__tests__/tsconfig.json delete mode 100644 x/ccv/provider/keyguard/prototyping/ts/jest.config.js delete mode 100644 x/ccv/provider/keyguard/prototyping/ts/package.json delete mode 100644 x/ccv/provider/keyguard/prototyping/ts/src/main.ts delete mode 100644 x/ccv/provider/keyguard/prototyping/ts/tsconfig.json delete mode 100644 x/ccv/provider/keyguard/prototyping/ts/tsconfig.release.json delete mode 100644 
x/ccv/provider/keyguard/prototyping/ts/yarn.lock diff --git a/x/ccv/provider/keyguard/.gitignore b/x/ccv/provider/keyguard/.gitignore new file mode 100644 index 0000000000..2afa065528 --- /dev/null +++ b/x/ccv/provider/keyguard/.gitignore @@ -0,0 +1,6 @@ +*.json +*.tla +*.cfg +!main.tla +!main.cfg +*apalache* \ No newline at end of file diff --git a/x/ccv/provider/keyguard/keyguard_test.go b/x/ccv/provider/keyguard/keyguard_test.go index 8f88a0a61b..24b416fca4 100644 --- a/x/ccv/provider/keyguard/keyguard_test.go +++ b/x/ccv/provider/keyguard/keyguard_test.go @@ -74,17 +74,14 @@ func (d *Driver) runTrace() { require.Len(d.t, d.foreignUpdates, 1) require.Len(d.t, d.localValSets, 1) - for i, s := range d.trace { - if i < 1 { - continue - } + for _, s := range d.trace[1:] { if d.lastTP < s.TP { d.mappings = append(d.mappings, s.Mapping) d.localValSets = append(d.localValSets, MakeValSet()) - for lk, power := range d.localValSets[i-1].keyToPower { - d.localValSets[i].keyToPower[lk] = power + for lk, power := range d.localValSets[d.lastTP].keyToPower { + d.localValSets[s.TP].keyToPower[lk] = power } - d.localValSets[i].processUpdates(s.LocalUpdates) + d.localValSets[s.TP].processUpdates(s.LocalUpdates) d.lastTP = s.TP for lk, fk := range s.Mapping { kg.SetLocalToForeign(lk, fk) @@ -130,12 +127,12 @@ func (d *Driver) checkProperties() { require.Equal(d.t, expect, actual) } - // TODO: check pruning and reverse queries + // TODO: check pruning is correct (reverse lookup) } func getTrace(t *testing.T) []TraceState { - TRACE_LEN := 3 + TRACE_LEN := 1000 NUM_VALS := 3 NUM_FKS := 9 @@ -265,21 +262,3 @@ func TestKeyDelegation(t *testing.T) { d.runTrace() } } - -// TODO: - -// These are the critical properties -// 1. All validator sets on consumer are a validator set for provider for an earlier -// time, mapped through the effective mapping at that time. -// 2. 
It is always possible to fetch a local key, given a foreign key, if the foreign -// key is still known to the consumer - -// My thinking now is that I can test by doing the following -// If the trace TP increases than there is a new mapping and local updates -// the local updates aggregate to create a local validator set -// record that validator set, and the relevant mapping to time T=TP -// If TC increases to time T, can check the ACTUAL validator set in C -// It should be be possible to query kg for every validator foreign key -// in any intermediate val set in [TM+1, TP] -// It should not be possible to query kg for any validator that does not appear -// in any intermediate vla set in [0, TM] diff --git a/x/ccv/provider/keyguard/prototyping/tla/main.cfg b/x/ccv/provider/keyguard/main.cfg similarity index 100% rename from x/ccv/provider/keyguard/prototyping/tla/main.cfg rename to x/ccv/provider/keyguard/main.cfg diff --git a/x/ccv/provider/keyguard/prototyping/tla/main.tla b/x/ccv/provider/keyguard/main.tla similarity index 100% rename from x/ccv/provider/keyguard/prototyping/tla/main.tla rename to x/ccv/provider/keyguard/main.tla diff --git a/x/ccv/provider/keyguard/prototyping/prototype.py b/x/ccv/provider/keyguard/prototyping/prototype.py deleted file mode 100644 index 2cef2fe2ed..0000000000 --- a/x/ccv/provider/keyguard/prototyping/prototype.py +++ /dev/null @@ -1,85 +0,0 @@ - -class KeyDelegation: - def __init__(self): - self.localKeyToLastUpdate = {} - self.localKeyToCurrentForeignKey - self.foreignKeyToLocalKey = {} - self.foreignKeyToVSCIDWhenLastSent = {} - self.localKeysForWhichUpdateMustBeSent = set() - - def SetKey(self, v, k): - self.currentKey[v] = k - if v in self.localKeyToLastUpdate: - [_, lastPower] = self.localKeyToLastUpdate[v] - if 0 < lastPower: - # If validator is known to the consumer - self.localKeysForWhichUpdateMustBeSent.add(v) - - def ComputeUpdates(self, vscid, localUpdates): - updates = {} - # Ship updates for any - for v in self.localKeysForWhichUpdateMustBeSent: - currKey = self.localKeyToCurrentForeignKey[v] - [lastKey, lastPower] = self.localKeyToLastUpdate[v] - updates[lastKey] = 0 - updates[currKey] = lastPower - self.localKeysForWhichUpdateMustBeSent = set() - for v, power in localUpdates.items(): # Will happen if power changed since last block - if v in self.localKeyToLastUpdate: - [lastKey, _] = self.localKeyToLastUpdate[v] - updates[lastKey] = 0 - currKey = self.localKeyToCurrentForeignKey[v] - updates[currKey] = power - - for foreignKey, power in updates.items(): - self.foreignKeyToVSCIDWhenLastSent[foreignKey] = vscid - self.localKeyToLastUpdate[self.foreignKeyToLocalKey[foreignKey]] = [ - foreignKey, power] - return updates - - def Prune(self, mostRecentlyMaturedVscid): - removed = [foreignKey for foreignKey, - vscid in self.foreignKeyToVSCIDWhenLastSent if vscid <= mostRecentlyMaturedVscid] - for foreignKey in removed: - del self.foreignKeyToVSCIDWhenLastSent[foreignKey] - del self.foreignKeyToLocalKey[foreignKey] - - -consumers = ["c0", "c1"] -vals = ["v0", "v1"] - - -class Provider: - def __init__(self): - self.keyDelegations = {c: KeyDelegation() for c in consumers} - pass - - def SendUpdates(self): - for c in consumers: - updates = {"v0": 42, "v1": 0} - updates = self.keyDelegations[c].computeUpdates(updates) - # ship the updates - - def SetKey(self, c, v, k): - self.keyDelegations[c].SetKey(v, k) - - def Slash(self, c, foreignKey, vscID): - localKey = self.keyDelegations[c].foreignKeyToLocalKey[foreignKey] - # slash - - def Mature(self, 
c, ascendingVscids): - latestVscid = ascendingVscids[-1] - self.keyDelegations[c].Prune(latestVscid) - - -def main(): - print("hello") - pass - - -if __name__ == "__main__": - # x = {c: KeyDelegation() for c in consumers} - # x["c0"].lastKeySent = {1: 2} - # print(x["c0"].lastKeySent) - # print(x["c1"].lastKeySent) - main() diff --git a/x/ccv/provider/keyguard/prototyping/tla/.gitignore b/x/ccv/provider/keyguard/prototyping/tla/.gitignore deleted file mode 100644 index e37c5b8761..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -_apalache-out -states/ -*TTrace* -*.json \ No newline at end of file diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/Apalache.tla b/x/ccv/provider/keyguard/prototyping/tla/library/Apalache.tla deleted file mode 100644 index b8bb5cb1c5..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/Apalache.tla +++ /dev/null @@ -1,155 +0,0 @@ ---------------------------- MODULE Apalache ----------------------------------- -(* - * This is a standard module for use with the Apalache model checker. - * The meaning of the operators is explained in the comments. - * Many of the operators serve as additional annotations of their arguments. - * As we like to preserve compatibility with TLC and TLAPS, we define the - * operator bodies by erasure. The actual interpretation of the operators is - * encoded inside Apalache. For the moment, these operators are mirrored in - * the class at.forsyte.apalache.tla.lir.oper.ApalacheOper. - * - * Igor Konnov, Jure Kukovec, Informal Systems 2020-2022 - *) - -(** - * An assignment of an expression e to a state variable x. Typically, one - * uses the non-primed version of x in the initializing predicate Init and - * the primed version of x (that is, x') in the transition predicate Next. - * Although TLA+ does not have a concept of a variable assignment, we find - * this concept extremely useful for symbolic model checking. In pure TLA+, - * one would simply write x = e, or x \in {e}. - * - * Apalache automatically converts some expressions of the form - * x = e or x \in {e} into assignments. However, if you like to annotate - * assignments by hand, you can use this operator. - * - * For a further discussion on that matter, see: - * https://github.com/informalsystems/apalache/blob/main/docs/src/idiomatic/001assignments.md - *) -__x := __e == __x = __e - -(** - * A generator of a data structure. Given a positive integer `bound`, and - * assuming that the type of the operator application is known, we - * recursively generate a TLA+ data structure as a tree, whose width is - * bound by the number `bound`. - * - * The body of this operator is redefined by Apalache. - *) -Gen(__size) == {} - -(** - * Non-deterministically pick a value out of the set `S`, if `S` is non-empty. - * If `S` is empty, return some value of the proper type. This can be - * understood as a non-deterministic version of CHOOSE x \in S: TRUE. - * - * @type: Set(a) => a; - *) -Guess(__S) == - \* Since this is not supported by TLC, - \* we fall back to the deterministic version for TLC. - \* Apalache redefines the operator `Guess` as explained above. - CHOOSE __x \in __S: TRUE - -(** - * Convert a set of pairs S to a function F. Note that if S contains at least - * two pairs <> and <> such that x = u and y /= v, - * then F is not uniquely defined. We use CHOOSE to resolve this ambiguity. - * Apalache implements a more efficient encoding of this operator - * than the default one. 
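 * For illustration (an editorial aside; the example values are made up): under the
 * definition below, SetAsFun({<<1, "a">>, <<2, "b">>}) is a function f with
 * DOMAIN f = {1, 2}, f[1] = "a" and f[2] = "b". If the set also contained <<1, "c">>,
 * CHOOSE would resolve f[1] to one of "a" or "c".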
- * - * @type: Set(<>) => (a -> b); - *) -SetAsFun(__S) == - LET __Dom == { __x: <<__x, __y>> \in __S } - __Rng == { __y: <<__x, __y>> \in __S } - IN - [ __x \in __Dom |-> CHOOSE __y \in __Rng: <<__x, __y>> \in __S ] - -(** - * A sequence constructor that avoids using a function constructor. - * Since Apalache is typed, this operator is more efficient than - * FunAsSeq([ i \in 1..N |-> F(i) ]). Apalache requires N to be - * a constant expression. - * - * @type: (Int, (Int -> a)) => Seq(a); - *) -LOCAL INSTANCE Integers -MkSeq(__N, __F(_)) == - \* This is the TLC implementation. Apalache does it differently. - [ __i \in (1..__N) |-> __F(__i) ] - -\* required by our default definition of FoldSeq and FunAsSeq -LOCAL INSTANCE Sequences - -(** - * As TLA+ is untyped, one can use function- and sequence-specific operators - * interchangeably. However, to maintain correctness w.r.t. our type-system, - * an explicit cast is needed when using functions as sequences. - * FunAsSeq reinterprets a function over integers as a sequence. - * - * The parameters have the following meaning: - * - * - fn is the function from 1..len that should be interpreted as a sequence. - * - len is the length of the sequence, len = Cardinality(DOMAIN fn), - * len may be a variable, a computable expression, etc. - * - capacity is a static upper bound on the length, that is, len <= capacity. - * - * @type: ((Int -> a), Int, Int) => Seq(a); - *) -FunAsSeq(__fn, __len, __capacity) == - LET __FunAsSeq_elem_ctor(__i) == __fn[__i] IN - SubSeq(MkSeq(__capacity, __FunAsSeq_elem_ctor), 1, __len) - -(** - * Annotating an expression \E x \in S: P as Skolemizable. That is, it can - * be replaced with an expression c \in S /\ P(c) for a fresh constant c. - * Not every exisential can be replaced with a constant, this should be done - * with care. Apalache detects Skolemizable expressions by static analysis. - *) -Skolem(__e) == __e - -(** - * A hint to the model checker to expand a set S, instead of dealing - * with it symbolically. Apalache finds out which sets have to be expanded - * by static analysis. - *) -Expand(__S) == __S - -(** - * A hint to the model checker to replace its argument Cardinality(S) >= k - * with a series of existential quantifiers for a constant k. - * Similar to Skolem, this has to be done carefully. Apalache automatically - * places this hint by static analysis. - *) -ConstCardinality(__cardExpr) == __cardExpr - -(** - * The folding operator, used to implement computation over a set. - * Apalache implements a more efficient encoding than the one below. - * (from the community modules). - * - * @type: ((a, b) => a, a, Set(b)) => a; - *) -RECURSIVE ApaFoldSet(_, _, _) -ApaFoldSet(__Op(_,_), __v, __S) == - IF __S = {} - THEN __v - ELSE LET __w == CHOOSE __x \in __S: TRUE IN - LET __T == __S \ {__w} IN - ApaFoldSet(__Op, __Op(__v,__w), __T) - -(** - * The folding operator, used to implement computation over a sequence. - * Apalache implements a more efficient encoding than the one below. - * (from the community modules). 
- * - * @type: ((a, b) => a, a, Seq(b)) => a; - *) -RECURSIVE ApaFoldSeqLeft(_, _, _) -ApaFoldSeqLeft(__Op(_,_), __v, __seq) == - IF __seq = <<>> - THEN __v - ELSE ApaFoldSeqLeft(__Op, __Op(__v, Head(__seq)), Tail(__seq)) - -=============================================================================== diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/Bags.tla b/x/ccv/provider/keyguard/prototyping/tla/library/Bags.tla deleted file mode 100644 index c66323edf1..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/Bags.tla +++ /dev/null @@ -1,591 +0,0 @@ ------------------------------ MODULE Bags -------------------------------- -(**************************************************************************) -(* A bag, also called a multiset, is a set that can contain multiple *) -(* copies of the same element. A bag can have infinitely many elements, *) -(* but only finitely many copies of any single element. *) -(* *) -(* We represent a bag in the usual way as a function whose range is a *) -(* subset of the positive integers. An element e belongs to bag B iff e *) -(* is in the domain of B, in which case bag B contains B[e] copies of e. *) -(**************************************************************************) -EXTENDS TLC, TLAPS, - FiniteSetTheorems, - SequenceTheorems - -LOCAL INSTANCE Naturals - -IsABag(B) == - (************************************************************************) - (* True iff B is a bag. *) - (************************************************************************) - B \in [DOMAIN B -> {n \in Nat : n > 0}] - -BagToSet(B) == DOMAIN B - (************************************************************************) - (* The set of elements at least one copy of which is in B. *) - (************************************************************************) - -SetToBag(S) == [e \in S |-> 1] - (************************************************************************) - (* The bag that contains one copy of every element of the set S. *) - (************************************************************************) - -BagIn(e,B) == e \in BagToSet(B) - (************************************************************************) - (* The \in operator for bags. *) - (************************************************************************) - -EmptyBag == SetToBag({}) - -B1 (+) B2 == - (************************************************************************) - (* The union of bags B1 and B2. *) - (************************************************************************) - [e \in (DOMAIN B1) \cup (DOMAIN B2) |-> - (IF e \in DOMAIN B1 THEN B1[e] ELSE 0) - + (IF e \in DOMAIN B2 THEN B2[e] ELSE 0) ] - -B1 (-) B2 == - (************************************************************************) - (* The bag B1 with the elements of B2 removed--that is, with one copy *) - (* of an element removed from B1 for each copy of the same element in *) - (* B2. If B2 has at least as many copies of e as B1, then B1 (-) B2 *) - (* has no copies of e. *) - (************************************************************************) - LET B == [e \in DOMAIN B1 |-> IF e \in DOMAIN B2 THEN B1[e] - B2[e] - ELSE B1[e]] - IN [e \in {d \in DOMAIN B : B[d] > 0} |-> B[e]] - -LOCAL Sum(f) == - (******************************************************************) - (* The sum of f[x] for all x in DOMAIN f. The definition assumes *) - (* that f is a Nat-valued function and that f[x] equals 0 for all *) - (* but a finite number of elements x in DOMAIN f. 
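For instance (an illustrative aside, not a normative statement): if f = [x \in {"a", "b"} |-> 2], then Sum(f) = 2 + 2 = 4.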
*) - (******************************************************************) - LET DSum[S \in SUBSET DOMAIN f] == - LET elt == CHOOSE e \in S : TRUE - IN IF S = {} THEN 0 - ELSE f[elt] + DSum[S \ {elt}] - IN DSum[DOMAIN f] - -BagUnion(S) == - (************************************************************************) - (* The bag union of all elements of the set S of bags. *) - (************************************************************************) - [e \in UNION {BagToSet(B) : B \in S} |-> - Sum( [B \in S |-> IF BagIn(e, B) THEN B[e] ELSE 0] ) ] - -B1 \sqsubseteq B2 == - (************************************************************************) - (* The subset operator for bags. B1 \sqsubseteq B2 iff, for all e, bag *) - (* B2 has at least as many copies of e as bag B1 does. *) - (************************************************************************) - /\ (DOMAIN B1) \subseteq (DOMAIN B2) - /\ \A e \in DOMAIN B1 : B1[e] \leq B2[e] - -SubBag(B) == - (************************************************************************) - (* The set of all subbags of bag B. *) - (* *) - (* The following definition is not the one described in the TLA+ book, *) - (* but rather one that TLC can evaluate. *) - (************************************************************************) - - LET RemoveFromDom(x, f) == [y \in (DOMAIN f) \ {x} |-> f[y]] - Combine(x, BagSet) == - BagSet \cup - {[y \in (DOMAIN f) \cup {x} |-> IF y = x THEN i ELSE f[y]] : - f \in BagSet, i \in 1..B[x]} - Biggest == LET Range1 == {B[x] : x \in DOMAIN B} - IN IF Range1 = {} THEN 0 - ELSE CHOOSE r \in Range1 : - \A s \in Range1 : r \geq s - RSB[BB \in UNION {[S -> 1..Biggest] : S \in SUBSET DOMAIN B}] == - IF BB = << >> THEN {<< >>} - ELSE LET x == CHOOSE x \in DOMAIN BB : TRUE - IN Combine(x, RSB[RemoveFromDom(x, BB)]) - IN RSB[B] - - (******************* Here is the definition from the TLA+ book. ******** - LET AllBagsOfSubset == - (******************************************************************) - (* The set of all bags SB such that BagToSet(SB) \subseteq *) - (* BagToSet(B). *) - (******************************************************************) - UNION {[SB -> {n \in Nat : n > 0}] : SB \in SUBSET BagToSet(B)} - IN {SB \in AllBagsOfSubset : \A e \in DOMAIN SB : SB[e] \leq B[e]} - ***************************************************************************) - -BagOfAll(F(_), B) == - (************************************************************************) - (* The bag analog of the set {F(x) : x \in B} for a set B. It's the bag *) - (* that contains, for each element e of B, one copy of F(e) for every *) - (* copy of e in B. This defines a bag iff, for any value v, the set of *) - (* e in B such that F(e) = v is finite. *) - (************************************************************************) - [e \in {F(d) : d \in BagToSet(B)} |-> - Sum( [d \in BagToSet(B) |-> IF F(d) = e THEN B[d] ELSE 0] ) ] - -BagCardinality(B) == - (************************************************************************) - (* If B is a finite bag (one such that BagToSet(B) is a finite set), *) - (* then this is its cardinality (the total number of copies of elements *) - (* in B). Its value is unspecified if B is infinite. *) - (************************************************************************) - Sum(B) - -CopiesIn(e, B) == - (************************************************************************) - (* If B is a bag, then CopiesIn(e, B) is the number of copies of e in *) - (* B. If ~BagIn(e, B), then CopiesIn(e, B) = 0. 
*) - (************************************************************************) - - IF BagIn(e, B) THEN B[e] ELSE 0 - -Scaling(n, B) == - (************************************************************************) - (* If B is a bag, then Scaling(e, B) is the Bag containing the same *) - (* elements of B with n times their copies *) - (************************************************************************) - IF n>0 THEN [i \in DOMAIN B |-> n*B[i] ] ELSE EmptyBag - -(***************************************************************************) -(* Converts the Sequence into a bag *) -(***************************************************************************) - -SeqToBag(seq) == [ x \in Range(seq) |-> Cardinality({i \in DOMAIN seq: seq[i]=x}) ] - - - -(***************************************************************************) -(* \sqsubseteq is a PARTIAL ORDER relattion *) -(***************************************************************************) - -(*AntiSymmetry*) -THEOREM Bags_SqsubseteqPO_AntiSymmetry == ASSUME NEW A, NEW B, IsABag(A), IsABag(B), A \sqsubseteq B, B \sqsubseteq A - PROVE A = B -<1>1. DOMAIN A = DOMAIN B - BY DEF \sqsubseteq -<1>2. (\A i \in DOMAIN A: A[i]<=B[i]) /\ (\A i \in DOMAIN B: B[i]<=A[i]) - BY DEF \sqsubseteq -<1>3. \A i \in DOMAIN A: A[i]=B[i] - BY <1>1, <1>2, SMT DEF IsABag -<1>4. A \in [DOMAIN A -> {n \in Nat: n>0}] /\ B \in [DOMAIN B -> {n \in Nat: n>0}] - BY DEF IsABag -<1>5. QED - BY <1>4, <1>3, <1>1 - -(*Reflexivity*) -THEOREM Bags_SqsubsetPO_Reflexivity == ASSUME NEW B, IsABag(B) - PROVE B \sqsubseteq B -BY SMT DEF \sqsubseteq, IsABag - -(*Transitivity*) -THEOREM Bags_SqsubseteqPO_Transitivity == ASSUME NEW A, NEW B, NEW C, IsABag(A), IsABag(B), IsABag(C), A \sqsubseteq B, B \sqsubseteq C - PROVE A \sqsubseteq C -<1>1. DOMAIN A \subseteq DOMAIN C /\ DOMAIN A \subseteq DOMAIN B - BY DEF \sqsubseteq -<1>2. (\A i \in DOMAIN A: A[i] <= B[i]) /\ (\A i \in DOMAIN B: B[i]<=C[i] ) - BY <1>1 DEF \sqsubseteq, IsABag -<1>3. \A i \in DOMAIN A: B[i]<=C[i] - BY <1>1, <1>2 -<1>4. \A i \in DOMAIN A: A[i]<=C[i] - BY <1>3, <1>2, SMT DEF IsABag -<1>.QED - BY <1>1, <1>4 DEF \sqsubseteq - -(***************************************************************************) -(* Lemmas on EmptyBags *) -(***************************************************************************) - - -THEOREM Bags_EmptyBag == ASSUME NEW B, IsABag(B) - PROVE /\ IsABag(EmptyBag) - /\ B=EmptyBag <=> DOMAIN B ={} - /\ DOMAIN EmptyBag ={} - /\ EmptyBag \sqsubseteq B - /\ \A e: ~BagIn(e, EmptyBag) -<1>1. DOMAIN EmptyBag = {} - BY DEF EmptyBag, SetToBag -<1>2. IsABag(EmptyBag) - <2>1. \A i \in DOMAIN EmptyBag: EmptyBag[i] \in {n \in Nat: n>0} - BY <1>1 - <2>2. QED - BY <2>1 DEF IsABag, EmptyBag, SetToBag -<1>3. B=EmptyBag => DOMAIN B ={} - BY DEF EmptyBag, SetToBag -<1>4. ASSUME DOMAIN B ={} PROVE B=EmptyBag - <2>1. B \in [{} -> {n \in Nat: n>0}] /\ EmptyBag \in [{} -> {n \in Nat: n>0}] - BY <1>4 DEF EmptyBag, IsABag, SetToBag - <2>2. DOMAIN B = DOMAIN EmptyBag - BY <1>4 DEF EmptyBag, SetToBag - <2>3. \A i \in DOMAIN B : B[i]=EmptyBag[i] - BY <1>4 DEF EmptyBag, SetToBag - <2>4. QED - BY <2>3, <2>2, <2>1 -<1>5. EmptyBag \sqsubseteq B - BY <1>1 DEF \sqsubseteq -<1>6. ASSUME ~(\A e: ~BagIn(e, EmptyBag)) PROVE FALSE - <2>1. \E e: BagIn(e, EmptyBag) - BY <1>6 - <2>2. PICK e : BagIn(e, EmptyBag) - BY <2>1 - <2>3. QED - BY <2>2, <1>1 DEF BagIn, BagToSet -<1>7. 
QED - BY <1>1, <1>2, <1>3, <1>4, <1>5, <1>6 - -(***************************************************************************) -(* Lemmas on Scalng Operator for Bags *) -(***************************************************************************) - -THEOREM Bags_Scaling == ASSUME NEW B, IsABag(B), NEW n \in Nat, NEW m \in Nat - PROVE /\ IsABag(Scaling(n, B)) - /\ Scaling(n, EmptyBag)=EmptyBag - /\ Scaling(0, B)=EmptyBag - /\ Scaling(1, B)= B - /\ Scaling((n*m), B) = Scaling(n, Scaling(m, B)) - /\ n>0 => DOMAIN(Scaling(n, B))= DOMAIN B -PROOF -<1>1. IsABag(Scaling(n, B)) - <2>1. CASE n=0 - <3>1. Scaling(n, B)= EmptyBag - BY <2>1 DEF Scaling - <3>2. QED - BY <3>1, Bags_EmptyBag - <2>2. CASE n>0 - BY <2>2, SMT DEF IsABag, Scaling - <2>3. QED - BY <2>1, <2>2, SMT - -<1>2. Scaling(n, EmptyBag)=EmptyBag - <2>1. DOMAIN Scaling(n, EmptyBag)={} - BY Bags_EmptyBag DEF Scaling - <2>2. IsABag(Scaling(n, EmptyBag)) - BY Bags_EmptyBag, SMT DEF Scaling, EmptyBag, SetToBag, IsABag - <2>. QED - BY <2>1, <2>2, Bags_EmptyBag -<1>3. Scaling(0, B)=EmptyBag - BY DEF Scaling -<1>4. Scaling(1, B)= B - BY SMT DEF Scaling, IsABag -<1>5. Scaling((n*m), B) = Scaling(n, Scaling(m, B)) - <2>1. CASE m>0 /\ n>0 - <3>1. n*m>0 - BY <2>1, SMT - <3>2. QED - BY <3>1, <2>1, SMT DEF Scaling, IsABag - <2>2. CASE m>0 /\ n=0 - <3>1. n*m=0 - BY <2>2, SMT - <3>2. QED - BY <3>1, <2>2, SMT DEF Scaling, IsABag - <2>3. CASE m=0 /\ n>0 - <3>1. Scaling(n, Scaling(m, B))=EmptyBag - BY <2>3, <1>2, <1>3 - <3>2. Scaling(n*m, B)=EmptyBag - BY <2>3, SMT DEF Scaling, IsABag - <3>3. QED - BY <3>1, <3>2 - <2>4. CASE m=0 /\ n=0 - <3>1. n*m=0 - BY <2>4, SMT - <3>2. QED - BY <3>1, <2>4, SMT DEF Scaling, IsABag - <2>5. QED - BY SMT, <2>1, <2>2, <2>3, <2>4 -<1>6. ASSUME n>0 PROVE DOMAIN Scaling(n, B)=DOMAIN B - <2>1. QED - BY <1>6, <1>1 DEF Scaling, IsABag -<1> QED - BY <1>1, <1>2, <1>3, <1>4, <1>5, <1>6 - - -(***************************************************************************) -(* SetToBag and BagToSet are inverse of each other *) -(***************************************************************************) - -THEOREM Bags_Inverse == ASSUME NEW S - PROVE BagToSet(SetToBag(S))=S -BY DEF SetToBag, BagToSet - -THEOREM Bags_Inverse1 == ASSUME NEW B, IsABag(B) - PROVE SetToBag(BagToSet(B)) \sqsubseteq B -<1>1. DOMAIN SetToBag(BagToSet(B)) \subseteq DOMAIN B - BY DEF SetToBag, BagToSet, \sqsubseteq, IsABag -<1>2. \A i \in DOMAIN SetToBag(BagToSet(B)): SetToBag(BagToSet(B))[i] <= B[i] - <2>1. TAKE i \in DOMAIN SetToBag(BagToSet(B)) - <2>2. QED - BY <2>1, SMT DEF SetToBag, BagToSet, IsABag -<1>3. QED - BY <1>1, <1>2 DEF \sqsubseteq - -(***************************************************************************) -(* SetToBag Preserves Equality *) -(***************************************************************************) - -THEOREM Bags_SetToBagEquality == ASSUME NEW A, NEW B - PROVE A=B <=> SetToBag(A)=SetToBag(B) -<1>1. A=B => SetToBag(A) = SetToBag(B) - BY DEF SetToBag -<1>2. ASSUME SetToBag(A)=SetToBag(B) PROVE A=B - <2>1. BagToSet(SetToBag(A))=BagToSet(SetToBag(B)) - BY <1>2 - <2>2. QED - BY <2>1, Bags_Inverse -<1>3. 
QED - BY <1>1, <1>2 - -(***************************************************************************) -(* Union of Bags *) -(***************************************************************************) - -THEOREM Bags_Union == - ASSUME NEW B1, NEW B2, IsABag(B1), IsABag(B2) - PROVE /\ IsABag(B1(+)B2) - /\ DOMAIN(B1 (+) B2) = DOMAIN B1 \cup DOMAIN B2 - /\ \A e : CopiesIn(e, B1(+)B2) = CopiesIn(e,B1) + CopiesIn(e,B2) -BY DEF IsABag, (+), CopiesIn, BagIn, BagToSet - -(***************************************************************************) -(* Differene of Bags *) -(***************************************************************************) - -THEOREM Bags_Difference == - ASSUME NEW B1, NEW B2, IsABag(B1), IsABag(B2) - PROVE /\ IsABag(B1(-)B2) - /\ DOMAIN (B1 (-) B2) = {e \in DOMAIN B1 : e \notin DOMAIN B2 \/ B1[e] > B2[e]} - /\ \A e : CopiesIn(e, B1 (-) B2) = IF BagIn(e, B1(-)B2) THEN CopiesIn(e,B1) - CopiesIn(e,B2) ELSE 0 -<1>. DEFINE B == [e \in DOMAIN B1 |-> IF e \in DOMAIN B2 THEN B1[e] - B2[e] - ELSE B1[e]] - D == {d \in DOMAIN B1 : B[d] > 0} -<1>1. B \in [DOMAIN B1 -> Int] - BY DEF IsABag -<1>2. B1 (-) B2 = [e \in D |-> B[e]] - BY DEF (-) -<1>3. D = {e \in DOMAIN B1 : e \notin DOMAIN B2 \/ B1[e] > B2[e]} - BY DEF IsABag -<1>4. \A e \in D : B[e] = B1[e] - (IF e \in DOMAIN B2 THEN B2[e] ELSE 0) - BY DEF IsABag -<1>. HIDE DEF B -<1>. QED - BY <1>1, <1>2, <1>3, <1>4 DEF IsABag, CopiesIn, BagIn, BagToSet - -(***************************************************************************) -(* Union is Commutative *) -(***************************************************************************) - -THEOREM Bags_UnionCommutative == ASSUME NEW B1, NEW B2, IsABag(B1), IsABag(B2) - PROVE B1(+)B2 = B2(+)B1 -<1>1. DOMAIN(B1(+)B2) = DOMAIN(B2(+)B1) - BY DEF (+) -<1>2. B1(+)B2 \in [DOMAIN(B1(+)B2) -> {n \in Nat: n>0}] /\ B2(+)B1 \in [DOMAIN(B1(+)B2) -> {n \in Nat: n>0}] - BY <1>1, Bags_Union DEF IsABag -<1>3. \A i \in DOMAIN(B1(+)B2): (B1(+)B2)[i] = (B2(+)B1)[i] - <2>1. TAKE i \in DOMAIN(B1(+)B2) - <2>. QED - BY SMT, <2>1 DEF (+), IsABag -<1>4. QED - BY <1>1, <1>2, <1>3 - -(***************************************************************************) -(* Unon is Associative *) -(***************************************************************************) - -THEOREM Bags_UnionAssociative == ASSUME NEW B1, NEW B2, NEW B3, IsABag(B1), IsABag(B2), IsABag(B3) - PROVE (B1(+)B2)(+)B3 = B1(+)(B2(+)B3) -BY DEF IsABag, (+) - -(***************************************************************************) -(* Given Bags B1, B2 then B1 \sqsubseteq B1(+)B2 *) -(***************************************************************************) - -THEOREM Bags_UnionSqSubset == ASSUME NEW B1, NEW B2, IsABag(B1), IsABag(B2) - PROVE B1 \sqsubseteq B1(+)B2 -<1>1. IsABag(B1(+)B2) - BY Bags_Union -<1>2. DOMAIN B1 \subseteq DOMAIN(B1(+)B2) - BY DEF (+) -<1>3. \A i \in DOMAIN B1: B1[i]<=(B1(+)B2)[i] - <2>1. TAKE i \in DOMAIN B1 - <2>2. QED - BY <2>1, <1>1, SMT DEF IsABag, \sqsubseteq, (+) -<1>4. QED - BY <1>2, <1>3 DEF \sqsubseteq, (+) - -(***************************************************************************) -(* Given Bag B1, B1 \sqsubseteq Scaling(n, B1) for all n>0 *) -(***************************************************************************) - -THEOREM Bags_ScalingSqSubseteq == ASSUME NEW B, IsABag(B), NEW n \in Nat, NEW m \in Nat, m1. CASE m>0 /\ n>0 - <2>1. DOMAIN Scaling(m, B)= DOMAIN Scaling(n, B) - BY <1>1, Bags_Scaling - <2>2. \A i \in DOMAIN Scaling(m, B): Scaling(m, B)[i]<= Scaling(n, B)[i] - <3>1. 
TAKE i \in DOMAIN Scaling(m, B) - <3>2. QED - BY <1>1, SMT DEF Scaling, IsABag - <2>3. QED - BY <2>1, <2>2 DEF \sqsubseteq -<1>2. CASE m=0 /\ n>0 - <2>1. Scaling(m, B)=EmptyBag - BY <1>2, Bags_Union DEF Scaling - <2>2. QED - BY <2>1, Bags_EmptyBag, Bags_Scaling -<1>3. CASE m>0 /\ n=0 \* Impossible Case - BY <1>3, SMT -<1>4. CASE m=0 /\ n=0 \* Impossible Case - BY <1>4, SMT -<1>5. QED - BY <1>1, <1>2, <1>3, <1>4, SMT - -(***************************************************************************) -(* Given Bags A and B, A(-)B \sqsubseteq A *) -(***************************************************************************) - -THEOREM Bags_DifferenceSqsubset == ASSUME NEW A, NEW B, IsABag(A), IsABag(B) - PROVE A(-)B \sqsubseteq A -<1>1. DOMAIN(A(-)B) \subseteq DOMAIN A - BY DEF (-) -<1>2. \A i \in DOMAIN(A(-)B) : (A(-)B)[i] <= A[i] - <2>1. TAKE i \in DOMAIN(A(-)B) - <2>2. QED - BY <2>1, SMT DEF (-), IsABag -<1>3. QED - BY <1>1, <1>2 DEF \sqsubseteq - -(***************************************************************************) -(* EmptyBag is Addidtive Identity *) -(***************************************************************************) - -THEOREM Bags_EmptyBagOperations == ASSUME NEW B, IsABag(B) - PROVE /\ B (+) EmptyBag = B - /\ B (-) EmptyBag = B -<1>1. B (+) EmptyBag = B - <2>1. IsABag(B(+)EmptyBag) - BY Bags_EmptyBag, Bags_Union - <2>2. DOMAIN(B(+)EmptyBag) = DOMAIN B - BY Bags_EmptyBag DEF (+) - <2>3. B \in [DOMAIN B -> {n \in Nat : n>0}] /\ B(+)EmptyBag \in [DOMAIN B -> {n \in Nat : n>0}] - BY <2>1, <2>2 DEF IsABag - <2>4. \A i \in DOMAIN B: (B(+)EmptyBag)[i]=B[i] - <3>1. TAKE i \in DOMAIN B - <3>2. QED - BY <3>1, SMT DEF EmptyBag, (+), IsABag, SetToBag - <2>5. QED - BY <2>2, <2>3, <2>4 -<1>2. B (-) EmptyBag = B - <2>1. /\ IsABag(B(-)EmptyBag) - /\ DOMAIN(B (-) EmptyBag) = DOMAIN B - BY Bags_EmptyBag, Bags_Difference, Isa - <2>3. B \in [DOMAIN B -> {n \in Nat : n>0}] /\ B(-)EmptyBag \in [DOMAIN B -> {n \in Nat : n>0}] - BY <2>1 DEF IsABag - <2>4. \A i \in DOMAIN B: (B(-)EmptyBag)[i]=B[i] - <3>1. TAKE i \in DOMAIN B - <3>2. QED - BY <3>1 DEF EmptyBag, (-), IsABag, SetToBag - <2>5. QED - BY <2>1, <2>3, <2>4 -<1>3. QED - BY <1>1, <1>2 - -(***************************************************************************) -(* SetToBag of a set is a Bag *) -(***************************************************************************) - -THEOREM Bags_SetToBagIsABag == ASSUME NEW S - PROVE IsABag(SetToBag(S)) -BY DEF IsABag, SetToBag - -(***************************************************************************) -(* CopiesIn Monotone w.r.t \sqsubseteq *) -(***************************************************************************) - -THEOREM Bags_CopiesInBagsInMonotone == - ASSUME NEW B1, NEW B2, NEW e, IsABag(B1), IsABag(B2), B1 \sqsubseteq B2 - PROVE /\ BagIn(e, B1) => BagIn(e, B2) - /\ CopiesIn(e, B1) <= CopiesIn(e, B2) -<1>1. ASSUME BagIn(e, B1) PROVE BagIn(e, B2) - BY <1>1 DEF BagIn, BagToSet, \sqsubseteq -<1>2. CopiesIn(e, B1) <= CopiesIn(e, B2) - <2>1. CASE BagIn(e, B1) - BY <2>1 DEF CopiesIn, BagIn, \sqsubseteq, BagToSet - <2>2. CASE ~BagIn(e, B1) - BY <2>2, SMT DEF \sqsubseteq, IsABag, CopiesIn, BagIn, BagToSet - <2>3. QED - BY <2>1, <2>2 -<1>3. 
QED - BY <1>1, <1>2 - - -(***************************************************************************) -(* Given Bag B and Natural n, CopiesIn(e, Scaling(n, B))=n*CopiesIn(e, B) *) -(***************************************************************************) - -THEOREM Bags_CopiesInScaling == ASSUME NEW B, IsABag(B), NEW n \in Nat, NEW e - PROVE CopiesIn(e, Scaling(n, B))=n*CopiesIn(e, B) -PROOF -<1>1. CASE n=0 - BY <1>1, Bags_Scaling, Bags_EmptyBag, SMT DEF CopiesIn, IsABag -<1>2. CASE n>0 - BY <1>2, SMT DEF CopiesIn, IsABag, Scaling, BagIn, BagToSet -<1>3. QED - BY <1>1, <1>2, SMT - -(***************************************************************************) -(* Given set S, CopiesIn(e, SetToBag(S))=IF e \in B THEN 1 ELSE 0 *) -(***************************************************************************) - -THEOREM Bags_CopiesInSetToBag == ASSUME NEW B, NEW e - PROVE CopiesIn(e, SetToBag(B))=IF e \in B THEN 1 ELSE 0 -PROOF -<1>1. ASSUME e \in B PROVE CopiesIn(e, SetToBag(B))=1 - BY <1>1 DEF CopiesIn, BagIn, BagToSet, SetToBag -<1>2. ASSUME e \notin B PROVE CopiesIn(e, SetToBag(B))=0 - BY <1>2 DEF CopiesIn, BagIn, BagToSet, SetToBag -<1>3. QED - BY <1>2, <1>1 - -(***************************************************************************) -(* Given sequence seq, SeqToBag(seq) is a Bag *) -(***************************************************************************) - -THEOREM Bags_IsABagSeqToBag == - ASSUME NEW S, NEW seq \in Seq(S) - PROVE IsABag(SeqToBag(seq)) -<1>1. \A x \in DOMAIN SeqToBag(seq): SeqToBag(seq)[x] \in Nat \ {0} - <2>1. TAKE x \in DOMAIN SeqToBag(seq) - <2>2. SeqToBag(seq)[x] \in Nat \ {0} - <3>1. CASE seq = << >> - <4>1. DOMAIN SeqToBag(seq)= {} - BY <3>1 DEF Range, SeqToBag - <4>2. QED - BY <4>1, Bags_EmptyBag - <3>2. CASE seq # << >> - <4>1. {i \in DOMAIN seq: seq[i]=x }#{} - BY <2>1, <3>2 DEF SeqToBag, Range - <4>. IsFiniteSet({i \in DOMAIN seq: seq[i]=x }) - <5>1. {i \in DOMAIN seq: seq[i]=x } \subseteq DOMAIN seq - OBVIOUS - <5>2. IsFiniteSet(DOMAIN seq) - BY SeqDef, FS_Interval - <5>3. QED - BY <5>1, <5>2, FS_Subset - <4>2. QED - BY <4>1, SMT, FS_EmptySet, FS_CardinalityType DEF SeqToBag - <3>3. QED - BY <3>1, <3>2 - <2>3. QED - BY <2>2 DEF SeqToBag -<1>2. QED - BY <1>1 DEF IsABag, SeqToBag - -============================================================================= - -(* Last modified on Fri 26 Jan 2007 at 8:45:03 PST by lamport *) - - 6 Apr 99 : Modified version for standard module set - 7 Dec 98 : Corrected error found by Stephan Merz. - 6 Dec 98 : Modified comments based on suggestions by Lyle Ramshaw. - 5 Dec 98 : Initial version. diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/FiniteSetTheorems.tla b/x/ccv/provider/keyguard/prototyping/tla/library/FiniteSetTheorems.tla deleted file mode 100644 index 9fdc0a0b2b..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/FiniteSetTheorems.tla +++ /dev/null @@ -1,385 +0,0 @@ -------------------------- MODULE FiniteSetTheorems -------------------------- -(***************************************************************************) -(* `^{\large \vspace{12pt} *) -(* Facts about finite sets and their cardinality. *) -(* Originally contributed by Tom Rodeheffer, MSR. *) -(* \vspace{12pt}}^' *) -(* Proofs of these theorems appear in module FiniteSetTheorems_proofs. 
*) -(***************************************************************************) - -EXTENDS - FiniteSets, - Functions, - WellFoundedInduction - - -(***************************************************************************) -(* `. .' *) -(* *) -(* A set S is finite iff there exists a natural number n such that there *) -(* exist a surjection (or a bijection) from 1..n to S. *) -(* *) -(* `. .' *) -(***************************************************************************) - -LEMMA FS_NatSurjection == - ASSUME NEW S - PROVE IsFiniteSet(S) <=> \E n \in Nat : ExistsSurjection(1..n,S) - - -LEMMA FS_NatBijection == - ASSUME NEW S - PROVE IsFiniteSet(S) <=> \E n \in Nat : ExistsBijection(1..n,S) - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If there exists n \in Nat such that a bijection exists from 1..n to S, *) -(* then Cardinality(S) = n. *) -(* *) -(* `. .' *) -(***************************************************************************) - -LEMMA FS_CountingElements == - ASSUME NEW S, NEW n \in Nat, ExistsBijection(1..n,S) - PROVE Cardinality(S) = n - - -(***************************************************************************) -(* Corollary: a surjection from 1..n to S provides a cardinality bound. *) -(***************************************************************************) -THEOREM FS_SurjCardinalityBound == - ASSUME NEW S, NEW n \in Nat, ExistsSurjection(1..n, S) - PROVE Cardinality(S) <= n - - -(***************************************************************************) -(* `. .' *) -(* *) -(* For any finite set S, Cardinality(S) \in Nat. Moreover, there is a *) -(* bijection from 1 .. Cardinality(S) to S. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_CardinalityType == - ASSUME NEW S, IsFiniteSet(S) - PROVE /\ Cardinality(S) \in Nat - /\ ExistsBijection(1..Cardinality(S), S) - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The image of a finite set under a bijection or surjection is finite. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_Bijection == - ASSUME NEW S, NEW T, IsFiniteSet(S), ExistsBijection(S,T) - PROVE /\ IsFiniteSet(T) - /\ Cardinality(T) = Cardinality(S) - - -THEOREM FS_SameCardinalityBij == - ASSUME NEW S, NEW T, IsFiniteSet(S), IsFiniteSet(T), - Cardinality(S) = Cardinality(T) - PROVE ExistsBijection(S,T) - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Any surjection between two finite sets of equal cardinality is *) -(* an injection. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_SurjSameCardinalityImpliesInj == - ASSUME NEW S, NEW T, IsFiniteSet(S), Cardinality(S) = Cardinality(T), - NEW f \in Surjection(S,T) - PROVE f \in Injection(S,T) - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The image of a finite set is finite. *) -(* *) -(* NB: Note that any function is a surjection on its range by theorem *) -(* Fun_RangeProperties. *) -(* *) -(* `. .' 
*) -(***************************************************************************) -THEOREM FS_Surjection == - ASSUME NEW S, NEW T, NEW f \in Surjection(S,T), IsFiniteSet(S) - PROVE /\ IsFiniteSet(T) - /\ Cardinality(T) <= Cardinality(S) - /\ Cardinality(T) = Cardinality(S) <=> f \in Injection(S,T) - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The cardinality of a finite set S is 0 iff S is empty. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_EmptySet == - /\ IsFiniteSet({}) - /\ Cardinality({}) = 0 - /\ \A S : IsFiniteSet(S) => (Cardinality(S) = 0 <=> S = {}) - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If S is finite, so are S \cup {x} and S \ {x}. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_AddElement == - ASSUME NEW S, NEW x, IsFiniteSet(S) - PROVE /\ IsFiniteSet(S \cup {x}) - /\ Cardinality(S \cup {x}) = - IF x \in S THEN Cardinality(S) ELSE Cardinality(S)+1 - - -THEOREM FS_RemoveElement == - ASSUME NEW S, NEW x, IsFiniteSet(S) - PROVE /\ IsFiniteSet(S \ {x}) - /\ Cardinality(S \ {x}) = - IF x \in S THEN Cardinality(S)-1 ELSE Cardinality(S) - - -(***************************************************************************) -(* `. .' *) -(* *) -(* In particular, a singleton set is finite. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_Singleton == - /\ \A x : IsFiniteSet({x}) /\ Cardinality({x}) = 1 - /\ \A S : IsFiniteSet(S) => (Cardinality(S) = 1 <=> \E x: S = {x}) - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Any subset of a finite set is finite. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_Subset == - ASSUME NEW S, IsFiniteSet(S), NEW T \in SUBSET S - PROVE /\ IsFiniteSet(T) - /\ Cardinality(T) <= Cardinality(S) - /\ Cardinality(S) = Cardinality(T) => S = T - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* a..b is a finite set for any a,b \in Int. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_Interval == - ASSUME NEW a \in Int, NEW b \in Int - PROVE /\ IsFiniteSet(a..b) - /\ Cardinality(a..b) = IF a > b THEN 0 ELSE b-a+1 - - -THEOREM FS_BoundedSetOfNaturals == - ASSUME NEW S \in SUBSET Nat, NEW n \in Nat, - \A s \in S : s <= n - PROVE /\ IsFiniteSet(S) - /\ Cardinality(S) \leq n+1 - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Induction for finite sets. *) -(* *) -(* `. .' *) -(***************************************************************************) - -THEOREM FS_Induction == - ASSUME NEW S, IsFiniteSet(S), - NEW P(_), P({}), - ASSUME NEW T, NEW x, IsFiniteSet(T), P(T), x \notin T - PROVE P(T \cup {x}) - PROVE P(S) - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The finite subsets form a well-founded ordering with respect to strict *) -(* set inclusion. *) -(* *) -(* `. .' 
*) -(***************************************************************************) - -FiniteSubsetsOf(S) == { T \in SUBSET S : IsFiniteSet(T) } -StrictSubsetOrdering(S) == { ss \in (SUBSET S) \X (SUBSET S) : - ss[1] \subseteq ss[2] /\ ss[1] # ss[2] } - -LEMMA FS_FiniteSubsetsOfFinite == - ASSUME NEW S, IsFiniteSet(S) - PROVE FiniteSubsetsOf(S) = SUBSET S - - -(*****************************************************************************) -(* The formulation of the following theorem doesn't require S being finite. *) -(* If S is finite, it implies *) -(* IsWellFoundedOn(StrictSubsetOrdering(S), SUBSET S) *) -(* using lemma FS_FiniteSubsetsOfFinite. *) -(*****************************************************************************) -THEOREM FS_StrictSubsetOrderingWellFounded == - ASSUME NEW S - PROVE IsWellFoundedOn(StrictSubsetOrdering(S), FiniteSubsetsOf(S)) - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Well-founded induction for finite subsets. *) -(* *) -(* `. .' *) -(***************************************************************************) - -THEOREM FS_WFInduction == - ASSUME NEW P(_), NEW S, IsFiniteSet(S), - ASSUME NEW T \in SUBSET S, - \A U \in (SUBSET T) \ {T} : P(U) - PROVE P(T) - PROVE P(S) - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The union of two finite sets is finite. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_Union == - ASSUME NEW S, IsFiniteSet(S), - NEW T, IsFiniteSet(T) - PROVE /\ IsFiniteSet(S \cup T) - /\ Cardinality(S \cup T) = - Cardinality(S) + Cardinality(T) - Cardinality(S \cap T) - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Corollary: two majorities intersect. More precisely, any two subsets *) -(* of a finite set U such that the sum of cardinalities of the subsets *) -(* exceeds that of U must have non-empty intersection. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_MajoritiesIntersect == - ASSUME NEW U, NEW S, NEW T, IsFiniteSet(U), - S \subseteq U, T \subseteq U, - Cardinality(S) + Cardinality(T) > Cardinality(U) - PROVE S \cap T # {} - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The intersection of a finite set with an arbitrary set is finite. *) -(* *) -(* `. .' *) -(***************************************************************************) - - -THEOREM FS_Intersection == - ASSUME NEW S, IsFiniteSet(S), NEW T - PROVE /\ IsFiniteSet(S \cap T) - /\ IsFiniteSet(T \cap S) - /\ Cardinality(S \cap T) <= Cardinality(S) - /\ Cardinality(T \cap S) <= Cardinality(S) - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The difference between a finite set and an arbitrary set is finite. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_Difference == - ASSUME NEW S, NEW T, IsFiniteSet(S) - PROVE /\ IsFiniteSet(S \ T) - /\ Cardinality(S \ T) = Cardinality(S) - Cardinality(S \cap T) - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The union of a finite number of finite sets is finite. *) -(* *) -(* `. .' 
*) -(***************************************************************************) -THEOREM FS_UNION == - ASSUME NEW S, IsFiniteSet(S), \A T \in S : IsFiniteSet(T) - PROVE IsFiniteSet(UNION S) - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The product of two finite sets is finite. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_Product == - ASSUME NEW S, IsFiniteSet(S), - NEW T, IsFiniteSet(T) - PROVE /\ IsFiniteSet(S \X T) - /\ Cardinality(S \X T) = Cardinality(S) * Cardinality(T) - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The powerset of a finite set is finite. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_SUBSET == - ASSUME NEW S, IsFiniteSet(S) - PROVE /\ IsFiniteSet(SUBSET S) - /\ Cardinality(SUBSET S) = 2^Cardinality(S) - - - - - - -============================================================================= -\* Modification History -\* Last modified Fri Feb 14 19:42:05 GMT-03:00 2014 by merz -\* Last modified Thu Jul 04 15:15:07 CEST 2013 by bhargav -\* Last modified Tue Jun 04 11:44:51 CEST 2013 by bhargav -\* Last modified Fri May 03 12:02:51 PDT 2013 by tomr -\* Created Fri Oct 05 15:04:18 PDT 2012 by tomr \ No newline at end of file diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/FiniteSetTheorems_proofs.tla b/x/ccv/provider/keyguard/prototyping/tla/library/FiniteSetTheorems_proofs.tla deleted file mode 100644 index 3da179777f..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/FiniteSetTheorems_proofs.tla +++ /dev/null @@ -1,848 +0,0 @@ ----------------------- MODULE FiniteSetTheorems_proofs ---------------------- -(***************************************************************************) -(* `^{\large\bf \vspace{12pt} *) -(* Facts about finite sets and their cardinality. *) -(* Originally contributed by Tom Rodeheffer, MSR. *) -(* \vspace{12pt}}^' *) -(***************************************************************************) - -EXTENDS - FiniteSets, - Sequences, - FunctionTheorems, - WellFoundedInduction, - TLAPS - -(***************************************************************************) -(* Arithmetic lemma that is currently not proved. *) -(***************************************************************************) -LEMMA TwoExpLemma == - ASSUME NEW n \in Nat - PROVE 2^(n+1) = 2^n + 2^n -PROOF OMITTED - - -(***************************************************************************) -(* `. .' *) -(* *) -(* A set S is finite iff there exists a natural number n such that there *) -(* exist a surjection (or a bijection) from 1..n to S. *) -(* *) -(* `. .' *) -(***************************************************************************) - -LEMMA FS_NatSurjection == - ASSUME NEW S - PROVE IsFiniteSet(S) <=> \E n \in Nat : ExistsSurjection(1..n,S) - -<1>1. ASSUME IsFiniteSet(S) PROVE \E n \in Nat : ExistsSurjection(1..n,S) - <2>1. PICK Q \in Seq(S) : \A s \in S : \E i \in 1..Len(Q) : Q[i] = s - BY <1>1 DEF IsFiniteSet - <2>2. /\ Len(Q) \in Nat - /\ Q \in Surjection(1..Len(Q),S) - BY <2>1 DEF Surjection - <2> QED BY <2>2 DEF ExistsSurjection - -<1>2. 
ASSUME NEW n \in Nat, ExistsSurjection(1..n,S) PROVE IsFiniteSet(S) - BY <1>2 DEF IsFiniteSet, ExistsSurjection, Surjection - -<1> QED BY <1>1, <1>2 - - -LEMMA FS_NatBijection == - ASSUME NEW S - PROVE IsFiniteSet(S) <=> \E n \in Nat : ExistsBijection(1..n,S) -BY FS_NatSurjection, Fun_NatSurjEquivNatBij - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If there exists n \in Nat such that a bijection exists from 1..n to S, *) -(* then Cardinality(S) = n. *) -(* *) -(* `. .' *) -(***************************************************************************) - -LEMMA FS_CountingElements == - ASSUME NEW S, NEW n \in Nat, ExistsBijection(1..n,S) - PROVE Cardinality(S) = n -PROOF - <1> DEFINE - (***********************************************************************) - (* Size of set T. *) - (***********************************************************************) - Size(T) == CHOOSE i \in Nat : ExistsBijection(1..i,T) - - (***********************************************************************) - (* Size function for subsets of S. *) - (***********************************************************************) - SZ == [ T \in SUBSET S |-> Size(T) ] - - (***********************************************************************) - (* Formula part of the CS property for element T. *) - (***********************************************************************) - fn(CS,T) == IF T = {} THEN 0 ELSE 1 + CS[T \ {CHOOSE x : x \in T}] - - (***********************************************************************) - (* The CS property. *) - (***********************************************************************) - IsCS(CS) == CS = [T \in SUBSET S |-> fn(CS,T)] - - (***********************************************************************) - (* CS function for subsets of S. Since this is defined as CHOOSE *) - (* something that satisfies the CS property, we do not know that the *) - (* CS function actually satisfies the CS property until we know that *) - (* there exists something that satisfies the CS property. *) - (***********************************************************************) - CS == CHOOSE CS : IsCS(CS) - - <1> HIDE DEF SZ, CS, fn - - - (*************************************************************************) - (* The SZ function satisfies the CS property. *) - (*************************************************************************) - <1>1. IsCS(SZ) - (***********************************************************************) - (* Use induction on the size of T to show that the values match at *) - (* each T \in SUBSET S. *) - (***********************************************************************) - <2> DEFINE - Prop(i) == \A T \in SUBSET S : ExistsBijection(1..i,T) => SZ[T] = fn(SZ,T) - - <2>1. \A i \in Nat : Prop(i) - <3>1. Prop(0) - (*******************************************************************) - (* Base step. *) - (*******************************************************************) - <4>1. SUFFICES ASSUME NEW T \in SUBSET S, ExistsBijection(1..0,T) - PROVE SZ[T] = fn(SZ,T) - OBVIOUS - <4>2. Size(T) = 0 BY <4>1, Fun_NatBijSame - <4>3. T = {} BY <4>1, Fun_NatBijEmpty - <4>4. SZ[T] = 0 BY <4>2 DEF SZ - <4>5. fn(SZ,T) = 0 BY <4>3 DEF fn - <4> QED BY <4>4, <4>5 - - <3>2. ASSUME NEW i \in Nat, Prop(i) PROVE Prop(i+1) - (*******************************************************************) - (* Inductive step. *) - (*******************************************************************) - <4>1. PICK j \in Nat : j = i+1 BY Isa - <4>2. 
j # 0 BY <4>1, SMT - <4>3. i = j-1 BY <4>1, SMT - <4>4. SUFFICES ASSUME NEW T \in SUBSET S, ExistsBijection(1..j,T) - PROVE SZ[T] = fn(SZ,T) - BY <4>1 - <4>5. ~ExistsBijection(1..0,T) BY <4>2, <4>4, Fun_NatBijSame - <4>6. T # {} BY <4>5, Fun_NatBijEmpty - <4>7. Size(T) = j BY <4>4, Fun_NatBijSame - <4>8. PICK t \in T : t = CHOOSE x : x \in T BY <4>6 - <4>9. PICK U \in SUBSET S : U = T \ {t} OBVIOUS - <4>10. ExistsBijection(1..i,U) BY <4>3, <4>4, <4>9, Fun_NatBijSubElem - <4>11. SZ[U] = fn(SZ,U) BY <4>10, <3>2 - <4>12. SZ[U] = i BY <4>10, Fun_NatBijSame DEF SZ - <4>13. fn(SZ,T) = 1 + SZ[U] BY <4>6, <4>8, <4>9 DEF fn - <4>14. fn(SZ,T) = j BY <4>1, <4>12, <4>13, SMT - <4>15. SZ[T] = j BY <4>7 DEF SZ - <4> QED BY <4>14, <4>15 - - <3> HIDE DEF Prop - <3> QED BY Isa, <3>1, <3>2, NatInduction - - <2> SUFFICES ASSUME NEW T \in SUBSET S PROVE SZ[T] = fn(SZ,T) BY DEF SZ - <2>2. PICK i \in Nat : ExistsBijection(1..i,T) BY Fun_NatBijSubset - <2> QED BY <2>1, <2>2 - - - (*************************************************************************) - (* Any two things that satisfy the CS property must be equal. *) - (*************************************************************************) - <1>2. ASSUME - NEW CS1, IsCS(CS1), - NEW CS2, IsCS(CS2) - PROVE CS1 = CS2 - (***********************************************************************) - (* Use induction on the size of T to show that the values match at *) - (* each T \in SUBSET S. *) - (***********************************************************************) - <2> DEFINE - Prop(i) == \A T \in SUBSET S : ExistsBijection(1..i,T) => CS1[T] = CS2[T] - - <2>1. \A i \in Nat : Prop(i) - <3>1. Prop(0) - (*******************************************************************) - (* Base step. *) - (*******************************************************************) - <4>1. SUFFICES ASSUME NEW T \in SUBSET S, ExistsBijection(1..0,T) - PROVE CS1[T] = CS2[T] - OBVIOUS - <4>2. T = {} BY <4>1, Fun_NatBijEmpty - <4>3. fn(CS1,T) = 0 BY <4>2 DEF fn - <4>4. fn(CS2,T) = 0 BY <4>2 DEF fn - <4> QED BY <4>3, <4>4, <1>2 - - <3>2. ASSUME NEW i \in Nat, Prop(i) PROVE Prop(i+1) - (*******************************************************************) - (* Inductive step. *) - (*******************************************************************) - <4>1. PICK j \in Nat : j = i+1 BY Isa - <4>2. j # 0 BY <4>1, SMT - <4>3. i = j-1 BY <4>1, SMT - <4>4. SUFFICES ASSUME NEW T \in SUBSET S, ExistsBijection(1..j,T) - PROVE CS1[T] = CS2[T] - BY <4>1 - <4>5. ~ExistsBijection(1..0,T) BY <4>2, <4>4, Fun_NatBijSame - <4>6. T # {} BY <4>5, Fun_NatBijEmpty - <4>7. PICK t \in T : t = CHOOSE x : x \in T BY <4>6 - <4>8. PICK U \in SUBSET S : U = T \ {t} OBVIOUS - <4>9. ExistsBijection(1..i,U) BY <4>3, <4>4, <4>8, Fun_NatBijSubElem - <4>10. CS1[U] = CS2[U] BY <4>9, <3>2 - <4>11. CS1[T] = 1 + CS1[U] BY <4>6, <4>7, <4>8, <1>2 DEF fn - <4>12. CS2[T] = 1 + CS2[U] BY <4>6, <4>7, <4>8, <1>2 DEF fn - <4> QED BY <4>10, <4>11, <4>12 - - <3> HIDE DEF Prop - <3> QED BY Isa, <3>1, <3>2, NatInduction - - <2> SUFFICES ASSUME NEW T \in SUBSET S PROVE CS1[T] = CS2[T] BY <1>2 - <2>2. PICK i \in Nat : ExistsBijection(1..i,T) BY Fun_NatBijSubset - <2> QED BY <2>1, <2>2 - - - (*************************************************************************) - (* Since SZ satisfies the CS property, the CS function must satisfy the *) - (* CS property. And it must be the same as SZ. *) - (*************************************************************************) - <1>3. IsCS(CS) BY <1>1 DEF CS - <1>4. 
CS = SZ BY <1>1, <1>2, <1>3 - - - <1>5. Cardinality(S) = CS[S] BY DEF Cardinality, CS, fn - <1>6. S \in SUBSET S OBVIOUS - <1>7. SZ[S] = Size(S) BY <1>6 DEF SZ - <1>8. Size(S) = n BY Fun_NatBijSame - <1> QED BY <1>4, <1>5, <1>7, <1>8 - - -(***************************************************************************) -(* Corollary: a surjection from 1..n to S provides a cardinality bound. *) -(***************************************************************************) -THEOREM FS_SurjCardinalityBound == - ASSUME NEW S, NEW n \in Nat, ExistsSurjection(1..n, S) - PROVE Cardinality(S) <= n -BY Fun_NatSurjImpliesNatBij, FS_CountingElements - - -(***************************************************************************) -(* `. .' *) -(* *) -(* For any finite set S, Cardinality(S) \in Nat. Moreover, there is a *) -(* bijection from 1 .. Cardinality(S) to S. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_CardinalityType == - ASSUME NEW S, IsFiniteSet(S) - PROVE /\ Cardinality(S) \in Nat - /\ ExistsBijection(1..Cardinality(S), S) -BY FS_NatBijection, FS_CountingElements - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The image of a finite set under a bijection or surjection is finite. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_Bijection == - ASSUME NEW S, NEW T, IsFiniteSet(S), ExistsBijection(S,T) - PROVE /\ IsFiniteSet(T) - /\ Cardinality(T) = Cardinality(S) -BY FS_CardinalityType, Fun_ExistsBijTransitive, FS_CountingElements, - FS_NatBijection - - -THEOREM FS_SameCardinalityBij == - ASSUME NEW S, NEW T, IsFiniteSet(S), IsFiniteSet(T), - Cardinality(S) = Cardinality(T) - PROVE ExistsBijection(S,T) -BY FS_CardinalityType, Fun_ExistsBijSymmetric, Fun_ExistsBijTransitive - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Any surjection between two finite sets of equal cardinality is *) -(* an injection. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_SurjSameCardinalityImpliesInj == - ASSUME NEW S, NEW T, IsFiniteSet(S), Cardinality(S) = Cardinality(T), - NEW f \in Surjection(S,T) - PROVE f \in Injection(S,T) - -<1>1. SUFFICES ASSUME NEW a \in S, NEW b \in S, a # b, f[a] = f[b] - PROVE FALSE - BY DEF Injection, Surjection -<1>. DEFINE n == Cardinality(S) -<1>. n \in Nat BY FS_CardinalityType -<1>. PICK g \in Bijection(1..n, S) : TRUE - BY FS_CardinalityType DEF ExistsBijection -<1>2. PICK i,j \in 1 .. n : - /\ i < j - /\ \/ g[i] = a /\ g[j] = b - \/ g[i] = b /\ g[j] = a - <2>1. PICK i,j \in 1 .. n : i # j /\ g[i] = a /\ g[j] = b - BY <1>1 DEF Bijection, Surjection - <2>2. CASE i < j BY <2>1, <2>2 - <2>3. CASE i > j BY <2>1, <2>3 - <2>. QED BY <2>1, <2>2, <2>3 -<1>. n-1 \in Nat BY <1>2 -<1>. DEFINE h == [ k \in 1 .. n-1 |-> IF k=j THEN f[g[n]] ELSE f[g[k]] ] -<1>3. h \in Surjection(1..n-1, T) - <2>1. h \in [1..n-1 -> T] BY DEF Bijection, Surjection - <2>2. ASSUME NEW t \in T PROVE \E k \in 1..n-1 : h[k] = t - <3>1. PICK s \in S : f[s] = t BY DEF Surjection - <3>2. PICK l \in 1..n : g[l] = s BY DEF Bijection, Surjection - <3>. QED BY <1>1, <1>2, <3>1, <3>2 - <2>. QED BY <2>1, <2>2 DEF Surjection -<1>4. Cardinality(T) <= n-1 BY <1>3, FS_SurjCardinalityBound DEF ExistsSurjection -<1>. QED BY <1>4 - - -(***************************************************************************) -(* `. 
.' *) -(* *) -(* The image of a finite set is finite. *) -(* NB: Note that any function is a surjection on its range by theorem *) -(* Fun_RangeProperties. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_Surjection == - ASSUME NEW S, NEW T, NEW f \in Surjection(S,T), IsFiniteSet(S) - PROVE /\ IsFiniteSet(T) - /\ Cardinality(T) <= Cardinality(S) - /\ Cardinality(T) = Cardinality(S) <=> f \in Injection(S,T) - -<1>1. /\ Cardinality(S) \in Nat - /\ ExistsBijection(1 .. Cardinality(S), S) - BY FS_CardinalityType -<1>2. ExistsSurjection(1 .. Cardinality(S), T) - BY <1>1, Fun_ExistsBijEquiv, Fun_ExistsSurjTransitive DEF ExistsSurjection -<1>4. IsFiniteSet(T) /\ Cardinality(T) <= Cardinality(S) - BY <1>1, <1>2, FS_NatSurjection, FS_SurjCardinalityBound -<1>5. ASSUME Cardinality(T) = Cardinality(S) PROVE f \in Injection(S,T) - BY <1>5, FS_SurjSameCardinalityImpliesInj -<1>6. ASSUME f \in Injection(S,T) PROVE Cardinality(T) = Cardinality(S) - <2>1. ExistsBijection(S, T) BY <1>6 DEF Bijection, ExistsBijection - <2>2. ExistsBijection(1..Cardinality(S), T) - BY <1>1, <2>1, Fun_ExistsBijTransitive - <2>. QED BY <1>1, <2>2, FS_CountingElements -<1>. QED BY <1>4, <1>5, <1>6 - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The cardinality of a finite set S is 0 iff S is empty. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_EmptySet == - /\ IsFiniteSet({}) - /\ Cardinality({}) = 0 - /\ \A S : IsFiniteSet(S) => (Cardinality(S) = 0 <=> S = {}) - -<1>1. IsFiniteSet({}) /\ Cardinality({}) = 0 - BY Fun_NatBijEmpty, FS_NatBijection, FS_CountingElements, Zenon -<1>2. ASSUME NEW S, IsFiniteSet(S), Cardinality(S) = 0 - PROVE S = {} - BY <1>2, FS_CardinalityType, Fun_NatBijEmpty -<1>. QED BY <1>1, <1>2 - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If S is finite, so are S \cup {x} and S \ {x}. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_AddElement == - ASSUME NEW S, NEW x, IsFiniteSet(S) - PROVE /\ IsFiniteSet(S \cup {x}) - /\ Cardinality(S \cup {x}) = - IF x \in S THEN Cardinality(S) ELSE Cardinality(S)+1 -<1>1. CASE x \notin S - BY <1>1, FS_CardinalityType, Fun_NatBijAddElem, FS_NatBijection, - FS_CountingElements -<1>. QED BY <1>1 \* the case "x \in S" is trivial - - -THEOREM FS_RemoveElement == - ASSUME NEW S, NEW x, IsFiniteSet(S) - PROVE /\ IsFiniteSet(S \ {x}) - /\ Cardinality(S \ {x}) = - IF x \in S THEN Cardinality(S)-1 ELSE Cardinality(S) -<1>1. CASE x \in S - BY <1>1, FS_CardinalityType, Fun_NatBijSubElem, FS_NatBijection, - FS_CountingElements, FS_EmptySet -<1>. QED BY <1>1 \* the case "x \notin S" is trivial - - -(***************************************************************************) -(* `. .' *) -(* *) -(* In particular, a singleton set is finite. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_Singleton == - /\ \A x : IsFiniteSet({x}) /\ Cardinality({x}) = 1 - /\ \A S : IsFiniteSet(S) => (Cardinality(S) = 1 <=> \E x: S = {x}) - -<1>1. \A x : IsFiniteSet({x}) /\ Cardinality({x}) = 1 - BY FS_EmptySet, FS_AddElement -<1>2. ASSUME NEW S, IsFiniteSet(S), Cardinality(S) = 1 - PROVE \E x : S = {x} - BY <1>2, FS_CardinalityType, Fun_NatBijSingleton -<1>. 
QED BY <1>1, <1>2 - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Any subset of a finite set is finite. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_Subset == - ASSUME NEW S, IsFiniteSet(S), NEW T \in SUBSET S - PROVE /\ IsFiniteSet(T) - /\ Cardinality(T) <= Cardinality(S) - /\ Cardinality(S) = Cardinality(T) => S = T -\* NB: Changing the last implication to an equivalence breaks proofs - -<1>1. /\ Cardinality(S) \in Nat - /\ ExistsBijection(1 .. Cardinality(S), S) - BY FS_CardinalityType -<1>2. PICK n \in Nat : ExistsBijection(1..n, T) /\ n <= Cardinality(S) - BY <1>1, Fun_NatBijSubset -<1>3. ASSUME Cardinality(S) = Cardinality(T), S # T - PROVE FALSE - <2>1. PICK x \in S \ T : TRUE BY <1>3 - <2>2. /\ IsFiniteSet(S \ {x}) - /\ Cardinality(S \ {x}) = Cardinality(S) - 1 - BY <2>1, FS_RemoveElement - <2>3. T \subseteq S \ {x} BY <2>1 - <2>4. PICK m \in Nat : ExistsBijection(1..m, T) /\ m <= Cardinality(S)-1 - BY <2>2, <2>3, FS_CardinalityType, Fun_NatBijSubset - <2>. QED BY <2>4, <1>3, FS_CountingElements -<1>. QED BY <1>2, <1>3, FS_NatBijection, FS_CountingElements - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* a..b is a finite set for any a,b \in Int. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_Interval == - ASSUME NEW a \in Int, NEW b \in Int - PROVE /\ IsFiniteSet(a..b) - /\ Cardinality(a..b) = IF a > b THEN 0 ELSE b-a+1 - -<1>1. CASE a <= b - BY <1>1, Fun_ExistsBijInterval, FS_NatBijection, FS_CountingElements -<1>2. CASE a > b - <2>1. a..b = {} BY <1>2 - <2>. QED BY <2>1, <1>2, FS_EmptySet, Zenon -<1>. QED BY <1>1, <1>2 - - -THEOREM FS_BoundedSetOfNaturals == - ASSUME NEW S \in SUBSET Nat, NEW n \in Nat, - \A s \in S : s <= n - PROVE /\ IsFiniteSet(S) - /\ Cardinality(S) \leq n+1 -<1>1. S \subseteq 0 .. n OBVIOUS -<1>2. IsFiniteSet(0..n) /\ Cardinality(0..n) = n+1 BY FS_Interval -<1>. QED BY <1>1, <1>2, FS_Subset, Zenon - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Induction for finite sets. *) -(* *) -(* `. .' *) -(***************************************************************************) - -THEOREM FS_Induction == - ASSUME NEW S, IsFiniteSet(S), - NEW P(_), P({}), - ASSUME NEW T, NEW x, IsFiniteSet(T), P(T), x \notin T - PROVE P(T \cup {x}) - PROVE P(S) -<1>. DEFINE Q(n) == \A T : IsFiniteSet(T) /\ Cardinality(T) = n => P(T) -<1>1. SUFFICES \A n \in Nat : Q(n) BY FS_CardinalityType -<1>2. Q(0) BY FS_EmptySet, Zenon -<1>3. ASSUME NEW n \in Nat, Q(n), - NEW T, IsFiniteSet(T), Cardinality(T) = n+1 - PROVE P(T) - <2>1. PICK x \in T : TRUE BY <1>3, FS_EmptySet - <2>2. /\ IsFiniteSet(T \ {x}) - /\ Cardinality(T \ {x}) = n - BY <1>3, FS_RemoveElement, Isa - <2>3. P(T \ {x}) BY <2>2, Q(n) - <2>4. P((T \ {x}) \cup {x}) BY <2>2, <2>3 - <2>. QED BY <2>4 -<1>4. QED BY <1>2, <1>3, NatInduction, Isa - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The finite subsets form a well-founded ordering with respect to strict *) -(* set inclusion. *) -(* *) -(* `. .' 
*) -(***************************************************************************) - -FiniteSubsetsOf(S) == { T \in SUBSET S : IsFiniteSet(T) } -StrictSubsetOrdering(S) == { ss \in (SUBSET S) \X (SUBSET S) : - ss[1] \subseteq ss[2] /\ ss[1] # ss[2] } - -LEMMA FS_FiniteSubsetsOfFinite == - ASSUME NEW S, IsFiniteSet(S) - PROVE FiniteSubsetsOf(S) = SUBSET S -BY FS_Subset DEF FiniteSubsetsOf - - -(*****************************************************************************) -(* The formulation of the following theorem doesn't require S being finite. *) -(* If S is finite, it implies *) -(* IsWellFoundedOn(StrictSubsetOrdering(S), SUBSET S) *) -(* using lemma FS_FiniteSubsetsOfFinite. *) -(*****************************************************************************) -THEOREM FS_StrictSubsetOrderingWellFounded == - ASSUME NEW S - PROVE IsWellFoundedOn(StrictSubsetOrdering(S), FiniteSubsetsOf(S)) - -<1>1. \A T \in FiniteSubsetsOf(S) : Cardinality(T) \in Nat - BY FS_CardinalityType, FS_Subset DEF FiniteSubsetsOf -<1>2. IsWellFoundedOn(PreImage(Cardinality, FiniteSubsetsOf(S), OpToRel(<,Nat)), - FiniteSubsetsOf(S)) - BY <1>1, PreImageWellFounded, NatLessThanWellFounded, Isa -<1>3. StrictSubsetOrdering(S) \cap (FiniteSubsetsOf(S) \X FiniteSubsetsOf(S)) - \subseteq PreImage(Cardinality, FiniteSubsetsOf(S), OpToRel(<, Nat)) - BY FS_Subset, <1>1 - DEF StrictSubsetOrdering, FiniteSubsetsOf, PreImage, OpToRel -<1>. QED BY <1>2, <1>3, IsWellFoundedOnSubrelation - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Well-founded induction for finite subsets. *) -(* *) -(* `. .' *) -(***************************************************************************) - -THEOREM FS_WFInduction == - ASSUME NEW P(_), NEW S, IsFiniteSet(S), - ASSUME NEW T \in SUBSET S, - \A U \in (SUBSET T) \ {T} : P(U) - PROVE P(T) - PROVE P(S) -<1>. DEFINE SubS == SUBSET S -<1>1. IsWellFoundedOn(StrictSubsetOrdering(S), SubS) - BY FS_FiniteSubsetsOfFinite, FS_StrictSubsetOrderingWellFounded, Zenon -<1>2. \A T \in SubS : - (\A U \in SetLessThan(T, StrictSubsetOrdering(S), SubS) : P(U)) - => P(T) - BY DEF SetLessThan, StrictSubsetOrdering -<1>. HIDE DEF SubS -<1>3. \A T \in SubS : P(T) BY ONLY <1>1, <1>2, WFInduction, IsaM("blast") -<1>. QED BY <1>3 DEF SubS - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The union of two finite sets is finite. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_Union == - ASSUME NEW S, IsFiniteSet(S), - NEW T, IsFiniteSet(T) - PROVE /\ IsFiniteSet(S \cup T) - /\ Cardinality(S \cup T) = - Cardinality(S) + Cardinality(T) - Cardinality(S \cap T) - -<1>. DEFINE P(A) == /\ IsFiniteSet(S \cup A) - /\ Cardinality(S \cup A) = - Cardinality(S) + Cardinality(A) - Cardinality(S \cap A) -<1>1. P({}) BY FS_EmptySet, FS_CardinalityType -<1>2. ASSUME NEW A, NEW x, IsFiniteSet(A), P(A), x \notin A - PROVE P(A \cup {x}) - <2>1. IsFiniteSet(S \cup (A \cup {x})) BY P(A), FS_AddElement, Isa - <2>. /\ IsFiniteSet(S \cup A) - /\ IsFiniteSet(S \cap A) - /\ Cardinality(S) \in Nat - /\ Cardinality(A) \in Nat - /\ Cardinality(S \cap A) \in Nat - BY P(A), FS_Subset, FS_CardinalityType - <2>2. Cardinality(A \cup {x}) = Cardinality(A) + 1 - BY <1>2, FS_AddElement - <2>3. CASE x \in S - <3>1. Cardinality(S \cup (A \cup {x})) = Cardinality(S \cup A) BY <2>3, Zenon - <3>2. 
Cardinality(S \cap (A \cup {x})) = Cardinality((S \cap A) \cup {x}) BY <2>3, Zenon - <3>3. Cardinality(S \cap (A \cup {x})) = Cardinality(S \cap A) + 1 - BY <3>2, <1>2, FS_AddElement - <3>. QED BY <3>1, <3>3, <2>2, <2>1, P(A) - <2>4. CASE x \notin S - <3>1. Cardinality((S \cup A) \cup {x}) = Cardinality(S \cup A) + 1 - BY <1>2, <2>4, FS_AddElement - <3>1a. Cardinality(S \cup (A \cup {x})) = Cardinality(S \cup A) + 1 BY <3>1, Zenon - <3>2. Cardinality(S \cap (A \cup {x})) = Cardinality(S \cap A) BY <2>4, Zenon - <3>. QED BY <3>1a, <3>2, <2>2, <2>1, P(A) - <2>. QED BY <2>3, <2>4 -<1>. HIDE DEF P -<1>. P(T) BY <1>1, <1>2, FS_Induction, IsaM("blast") -<1>. QED BY DEF P - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Corollary: two majorities intersect. More precisely, any two subsets *) -(* of a finite set U such that the sum of cardinalities of the subsets *) -(* exceeds that of U must have non-empty intersection. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_MajoritiesIntersect == - ASSUME NEW U, NEW S, NEW T, IsFiniteSet(U), - S \subseteq U, T \subseteq U, - Cardinality(S) + Cardinality(T) > Cardinality(U) - PROVE S \cap T # {} - -<1>. /\ IsFiniteSet(S) - /\ IsFiniteSet(T) - /\ Cardinality(S) \in Nat - /\ Cardinality(T) \in Nat - /\ Cardinality(U) \in Nat - /\ Cardinality(S \cap T) \in Nat - /\ Cardinality(S \cup T) <= Cardinality(U) - BY FS_Subset, FS_CardinalityType -<1>1. Cardinality(S \cup T) = - Cardinality(S) + Cardinality(T) - Cardinality(S \cap T) - BY FS_Union, Zenon -<1>2. Cardinality(S \cap T) # 0 BY <1>1 -<1>3. QED BY <1>2, FS_EmptySet, Zenon - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The intersection of a finite set with an arbitrary set is finite. *) -(* *) -(* `. .' *) -(***************************************************************************) - - -THEOREM FS_Intersection == - ASSUME NEW S, IsFiniteSet(S), NEW T - PROVE /\ IsFiniteSet(S \cap T) - /\ IsFiniteSet(T \cap S) - /\ Cardinality(S \cap T) <= Cardinality(S) - /\ Cardinality(T \cap S) <= Cardinality(S) -BY FS_Subset - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The difference between a finite set and an arbitrary set is finite. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_Difference == - ASSUME NEW S, NEW T, IsFiniteSet(S) - PROVE /\ IsFiniteSet(S \ T) - /\ Cardinality(S \ T) = Cardinality(S) - Cardinality(S \cap T) - -<1>. /\ IsFiniteSet(S \ T) - /\ IsFiniteSet(S \cap T) - /\ Cardinality(S \ T) \in Nat - /\ Cardinality(S \cap T) \in Nat - BY FS_Subset, FS_CardinalityType -<1>2. Cardinality(S \ T) = Cardinality(S) - Cardinality(S \cap T) - <2>1. Cardinality(S) = Cardinality((S \cap T) \cup (S \ T)) BY Zenon - <2>2. Cardinality((S \cap T) \cup (S \ T)) = - Cardinality(S \cap T) + Cardinality(S \ T) - Cardinality((S \cap T) \cap (S \ T)) - BY FS_Union, Zenon - <2>3. Cardinality((S \cap T) \cap (S \ T)) = 0 BY FS_EmptySet, Zenon - <2>. QED BY <2>1, <2>2, <2>3 -<1>3. QED BY <1>2 - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The union of a finite number of finite sets is finite. *) -(* *) -(* `. .' 
*) -(***************************************************************************) -THEOREM FS_UNION == - ASSUME NEW S, IsFiniteSet(S), \A T \in S : IsFiniteSet(T) - PROVE IsFiniteSet(UNION S) - -<1>. DEFINE P(U) == (\A T \in U : IsFiniteSet(T)) => IsFiniteSet(UNION U) -<1>1. P({}) BY FS_EmptySet -<1>2. ASSUME NEW U, NEW x, P(U), x \notin U - PROVE P(U \cup {x}) - BY <1>2, FS_Union, Isa -<1>. HIDE DEF P -<1>. P(S) BY <1>1, <1>2, FS_Induction, IsaM("blast") -<1>. QED BY DEF P - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The product of two finite sets is finite. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_Product == - ASSUME NEW S, IsFiniteSet(S), - NEW T, IsFiniteSet(T) - PROVE /\ IsFiniteSet(S \X T) - /\ Cardinality(S \X T) = Cardinality(S) * Cardinality(T) - -<1>. DEFINE P(A) == /\ IsFiniteSet(S \X A) - /\ Cardinality(S \X A) = Cardinality(S) * Cardinality(A) -<1>1. P({}) - <2>1. /\ S \X {} = {} - /\ IsFiniteSet(S \X {}) - /\ Cardinality(S \X {}) = 0 - /\ Cardinality({}) = 0 - /\ Cardinality(S) \in Nat - BY FS_EmptySet, FS_CardinalityType, Zenon - <2>. QED BY <2>1 -<1>2. ASSUME NEW A, NEW x, IsFiniteSet(A), P(A), x \notin A - PROVE P(A \cup {x}) - <2>. /\ Cardinality(A) \in Nat - /\ Cardinality(S) \in Nat - BY <1>2, FS_CardinalityType - <2>. DEFINE SX == { <<s,x>> : s \in S } - <2>1. /\ IsFiniteSet(A \cup {x}) - /\ Cardinality(A \cup {x}) = Cardinality(A) + 1 - BY <1>2, FS_AddElement - <2>2. S \X (A \cup {x}) = (S \X A) \cup SX - BY <1>2, Isa - <2>3. ExistsBijection(S, SX) - <3>. DEFINE f == [s \in S |-> <<s,x>>] - <3>. f \in Bijection(S, SX) BY DEF Bijection, Injection, Surjection - <3>. QED BY DEF ExistsBijection - <2>4. /\ IsFiniteSet(SX) - /\ Cardinality(SX) = Cardinality(S) - BY <2>3, FS_Bijection - <2>5. /\ IsFiniteSet(S \X (A \cup {x})) - /\ Cardinality(S \X (A \cup {x})) = - Cardinality(S \X A) + Cardinality(SX) - Cardinality((S \X A) \cap SX) - BY <2>2, <2>4, P(A), FS_Union, Isa - <2>6. (S \X A) \cap SX = {} BY <1>2 - <2>7. Cardinality((S \X A) \cap SX) = 0 BY <2>6, FS_EmptySet, Zenon - <2>. QED BY <2>1, <2>5, <2>4, <2>7, P(A) -<1>. HIDE DEF P -<1>. P(T) BY <1>1, <1>2, FS_Induction, IsaM("blast") -<1>. QED BY DEF P - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The powerset of a finite set is finite. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM FS_SUBSET == - ASSUME NEW S, IsFiniteSet(S) - PROVE /\ IsFiniteSet(SUBSET S) - /\ Cardinality(SUBSET S) = 2^Cardinality(S) - -<1>. DEFINE P(A) == /\ IsFiniteSet(SUBSET A) - /\ Cardinality(SUBSET A) = 2^Cardinality(A) -<1>1. P({}) - <2>1. /\ IsFiniteSet({{}}) - /\ Cardinality({{}}) = 1 - BY FS_Singleton, Zenon - <2>2. 1 = 2^0 OBVIOUS - <2>. QED BY <2>1, <2>2, FS_EmptySet, Zenon -<1>2. ASSUME NEW A, NEW x, IsFiniteSet(A), x \notin A, P(A) - PROVE P(A \cup {x}) - <2>. DEFINE Ax == {B \cup {x} : B \in SUBSET A} - <2>1. Cardinality(A \cup {x}) = Cardinality(A) + 1 BY <1>2, FS_AddElement - <2>2. 2^Cardinality(A \cup {x}) = 2^Cardinality(A) + 2^Cardinality(A) - BY <2>1, <1>2, FS_CardinalityType, TwoExpLemma, Zenon - <2>3. SUBSET (A \cup {x}) = (SUBSET A) \cup Ax BY <1>2, Isa - <2>4. ExistsBijection(SUBSET A, Ax) - <3>. DEFINE f == [B \in SUBSET A |-> B \cup {x}] - <3>1. ASSUME NEW B \in SUBSET A, NEW C \in SUBSET A, f[B] = f[C] - PROVE B = C - BY <3>1, <1>2, Zenon - <3>2.
f \in Surjection(SUBSET A, Ax) BY DEF Surjection - <3>3. f \in Bijection(SUBSET A, Ax) - BY <3>1, <3>2 DEF Bijection, Injection - <3>. QED BY <3>3 DEF ExistsBijection - <2>5. /\ IsFiniteSet(Ax) - /\ Cardinality(Ax) = Cardinality(SUBSET A) - BY <2>4, P(A), FS_Bijection - <2>6. /\ IsFiniteSet(SUBSET (A \cup {x})) - /\ Cardinality(SUBSET (A \cup {x})) = - Cardinality(SUBSET A) + Cardinality(Ax) - Cardinality((SUBSET A) \cap Ax) - BY <2>3, <2>5, P(A), FS_Union, Isa - <2>7. (SUBSET A) \cap Ax = {} BY <1>2 - <2>8. Cardinality((SUBSET A) \cap Ax) = 0 BY <2>7, FS_EmptySet, Zenon - <2>. QED BY <2>2, <2>5, <2>6, <2>8, P(A), FS_CardinalityType -<1>. HIDE DEF P -<1>. P(S) BY <1>1, <1>2, FS_Induction, IsaM("blast") -<1>. QED BY DEF P - - - - - - -============================================================================= -\* Modification History -\* Last modified Fri Feb 14 21:24:26 GMT-03:00 2014 by merz -\* Last modified Thu Jul 04 15:15:07 CEST 2013 by bhargav -\* Last modified Tue Jun 04 11:44:51 CEST 2013 by bhargav -\* Last modified Fri May 03 12:02:51 PDT 2013 by tomr -\* Created Fri Oct 05 15:04:18 PDT 2012 by tomr \ No newline at end of file diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/FiniteSets.tla b/x/ccv/provider/keyguard/prototyping/tla/library/FiniteSets.tla deleted file mode 100644 index 57ac402350..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/FiniteSets.tla +++ /dev/null @@ -1,23 +0,0 @@ ----------------------------- MODULE FiniteSets ----------------------------- -LOCAL INSTANCE Naturals -LOCAL INSTANCE Sequences - (*************************************************************************) - (* Imports the definitions from Naturals and Sequences, but doesn't *) - (* export them. *) - (*************************************************************************) - -IsFiniteSet(S) == - (*************************************************************************) - (* A set S is finite iff there is a finite sequence containing all its *) - (* elements. *) - (*************************************************************************) - \E seq \in Seq(S) : \A s \in S : \E n \in 1..Len(seq) : seq[n] = s - -Cardinality(S) == - (*************************************************************************) - (* Cardinality is defined only for finite sets. *) - (*************************************************************************) - LET CS[T \in SUBSET S] == IF T = {} THEN 0 - ELSE 1 + CS[T \ {CHOOSE x : x \in T}] - IN CS[S] -============================================================================= diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/FunctionTheorems.tla b/x/ccv/provider/keyguard/prototyping/tla/library/FunctionTheorems.tla deleted file mode 100644 index 644b6414f1..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/FunctionTheorems.tla +++ /dev/null @@ -1,575 +0,0 @@ -------------------------- MODULE FunctionTheorems --------------------------- -(***************************************************************************) -(* `^{\large\vspace{12pt} *) -(* Facts about functions. *) -(* Originally contributed by Tom Rodeheffer, MSR. *) -(* For the proofs of these theorems, see module FunctionTheorems\_proofs. *) -(* \vspace{12pt}}^' *) -(***************************************************************************) - -EXTENDS - Functions, - Integers - -(***************************************************************************) -(* `. .' *) -(* *) -(* Function restriction. *) -(* *) -(* `. .' 
*) -(***************************************************************************) -THEOREM Fun_RestrictProperties == - ASSUME NEW S, NEW T, NEW f \in [S -> T], NEW A \in SUBSET S - PROVE /\ Restrict(f,A) \in [A -> T] - /\ \A x \in A : Restrict(f,A)[x] = f[x] - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Range of a function. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_RangeProperties == - ASSUME NEW S, NEW T, NEW f \in [S -> T] - PROVE /\ Range(f) \subseteq T - /\ \A y \in Range(f) : \E x \in S : f[x] = y - /\ f \in Surjection(S, Range(f)) - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Range of a function. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_InverseProperties == - ASSUME NEW S, NEW T, NEW f \in [S -> T] - PROVE /\ (S = {} => T = {}) => Inverse(f,S,T) \in [T -> S] - /\ \A y \in Range(f) : f[Inverse(f,S,T)[y]] = y - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Introduction rules for injections, surjections, bijections. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_IsInj == - ASSUME NEW S, NEW T, NEW F \in [S -> T], - \A a,b \in S : F[a] = F[b] => a = b - PROVE F \in Injection(S,T) - - -THEOREM Fun_IsSurj == - ASSUME NEW S, NEW T, NEW F \in [S -> T], - \A t \in T : \E s \in S : F[s] = t - PROVE F \in Surjection(S,T) - - -THEOREM Fun_IsBij == - ASSUME NEW S, NEW T, NEW F, - \/ F \in Injection(S,T) - \/ (F \in [S -> T] /\ \A a,b \in S : F[a] = F[b] => a = b), - - \/ F \in Surjection(S,T) - \/ (F \in [S -> T] /\ \A t \in T : \E s \in S : F[s] = t) - PROVE F \in Bijection(S,T) - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Properties of injections, surjections, and bijections. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_InjectionProperties == - ASSUME NEW S, NEW T, NEW F \in Injection(S,T) - PROVE /\ F \in [S -> T] - /\ \A a,b \in S : F[a] = F[b] => a = b - - -THEOREM Fun_SurjectionProperties == - ASSUME NEW S, NEW T, NEW F \in Surjection(S,T) - PROVE /\ F \in [S -> T] - /\ \A t \in T : \E s \in S : F[s] = t - /\ Range(F) = T - - -THEOREM Fun_BijectionProperties == - ASSUME NEW S, NEW T, NEW F \in Bijection(S,T) - PROVE /\ F \in [S -> T] - /\ F \in Injection(S,T) - /\ F \in Surjection(S,T) - /\ \A a,b \in S : F[a] = F[b] => a = b - /\ \A t \in T : \E s \in S : F[s] = t - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* A surjection in [S -> T] such that there is no surjection from any *) -(* subset of S to T is a bijection. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_SmallestSurjectionIsBijection == - ASSUME NEW S, NEW T, NEW f \in Surjection(S,T), - \A U \in SUBSET S : U # S => Surjection(U,T) = {} - PROVE f \in Bijection(S,T) - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Transitivity of injections, surjections, bijections. *) -(* *) -(* `. .' 
*) -(***************************************************************************) -THEOREM Fun_InjTransitive == - ASSUME NEW S, NEW T, NEW U, - NEW F \in Injection(S,T), - NEW G \in Injection(T,U) - PROVE [s \in S |-> G[F[s]]] \in Injection(S,U) - - -THEOREM Fun_SurjTransitive == - ASSUME NEW S, NEW T, NEW U, - NEW F \in Surjection(S,T), - NEW G \in Surjection(T,U) - PROVE [s \in S |-> G[F[s]]] \in Surjection(S,U) - - -THEOREM Fun_BijTransitive == - ASSUME NEW S, NEW T, NEW U, - NEW F \in Bijection(S,T), - NEW G \in Bijection(T,U) - PROVE [s \in S |-> G[F[s]]] \in Bijection(S,U) - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The inverse of a surjection is an injection and vice versa. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_SurjInverse == - ASSUME NEW S, NEW T, NEW f \in Surjection(S,T) - PROVE Inverse(f,S,T) \in Injection(T,S) - - -THEOREM Fun_InjInverse == - ASSUME NEW S, NEW T, NEW f \in Injection(S,T), S = {} => T = {} - PROVE Inverse(f,S,T) \in Surjection(T,S) - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Properties of the inverse of a bijection. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_BijInverse == - ASSUME NEW S, NEW T, NEW f \in Bijection(S,T) - PROVE /\ Inverse(f,S,T) \in Bijection(T,S) - /\ \A s \in S : Inverse(f,S,T)[f[s]] = s - /\ Inverse(Inverse(f,S,T), T,S) = f - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The restriction of a bijection is a bijection. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_BijRestrict == - ASSUME NEW S, NEW T, NEW F \in Bijection(S,T), - NEW R \in SUBSET S - PROVE Restrict(F, R) \in Bijection(R, Range(Restrict(F, R))) - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Given F an injection from S to T, then F is a bijection from S to F(S). *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_InjMeansBijImage == - ASSUME NEW S, NEW T, NEW F \in Injection(S,T) - PROVE F \in Bijection(S, Range(F)) - - - - ------------------------------------------------------------------------------ -(***************************************************************************) -(* `^{\large \vspace{12pt} *) -(* Facts about exists jections. *) -(* \vspace{12pt}}^' *) -(***************************************************************************) - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Definitions restated as facts. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_ExistsInj == - \A S,T : ExistsInjection(S,T) <=> Injection(S,T) # {} - - -THEOREM Fun_ExistsSurj == - \A S,T : ExistsSurjection(S,T) <=> Surjection(S,T) # {} - - -THEOREM Fun_ExistsBij == - \A S,T : ExistsBijection(S,T) <=> Bijection(S,T) # {} - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* There is a surjection from any set S to any non-empty subset T of S. *) -(* (Note that there cannot be a surjection to {} except if S is empty.) *) -(* *) -(* `. .' 
*) -(***************************************************************************) -THEOREM Fun_ExistsSurjSubset == - ASSUME NEW S, NEW T \in SUBSET S, T # {} - PROVE ExistsSurjection(S,T) - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If there is a surjection from S to T, then there is an injection from T *) -(* to S. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_ExistsSurjMeansExistsRevInj == - ASSUME NEW S, NEW T, ExistsSurjection(S,T) - PROVE ExistsInjection(T,S) - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* ExistsBijection is reflexive, symmetric, and transitive. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_ExistsBijReflexive == - ASSUME NEW S - PROVE ExistsBijection(S,S) - - -THEOREM Fun_ExistsBijSymmetric == - ASSUME NEW S, NEW T, ExistsBijection(S,T) - PROVE ExistsBijection(T,S) - - -THEOREM Fun_ExistsBijTransitive == - ASSUME NEW S, NEW T, NEW U, ExistsBijection(S,T), ExistsBijection(T,U) - PROVE ExistsBijection(S,U) - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Existence of injections and surjections is reflexive and transitive. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_ExistsInjReflexive == - ASSUME NEW S - PROVE ExistsInjection(S,S) - - -THEOREM Fun_ExistsSurjReflexive == - ASSUME NEW S - PROVE ExistsSurjection(S,S) - - -THEOREM Fun_ExistsInjTransitive == - ASSUME NEW S, NEW T, NEW U, - ExistsInjection(S,T), ExistsInjection(T,U) - PROVE ExistsInjection(S,U) - - -THEOREM Fun_ExistsSurjTransitive == - ASSUME NEW S, NEW T, NEW U, - ExistsSurjection(S,T), ExistsSurjection(T,U) - PROVE ExistsSurjection(S,U) - - ------------------------------------------------------------------------------ -(***************************************************************************) -(* `^{\large \vspace{12pt} *) -(* The Cantor-Bernstein-Schroeder theorem. *) -(* \vspace{12pt}}^' *) -(***************************************************************************) - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If there exists an injection from S to T, where T is a subset of S, *) -(* then there exists a bijection from S to T. *) -(* *) -(* A lemma for the Cantor-Bernstein-Schroeder theorem. *) -(* *) -(* This proof is formalized from *) -(* `^\url{http://www.proofwiki.org/wiki/Cantor-Bernstein-Schroeder\_Theorem/Lemma}^' *) -(* retrieved April 29, 2013. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_CantorBernsteinSchroeder_Lemma == - ASSUME NEW S, NEW T, T \subseteq S, ExistsInjection(S,T) - PROVE ExistsBijection(S,T) - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If an injection exists from S to T and an injection exists from T to S, *) -(* then there is a bijection from S to T. *) -(* *) -(* This is the Cantor-Bernstein-Schroeder theorem. *) -(* *) -(* This proof is formalized from *) -(* `^\url{http://www.proofwiki.org/wiki/Cantor-Bernstein-Schroeder_Theorem/Proof_5}^' *) -(* retrieved April 29, 2013. *) -(* *) -(* `. .' 
*) -(***************************************************************************) -THEOREM Fun_CantorBernsteinSchroeder == - ASSUME NEW S, NEW T, - ExistsInjection(S,T), ExistsInjection(T,S) - PROVE ExistsBijection(S,T) - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Applications of the Cantor-Bernstein-Schroeder Theorem. *) -(* If there exists an injection f: A->B and a surjection g: A->B, then *) -(* there exists a bijection between A and B. *) -(* Also, if there are surjections between A and B, then there is a *) -(* bijection. *) -(* *) -(* `. .' *) -(***************************************************************************) - -THEOREM Fun_ExistInjAndSurjThenBij == - ASSUME NEW S, NEW T, - ExistsInjection(S,T), ExistsSurjection(S,T) - PROVE ExistsBijection(S,T) - - - -THEOREM Fun_ExistSurjAndSurjThenBij == - ASSUME NEW S, NEW T, - ExistsSurjection(S,T), ExistsSurjection(T,S) - PROVE ExistsBijection(S,T) - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Equivalences for ExistsBijection. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_ExistsBijEquiv == - ASSUME NEW S, NEW T - PROVE /\ ExistsBijection(S,T) <=> ExistsBijection(T,S) - /\ ExistsBijection(S,T) <=> ExistsInjection(S,T) /\ ExistsInjection(T,S) - /\ ExistsBijection(S,T) <=> ExistsInjection(S,T) /\ ExistsSurjection(S,T) - /\ ExistsBijection(S,T) <=> ExistsInjection(T,S) /\ ExistsSurjection(T,S) - /\ ExistsBijection(S,T) <=> ExistsSurjection(S,T) /\ ExistsSurjection(T,S) - - ------------------------------------------------------------------------------ -(***************************************************************************) -(* `^{\large \vspace{12pt} *) -(* Facts about functions involving integer intervals. *) -(* \vspace{12pt}}^' *) -(***************************************************************************) - - -(***************************************************************************) -(* `. .' *) -(* *) -(* There is a bijection from 1..b-a+1 to a..b for integers a,b with a <= b.*) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_ExistsBijInterval == - ASSUME NEW a \in Int, NEW b \in Int, a <= b - PROVE ExistsBijection(1 .. b-a+1, a .. b) - - -(***************************************************************************) -(* `. .' *) -(* *) -(* There is an injection from 1..n to 1..m iff n \leq m. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatInjLeq == - ASSUME NEW n \in Nat, NEW m \in Nat - PROVE ExistsInjection(1..n,1..m) <=> n \leq m - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If a surjection from 1..n to S exists (for some n \in Nat) then a *) -(* bijection from 1..m to S exists (for some m \in Nat) and m \leq n. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatSurjImpliesNatBij == - ASSUME NEW S, NEW n \in Nat, ExistsSurjection(1..n,S) - PROVE \E m \in Nat : ExistsBijection(1..m,S) /\ m \leq n - - -(***************************************************************************) -(* Simple corollary. 
*) -(***************************************************************************) -THEOREM Fun_NatSurjEquivNatBij == - ASSUME NEW S - PROVE (\E n \in Nat : ExistsSurjection(1..n,S)) - <=> (\E m \in Nat : ExistsBijection(1..m,S)) - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* For any set S, given n, m \in Nat such that bijections exist from 1..n *) -(* to S and from 1..m to S, then it must be the case that n = m. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatBijSame == - ASSUME NEW S, - NEW n \in Nat, ExistsBijection(1..n,S), - NEW m \in Nat, ExistsBijection(1..m,S) - PROVE n = m - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* S is empty iff there exists a bijection from 1..0 to S. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatBijEmpty == - ASSUME NEW S - PROVE ExistsBijection(1..0,S) <=> S = {} - - -(***************************************************************************) -(* `. .' *) -(* *) -(* S is a singleton iff there exists a bijection from 1..1 to S. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatBijSingleton == - ASSUME NEW S - PROVE ExistsBijection(1..1,S) <=> \E s : S = {s} - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If there exists a bijection from 1..m to S (for some m \in Nat), then *) -(* there exists a bijection from 1..n to T (for some n \in Nat), where T *) -(* is a subset of S. Furthermore n \leq m. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatBijSubset == - ASSUME NEW S, NEW m \in Nat, ExistsBijection(1..m,S), - NEW T \in SUBSET S - PROVE \E n \in Nat : ExistsBijection(1..n,T) /\ n \leq m - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If there exists a bijection from 1..m to S (for some m \in Nat), then *) -(* there exists a bijection from 1..(m+1) to S \cup {x}, where x \notin S. *) -(* *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatBijAddElem == - ASSUME NEW S, NEW m \in Nat, ExistsBijection(1..m,S), - NEW x, x \notin S - PROVE ExistsBijection(1..(m+1), S \cup {x}) - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If there exists a bijection from 1..m to S (for some m \in Nat), then *) -(* there exists a bijection from 1..(m-1) to S \ {x}, where x \in S. *) -(* *) -(* *) -(* `. .' 
*) -(***************************************************************************) -THEOREM Fun_NatBijSubElem == - ASSUME NEW S, NEW m \in Nat, ExistsBijection(1..m,S), - NEW x, x \in S - PROVE ExistsBijection(1..(m-1), S \ {x}) - - - -============================================================================= -\* Modification History -\* Last modified Thu Feb 13 14:49:08 GMT-03:00 2014 by merz -\* Last modified Tue Jun 11 12:30:05 CEST 2013 by bhargav -\* Last modified Fri May 31 15:27:41 CEST 2013 by bhargav -\* Last modified Fri May 03 12:55:32 PDT 2013 by tomr -\* Created Thu Apr 11 10:36:10 PDT 2013 by tomr diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/FunctionTheorems_proofs.tla b/x/ccv/provider/keyguard/prototyping/tla/library/FunctionTheorems_proofs.tla deleted file mode 100644 index 6cb01fde93..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/FunctionTheorems_proofs.tla +++ /dev/null @@ -1,947 +0,0 @@ ---------------------- MODULE FunctionTheorems_proofs ------------------------ -(***************************************************************************) -(* `^{\large \vspace{12pt} *) -(* Proofs of facts about functions. *) -(* Originally contributed by Tom Rodeheffer, MSR. *) -(* \vspace{12pt}}^' *) -(***************************************************************************) - -EXTENDS - Functions, - Integers, - NaturalsInduction, - WellFoundedInduction, - TLAPS - -(***************************************************************************) -(* `. .' *) -(* *) -(* Function restriction. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_RestrictProperties == - ASSUME NEW S, NEW T, NEW f \in [S -> T], NEW A \in SUBSET S - PROVE /\ Restrict(f,A) \in [A -> T] - /\ \A x \in A : Restrict(f,A)[x] = f[x] -BY DEF Restrict - -(***************************************************************************) -(* `. .' *) -(* *) -(* Range of a function. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_RangeProperties == - ASSUME NEW S, NEW T, NEW f \in [S -> T] - PROVE /\ Range(f) \subseteq T - /\ \A y \in Range(f) : \E x \in S : f[x] = y - /\ f \in Surjection(S, Range(f)) -BY DEF Range, Surjection - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Range of a function. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_InverseProperties == - ASSUME NEW S, NEW T, NEW f \in [S -> T] - PROVE /\ (S = {} => T = {}) => Inverse(f,S,T) \in [T -> S] - /\ \A y \in Range(f) : f[Inverse(f,S,T)[y]] = y -BY DEF Inverse, Range - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Introduction rules for injections, surjections, bijections. *) -(* *) -(* `. .' 
*) -(***************************************************************************) -THEOREM Fun_IsInj == - ASSUME NEW S, NEW T, NEW F \in [S -> T], - \A a,b \in S : F[a] = F[b] => a = b - PROVE F \in Injection(S,T) -BY DEF Injection - - -THEOREM Fun_IsSurj == - ASSUME NEW S, NEW T, NEW F \in [S -> T], - \A t \in T : \E s \in S : F[s] = t - PROVE F \in Surjection(S,T) -BY DEF Surjection - - -THEOREM Fun_IsBij == - ASSUME NEW S, NEW T, NEW F, - \/ F \in Injection(S,T) - \/ (F \in [S -> T] /\ \A a,b \in S : F[a] = F[b] => a = b), - - \/ F \in Surjection(S,T) - \/ (F \in [S -> T] /\ \A t \in T : \E s \in S : F[s] = t) - PROVE F \in Bijection(S,T) -BY DEF Bijection, Injection, Surjection - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Properties of injections, surjections, and bijections. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_InjectionProperties == - ASSUME NEW S, NEW T, NEW F \in Injection(S,T) - PROVE /\ F \in [S -> T] - /\ \A a,b \in S : F[a] = F[b] => a = b -BY DEF Injection - - -THEOREM Fun_SurjectionProperties == - ASSUME NEW S, NEW T, NEW F \in Surjection(S,T) - PROVE /\ F \in [S -> T] - /\ \A t \in T : \E s \in S : F[s] = t - /\ Range(F) = T -BY DEF Surjection, Range - - -THEOREM Fun_BijectionProperties == - ASSUME NEW S, NEW T, NEW F \in Bijection(S,T) - PROVE /\ F \in [S -> T] - /\ F \in Injection(S,T) - /\ F \in Surjection(S,T) - /\ \A a,b \in S : F[a] = F[b] => a = b - /\ \A t \in T : \E s \in S : F[s] = t -BY DEF Bijection, Injection, Surjection - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* A surjection in [S -> T] such that there is no surjection from any *) -(* subset of S to T is a bijection. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_SmallestSurjectionIsBijection == - ASSUME NEW S, NEW T, NEW f \in Surjection(S,T), - \A U \in SUBSET S : U # S => Surjection(U,T) = {} - PROVE f \in Bijection(S,T) -<1>1. f \in [S -> T] - BY Fun_SurjectionProperties -<1>2. SUFFICES ASSUME f \notin Injection(S,T) PROVE FALSE - BY Fun_IsBij -<1>3. PICK a,b \in S : a # b /\ f[a] = f[b] - BY <1>1, <1>2, Fun_IsInj -<1>. DEFINE U == S \ {b} -<1>4. U \in SUBSET S /\ U # S - OBVIOUS -<1>. DEFINE g == [x \in U |-> f[x]] -<1>5. g \in Surjection(U,T) - <2>1. g \in [U -> T] BY <1>1 - <2>2. ASSUME NEW t \in T PROVE \E u \in U : g[u] = t - <3>1. CASE t = f[b] BY <1>3, <3>1 - <3>2. CASE t # f[b] - <4>1. PICK s \in S : f[s] = t - BY SMT, Fun_SurjectionProperties \** Zenon/Isa fail ?? - <4>2. s \in U BY <3>2, <4>1 - <4>. QED BY <4>1, <4>2 - <3>3. QED BY <3>1, <3>2 - <2>3. QED BY <2>1, <2>2, Fun_IsSurj -<1>. QED BY <1>4, <1>5 - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Transitivity of injections, surjections, bijections. *) -(* *) -(* `. .' 
*) -(***************************************************************************) -THEOREM Fun_InjTransitive == - ASSUME NEW S, NEW T, NEW U, - NEW F \in Injection(S,T), - NEW G \in Injection(T,U) - PROVE [s \in S |-> G[F[s]]] \in Injection(S,U) -BY DEF Injection - - -THEOREM Fun_SurjTransitive == - ASSUME NEW S, NEW T, NEW U, - NEW F \in Surjection(S,T), - NEW G \in Surjection(T,U) - PROVE [s \in S |-> G[F[s]]] \in Surjection(S,U) -BY DEF Surjection - - -THEOREM Fun_BijTransitive == - ASSUME NEW S, NEW T, NEW U, - NEW F \in Bijection(S,T), - NEW G \in Bijection(T,U) - PROVE [s \in S |-> G[F[s]]] \in Bijection(S,U) -BY Fun_SurjTransitive, Fun_InjTransitive DEF Bijection - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The inverse of a surjection is an injection and vice versa. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_SurjInverse == - ASSUME NEW S, NEW T, NEW f \in Surjection(S,T) - PROVE Inverse(f,S,T) \in Injection(T,S) -BY DEF Inverse, Surjection, Injection, Range - - -THEOREM Fun_InjInverse == - ASSUME NEW S, NEW T, NEW f \in Injection(S,T), S = {} => T = {} - PROVE Inverse(f,S,T) \in Surjection(T,S) -<1>. DEFINE g == Inverse(f,S,T) -<1>0. f \in [S -> T] BY DEF Injection -<1>1. g \in [T -> S] BY <1>0, Fun_InverseProperties -<1>2. ASSUME NEW s \in S PROVE \E t \in T : g[t] = s - <2>10. g[f[s]] = s BY DEF Inverse, Range, Injection - <2>. QED BY <2>10, <1>0 -<1>. QED BY <1>1, <1>2 DEF Surjection - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Properties of the inverse of a bijection. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_BijInverse == - ASSUME NEW S, NEW T, NEW f \in Bijection(S,T) - PROVE /\ Inverse(f,S,T) \in Bijection(T,S) - /\ \A s \in S : Inverse(f,S,T)[f[s]] = s - /\ Inverse(Inverse(f,S,T), T,S) = f - -<1>. DEFINE g == Inverse(f,S,T) -<1>1. f \in [S -> T] BY DEF Bijection, Injection -<1>2. f \in Surjection(S,T) BY DEF Bijection -<1>3. \A a,b \in S : f[a] = f[b] => a = b BY DEF Bijection, Injection -<1>4. g \in Injection(T,S) BY <1>2, Fun_SurjInverse - -<1>5. \A t \in T : f[g[t]] = t BY <1>2 DEF Surjection, Inverse, Range -<1>6. \A s \in S : g[f[s]] = s BY <1>1, <1>3 DEF Inverse, Range - -<1>7. \A a,b \in T : g[a] = g[b] => a = b BY <1>5 -<1>8. \A s \in S : \E t \in T : g[t] = s BY <1>1, <1>6 - -<1>9. g \in Bijection(T,S) BY <1>4, <1>8 DEF Bijection, Injection, Surjection - -<1>10. Inverse(g,T,S) = f - <2>1. ASSUME NEW s \in S PROVE f[s] = CHOOSE t \in T : s \in Range(g) => g[t] = s - <3>1. PICK a \in T : g[a] = s BY <1>9 DEF Bijection, Surjection - <3>2. \A b \in T : g[b] = s => a = b BY <3>1, <1>7 - <3>3. f[s] = a BY <3>1, <1>5 - <3>4. s \in Range(g) BY <3>1, <1>4 DEF Injection, Range - <3>. QED BY <3>1, <3>2, <3>3, <3>4 - <2>. QED BY <2>1, <1>1 DEF Inverse -<1>. QED BY <1>9, <1>6, <1>10 - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The restriction of a bijection is a bijection. *) -(* *) -(* `. .' 
*) -(***************************************************************************) -THEOREM Fun_BijRestrict == - ASSUME NEW S, NEW T, NEW F \in Bijection(S,T), - NEW R \in SUBSET S - PROVE Restrict(F, R) \in Bijection(R, Range(Restrict(F, R))) -BY DEF Bijection, Injection, Surjection, Range, Restrict - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Given F an injection from S to T, then F is a bijection from S to F(S). *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_InjMeansBijImage == - ASSUME NEW S, NEW T, NEW F \in Injection(S,T) - PROVE F \in Bijection(S, Range(F)) -BY DEF Bijection, Injection, Surjection, Range - - - - ------------------------------------------------------------------------------ -(***************************************************************************) -(* `^{\large \vspace{12pt} *) -(* Facts about exists jections. *) -(* \vspace{12pt}}^' *) -(***************************************************************************) - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Definitions restated as facts. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_ExistsInj == - \A S,T : ExistsInjection(S,T) <=> Injection(S,T) # {} -BY DEF ExistsInjection - - -THEOREM Fun_ExistsSurj == - \A S,T : ExistsSurjection(S,T) <=> Surjection(S,T) # {} -BY DEF ExistsSurjection - - -THEOREM Fun_ExistsBij == - \A S,T : ExistsBijection(S,T) <=> Bijection(S,T) # {} -BY DEF ExistsBijection - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* There is a surjection from any set S to any non-empty subset T of S. *) -(* (Note that there cannot be a surjection to {} except if S is empty.) *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_ExistsSurjSubset == - ASSUME NEW S, NEW T \in SUBSET S, T # {} - PROVE ExistsSurjection(S,T) -<1>. PICK x \in T : TRUE OBVIOUS -<1>. [s \in S |-> IF s \in T THEN s ELSE x] \in Surjection(S,T) - BY DEF Surjection -<1>. QED BY DEF ExistsSurjection - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If there is a surjection from S to T, then there is an injection from T *) -(* to S. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_ExistsSurjMeansExistsRevInj == - ASSUME NEW S, NEW T, ExistsSurjection(S,T) - PROVE ExistsInjection(T,S) -BY Fun_SurjInverse DEF ExistsSurjection, ExistsInjection - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* ExistsBijection is reflexive, symmetric, and transitive. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_ExistsBijReflexive == - ASSUME NEW S - PROVE ExistsBijection(S,S) -<1>. [s \in S |-> s] \in Bijection(S,S) BY DEF Bijection, Injection, Surjection -<1>. 
QED BY DEF ExistsBijection - - -THEOREM Fun_ExistsBijSymmetric == - ASSUME NEW S, NEW T, ExistsBijection(S,T) - PROVE ExistsBijection(T,S) -BY Fun_BijInverse DEF ExistsBijection - - -THEOREM Fun_ExistsBijTransitive == - ASSUME NEW S, NEW T, NEW U, ExistsBijection(S,T), ExistsBijection(T,U) - PROVE ExistsBijection(S,U) -BY Fun_BijTransitive DEF ExistsBijection - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Existence of injections and surjections is reflexive and transitive. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_ExistsInjReflexive == - ASSUME NEW S - PROVE ExistsInjection(S,S) -BY Fun_ExistsBijReflexive DEF ExistsBijection, ExistsInjection, Bijection - - -THEOREM Fun_ExistsSurjReflexive == - ASSUME NEW S - PROVE ExistsSurjection(S,S) -BY Fun_ExistsBijReflexive DEF ExistsBijection, ExistsSurjection, Bijection - - -THEOREM Fun_ExistsInjTransitive == - ASSUME NEW S, NEW T, NEW U, - ExistsInjection(S,T), ExistsInjection(T,U) - PROVE ExistsInjection(S,U) -BY Fun_InjTransitive DEF ExistsInjection - - -THEOREM Fun_ExistsSurjTransitive == - ASSUME NEW S, NEW T, NEW U, - ExistsSurjection(S,T), ExistsSurjection(T,U) - PROVE ExistsSurjection(S,U) -BY Fun_SurjTransitive DEF ExistsSurjection - - ------------------------------------------------------------------------------ -(***************************************************************************) -(* `^{\large \vspace{12pt} *) -(* The Cantor-Bernstein-Schroeder theorem. *) -(* \vspace{12pt}}^' *) -(***************************************************************************) - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If there exists an injection from S to T, where T is a subset of S, *) -(* then there exists a bijection from S to T. *) -(* *) -(* A lemma for the Cantor-Bernstein-Schroeder theorem. *) -(* *) -(* This proof is formalized from *) -(* `^\url{http://www.proofwiki.org/wiki/Cantor-Bernstein-Schroeder\_Theorem/Lemma}^' *) -(* retrieved April 29, 2013. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_CantorBernsteinSchroeder_Lemma == - ASSUME NEW S, NEW T, T \subseteq S, ExistsInjection(S,T) - PROVE ExistsBijection(S,T) -PROOF - <1> PICK F \in Injection(S,T) : TRUE BY Fun_ExistsInj - - <1>1. /\ F \in [S -> T] - /\ \A a,b \in S : F[a] = F[b] => a = b - BY Fun_InjectionProperties - - (*************************************************************************) - (* Pick Y as S excluding T. *) - (*************************************************************************) - <1>2. PICK Y : Y = S \ T OBVIOUS - - (*************************************************************************) - (* Define Ci[0] as Y, and Ci[i+1] as the image of Ci[i] under F. *) - (*************************************************************************) - <1> DEFINE Ci[i \in Nat] == - IF i = 0 THEN Y ELSE {F[s] : s \in Ci[i-1]} - <1> HIDE DEF Ci - - <1>3. \A i \in Nat : Ci[i] = - IF i = 0 THEN Y ELSE {F[s] : s \in Ci[i-1]} - (***********************************************************************) - (* Use NatInductiveDef to prove that Ci equals its definition. 
*) - (***********************************************************************) - <2> DEFINE - f0 == Y - Def(v,i) == {F[s] : s \in v} - f == CHOOSE f : f = [i \in Nat |-> IF i = 0 THEN f0 ELSE Def(f[i-1],i)] - <2> SUFFICES \A i \in Nat : f[i] = IF i = 0 THEN f0 ELSE Def(f[i-1],i) BY DEF Ci - <2> HIDE DEF f0, Def, f - <2> SUFFICES NatInductiveDefConclusion(f,f0,Def) BY DEF NatInductiveDefConclusion - <2> SUFFICES NatInductiveDefHypothesis(f,f0,Def) BY NatInductiveDef - <2> QED BY DEF NatInductiveDefHypothesis, f - - (*************************************************************************) - (* Applying F to an element of Ci[i] produces an element of Ci[i+1]. *) - (*************************************************************************) - <1>4. ASSUME NEW i \in Nat, NEW s \in Ci[i] - PROVE F[s] \in Ci[i+1] - BY <1>3, SMT - - (*************************************************************************) - (* Each element of Ci[i+1] is the application of F to some element in *) - (* Ci[i]. *) - (*************************************************************************) - <1>5. ASSUME NEW i \in Nat, NEW t \in Ci[i+1] - PROVE \E s \in Ci[i] : F[s] = t - BY <1>3, SMT - - (*************************************************************************) - (* Each Ci[i] \subseteq S. *) - (*************************************************************************) - <1>6. \A i \in Nat : Ci[i] \subseteq S - <2> DEFINE Prop(i) == Ci[i] \subseteq S - <2> SUFFICES \A i \in Nat : Prop(i) OBVIOUS - <2>1. Prop(0) BY <1>2, <1>3 - <2>2. ASSUME NEW i \in Nat, Prop(i) PROVE Prop(i+1) - <3> SUFFICES ASSUME NEW t \in Ci[i+1] PROVE t \in S OBVIOUS - <3>1. PICK s \in Ci[i] : F[s] = t BY <1>5 - <3>2. s \in S BY <2>2 - <3> QED BY <3>1, <3>2, <1>1 - <2> HIDE DEF Prop - <2> QED BY <2>1, <2>2, NatInduction, Isa - - (*************************************************************************) - (* Pick C as the union of all Ci[i]. *) - (*************************************************************************) - <1>7. PICK C : C = UNION {Ci[i] : i \in Nat} OBVIOUS - <1>8. C \subseteq S BY <1>6, <1>7 - - (*************************************************************************) - (* Pick FC as the image of C under F. *) - (*************************************************************************) - <1>9. PICK FC : FC = {F[c] : c \in C} OBVIOUS - <1>10. FC \subseteq T BY <1>1, <1>8, <1>9, Isa - - (*************************************************************************) - (* C = Y \cup FC because Ci[0] = Y and Ci[i+1] = image of Ci[i] under F. *) - (*************************************************************************) - <1>11. C = Y \cup FC - <2>1. ASSUME NEW c \in C PROVE c \in Y \cup FC - <3>1. PICK i \in Nat : c \in Ci[i] BY <1>7 - <3>2. CASE i = 0 BY <3>1, <3>2, <1>3 - <3>3. CASE i # 0 - <4>1. PICK s \in Ci[i-1] : F[s] = c BY <3>1, <3>3, <1>5, SMT - <4>2. s \in C BY <3>3, <1>7, SMT - <4> QED BY <4>1, <4>2, <1>9 - <3> QED BY <3>2, <3>3 - <2>2. ASSUME NEW c \in Y \cup FC PROVE c \in C - <3>1. CASE c \in Y BY <3>1, <1>3, <1>7 - <3>2. CASE c \in FC - <4>1. PICK s \in C : F[s] = c BY <3>2, <1>9 - <4>2. PICK i \in Nat : s \in Ci[i] BY <4>1, <1>7 - <4>3. F[s] \in Ci[i+1] BY <4>2, <1>4 - <4> QED BY <4>1, <4>3, <1>7, SMT - <3> QED BY <3>1, <3>2 - <2> QED BY <2>1, <2>2 - - (*************************************************************************) - (* S \ C is the same as T \ FC. *) - (*************************************************************************) - <1>12. 
S \ C = T \ FC BY <1>2, <1>11 - - (*************************************************************************) - (* Pick H as F on C and the identity on S \ C. Since F (restricted to *) - (* C) is a bijection from C to FC and S \ C = T \ FC, this makes H a *) - (* bijection from S to T. *) - (*************************************************************************) - <1>13. PICK H : H = [s \in S |-> IF s \in C THEN F[s] ELSE s] OBVIOUS - <1>14. H \in Bijection(S,T) - (***********************************************************************) - (* A useful lemma. If a \in C and b \notin C, then H[a] # H[b]. *) - (***********************************************************************) - <2>1. ASSUME NEW a \in S, NEW b \in S, a \in C, b \notin C PROVE H[a] # H[b] - <3>1. H[a] \in FC BY <2>1, <1>1, <1>9, <1>13 - <3>2. H[b] \in T \ FC BY <2>1, <1>12, <1>13 - <3> QED BY <3>1, <3>2 - - <2>2. H \in [S -> T] - <3> SUFFICES ASSUME NEW s \in S PROVE H[s] \in T BY <1>13 - <3>1. CASE s \in C BY <3>1, <1>1, <1>10, <1>13 - <3>2. CASE s \notin C BY <3>2, <1>12, <1>13 - <3> QED BY <3>1, <3>2 - - <2>3. ASSUME NEW a \in S, NEW b \in S, H[a] = H[b] PROVE a = b - <3> H[a] = H[b] BY <2>3 - <3>1. CASE a \in C /\ b \in C BY <3>1, <1>1, <1>13 - <3>2. CASE a \in C /\ b \notin C BY <3>2, <2>1 (* impossible by lemma *) - <3>3. CASE a \notin C /\ b \in C BY <3>3, <2>1 (* impossible by lemma *) - <3>4. CASE a \notin C /\ b \notin C BY <3>4, <1>13 - <3> QED BY <3>1, <3>2, <3>3, <3>4 - - <2>4. ASSUME NEW t \in T PROVE \E s \in S : H[s] = t - <3>1. CASE t \in FC BY <3>1, <1>8, <1>9, <1>13 - <3>2. CASE t \notin FC BY <3>2, <1>12, <1>13 - <3> QED BY <3>1, <3>2 - - <2> QED BY <2>2, <2>3, <2>4, Fun_IsBij - - <1> QED BY <1>14, Fun_ExistsBij - - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If an injection exists from S to T and an injection exists from T to S, *) -(* then there is a bijection from S to T. *) -(* *) -(* This is the Cantor-Bernstein-Schroeder theorem. *) -(* *) -(* This proof is formalized from *) -(* `^\url{http://www.proofwiki.org/wiki/Cantor-Bernstein-Schroeder_Theorem/Proof_5}^' *) -(* retrieved April 29, 2013. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_CantorBernsteinSchroeder == - ASSUME NEW S, NEW T, - ExistsInjection(S,T), ExistsInjection(T,S) - PROVE ExistsBijection(S,T) - -<1>1. PICK F : F \in Injection(S,T) BY DEF ExistsInjection -<1>2. PICK G : G \in Injection(T,S) BY DEF ExistsInjection -<1>. DEFINE GF == [s \in S |-> G[F[s]]] -<1>3. Range(G) \subseteq S BY <1>2, Fun_RangeProperties DEF Injection -<1>4. GF \in Injection(S, Range(G)) BY <1>1, <1>2 DEF Injection, Range -<1>5. ExistsBijection(S, Range(G)) - BY <1>3, <1>4, Fun_CantorBernsteinSchroeder_Lemma DEF ExistsInjection -<1>6. ExistsBijection(T, Range(G)) - BY <1>2, Fun_InjMeansBijImage DEF ExistsBijection -<1>. QED BY <1>5, <1>6, Fun_ExistsBijSymmetric, Fun_ExistsBijTransitive - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Applications of the Cantor-Bernstein-Schroeder Theorem. *) -(* If there exists an injection f: A->B and a surjection g: A->B, then *) -(* there exists a bijection between A and B. *) -(* Also, if there are surjections between A and B, then there is a *) -(* bijection. *) -(* *) -(* `. .' 
*) -(***************************************************************************) - -THEOREM Fun_ExistInjAndSurjThenBij == - ASSUME NEW S, NEW T, - ExistsInjection(S,T), ExistsSurjection(S,T) - PROVE ExistsBijection(S,T) -<1>. ExistsInjection(T,S) BY Fun_SurjInverse DEF ExistsInjection, ExistsSurjection -<1>. QED BY Fun_CantorBernsteinSchroeder - - - -THEOREM Fun_ExistSurjAndSurjThenBij == - ASSUME NEW S, NEW T, - ExistsSurjection(S,T), ExistsSurjection(T,S) - PROVE ExistsBijection(S,T) -<1>. ExistsInjection(S,T) BY Fun_SurjInverse DEF ExistsInjection, ExistsSurjection -<1>2. QED BY Fun_ExistInjAndSurjThenBij - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Equivalences for ExistsBijection. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_ExistsBijEquiv == - ASSUME NEW S, NEW T - PROVE /\ ExistsBijection(S,T) <=> ExistsBijection(T,S) - /\ ExistsBijection(S,T) <=> ExistsInjection(S,T) /\ ExistsInjection(T,S) - /\ ExistsBijection(S,T) <=> ExistsInjection(S,T) /\ ExistsSurjection(S,T) - /\ ExistsBijection(S,T) <=> ExistsInjection(T,S) /\ ExistsSurjection(T,S) - /\ ExistsBijection(S,T) <=> ExistsSurjection(S,T) /\ ExistsSurjection(T,S) - -<1>1. ExistsBijection(S,T) <=> ExistsBijection(T,S) - BY Fun_ExistsBijSymmetric -<1>2. ExistsInjection(S,T) /\ ExistsInjection(T,S) => ExistsBijection(S,T) - BY Fun_CantorBernsteinSchroeder -<1>3. \A S1, T1 : ExistsBijection(S1,T1) => ExistsSurjection(S1,T1) - BY DEF ExistsBijection, ExistsSurjection, Bijection -<1>4. \A S1,T1 : ExistsSurjection(S1,T1) => ExistsInjection(T1,S1) - BY Fun_ExistsSurjMeansExistsRevInj -<1> QED BY <1>1, <1>2, <1>3, <1>4 - - ------------------------------------------------------------------------------ -(***************************************************************************) -(* `^{\large \vspace{12pt} *) -(* Facts about jections involving 1..n. *) -(* \vspace{12pt}}^' *) -(***************************************************************************) - - -(***************************************************************************) -(* `. .' *) -(* *) -(* There is a bijection from 1..b-a+1 to a..b for integers a,b with a <= b.*) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_ExistsBijInterval == - ASSUME NEW a \in Int, NEW b \in Int, a <= b - PROVE ExistsBijection(1 .. b-a+1, a .. b) - -<1>. DEFINE f == [i \in 1 .. b-a+1 |-> i+a-1] -<1>1. f \in [1 .. b-a+1 -> a .. b] BY SMT -<1>2. f \in Injection(1 .. b-a+1, a .. b) BY SMT DEF Injection -<1>3. f \in Surjection(1 .. b-a+1, a .. b) BY SMT DEF Surjection -<1>. QED BY <1>1, <1>2, <1>3 DEF ExistsBijection, Bijection - - -(***************************************************************************) -(* `. .' *) -(* *) -(* There is an injection from 1..n to 1..m iff n \leq m. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatInjLeq == - ASSUME NEW n \in Nat, NEW m \in Nat - PROVE ExistsInjection(1..n,1..m) <=> n \leq m -PROOF - (*************************************************************************) - (* n \leq m means Injection exists. This part is easy. *) - (*************************************************************************) - <1>1. 
ASSUME n \leq m PROVE [i \in 1..n |-> i] \in Injection(1..n, 1..m) - BY SMT, <1>1 DEF Injection - - (*************************************************************************) - (* Injection exists means n \leq m. This part is harder. *) - (*************************************************************************) - <1>2. ASSUME ExistsInjection(1..n,1..m) PROVE n \leq m - <2>. DEFINE P(mm) == \A nn \in Nat : nn > mm => Injection(1..nn, 1..mm) = {} - <2>1. SUFFICES \A mm \in Nat : P(mm) BY SMT, <1>2 DEF ExistsInjection - <2>2. P(0) BY Z3 DEF Injection - <2>3. ASSUME NEW mm \in Nat, P(mm) PROVE P(mm+1) - <3>1. SUFFICES ASSUME NEW nn \in Nat, nn > mm+1, - NEW f \in Injection(1..nn, 1..mm+1) - PROVE FALSE - OBVIOUS - <3>2. ASSUME NEW i \in 1..nn, f[i] = mm+1 PROVE FALSE - <4>. DEFINE g == [j \in 1..nn-1 |-> IF j<i THEN f[j] ELSE f[j+1]] - <4>1. nn-1 \in Nat /\ nn-1 > mm BY SMT, <3>1 - <4>2. g \in Injection(1..nn-1, 1..mm) BY SMT, <3>2 DEF Injection - <4>. QED BY <4>1, <4>2, P(mm) DEF Injection - <3>3. ASSUME ~\E i \in 1..nn : f[i] = mm+1 PROVE FALSE - <4>1. f \in Injection(1..nn, 1..mm) BY SMT, <3>3 DEF Injection - <4>. QED BY SMT, <4>1, <3>1, P(mm) - <3>. QED BY <3>2, <3>3 - <2>. QED BY Isa, NatInduction, <2>2, <2>3 - - <1> QED BY <1>1, <1>2 DEF ExistsInjection - - - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If a surjection from 1..n to S exists (for some n \in Nat) then a *) -(* bijection from 1..m to S exists (for some m \in Nat) and m \leq n. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatSurjImpliesNatBij == - ASSUME NEW S, NEW n \in Nat, ExistsSurjection(1..n,S) - PROVE \E m \in Nat : ExistsBijection(1..m,S) /\ m \leq n - - (*************************************************************************) - (* Pick the smallest m \in Nat for which there is a surjection from *) - (* 1..m to S. *) - (*************************************************************************) -<1>1. PICK m \in Nat : - /\ ExistsSurjection(1..m, S) - /\ \A k \in Nat : k < m => ~ExistsSurjection(1..k, S) - <2>. DEFINE NN == { m \in Nat : ExistsSurjection(1..m, S) } - <2>1. PICK m \in NN : \A k \in NN : <<k,m>> \notin OpToRel(<, Nat) - BY WFMin, NatLessThanWellFounded - <2>. QED - BY <2>1 DEF OpToRel - -<1>2. m <= n BY SMT, <1>1 - (*************************************************************************) - (* Any surjection from 1..m to S is bijective. *) - (*************************************************************************) -<1>3. PICK f \in Surjection(1..m, S) : TRUE BY <1>1 DEF ExistsSurjection -<1>4. ASSUME f \notin Injection(1..m, S) PROVE FALSE - <2>1. f \in [1..m -> S] BY <1>3 DEF Surjection - <2>2. PICK i,j \in 1..m : i < j /\ f[i] = f[j] - <3>1. PICK ii,jj \in 1..m : ii # jj /\ f[ii] = f[jj] - BY <2>1, <1>4 DEF Injection - <3>2. CASE ii < jj BY <3>1, <3>2 - <3>3. CASE jj < ii BY <3>1, <3>3 - <3>. QED BY SMT, <3>1, <3>2, <3>3 - <2>3. m-1 \in Nat BY SMT, <2>2 - <2>. DEFINE g == [k \in 1..m-1 |-> IF k=j THEN f[m] ELSE f[k]] - <2>4. g \in Surjection(1..m-1, S) - <3>1. g \in [1..m-1 -> S] BY SMT, <2>1 - <3>2. ASSUME NEW s \in S PROVE \E k \in 1..m-1 : g[k] = s - <4>. PICK l \in 1..m : f[l] = s BY <1>3 DEF Surjection - <4>. QED BY SMT, <2>2 - <3>. QED BY <3>1, <3>2 DEF Surjection - <2>. QED BY SMT, <2>3, <2>4, <1>1 DEF ExistsSurjection - -<1>. QED BY <1>2, <1>3, <1>4 DEF ExistsBijection, Bijection - - -(***************************************************************************) -(* Simple corollary. 
*) -(***************************************************************************) -THEOREM Fun_NatSurjEquivNatBij == - ASSUME NEW S - PROVE (\E n \in Nat : ExistsSurjection(1..n,S)) - <=> (\E m \in Nat : ExistsBijection(1..m,S)) -BY Fun_NatSurjImpliesNatBij, Fun_ExistsBijEquiv - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* For any set S, given n, m \in Nat such that bijections exist from 1..n *) -(* to S and from 1..m to S, then it must be the case that n = m. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatBijSame == - ASSUME NEW S, - NEW n \in Nat, ExistsBijection(1..n,S), - NEW m \in Nat, ExistsBijection(1..m,S) - PROVE n = m -BY SMT, Fun_NatInjLeq, Fun_ExistsBijEquiv, Fun_ExistsBijTransitive - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* S is empty iff there exists a bijection from 1..0 to S. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatBijEmpty == - ASSUME NEW S - PROVE ExistsBijection(1..0,S) <=> S = {} - -<1>1. ASSUME ExistsBijection(1..0, S), S # {} PROVE FALSE - <2>. ExistsInjection(S, 1..0) BY <1>1, Fun_ExistsBijEquiv - <2>. QED BY SMT, <1>1 DEF ExistsInjection, Injection -<1>2. ASSUME S = {} PROVE ExistsBijection(1..0, S) - BY SMT, <1>2, Fun_ExistsBijReflexive -<1>3. QED BY <1>1, <1>2 - - -(***************************************************************************) -(* `. .' *) -(* *) -(* S is a singleton iff there exists a bijection from 1..1 to S. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatBijSingleton == - ASSUME NEW S - PROVE ExistsBijection(1..1,S) <=> \E s : S = {s} -<1>1. ASSUME NEW f \in Bijection(1..1, S) PROVE \E s : S = {s} - BY SMT DEF Bijection, Injection, Surjection -<1>2. ASSUME NEW s, S = {s} PROVE [i \in 1..1 |-> s] \in Bijection(1..1, S) - BY SMT, <1>2 DEF Bijection, Injection, Surjection -<1>. QED BY <1>1, <1>2 DEF ExistsBijection - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If there exists a bijection from 1..m to S (for some m \in Nat), then *) -(* there exists a bijection from 1..n to T (for some n \in Nat), where T *) -(* is a subset of S. Furthermore n \leq m. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatBijSubset == - ASSUME NEW S, NEW m \in Nat, ExistsBijection(1..m,S), - NEW T \in SUBSET S - PROVE \E n \in Nat : ExistsBijection(1..n,T) /\ n \leq m - -<1>1. CASE T = {} BY Force, <1>1, Fun_NatBijEmpty -<1>2. CASE T # {} - <2>0. ExistsSurjection(1..m, S) BY Fun_ExistsBijEquiv - <2>1. ExistsSurjection(S, T) BY <1>2, Fun_ExistsSurjSubset - <2>2. ExistsSurjection(1..m, T) BY <2>0, <2>1, Fun_ExistsSurjTransitive - <2>. QED BY <2>2, Fun_NatSurjImpliesNatBij -<1> QED BY <1>1, <1>2 - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If there exists a bijection from 1..m to S (for some m \in Nat), then *) -(* there exists a bijection from 1..(m+1) to S \cup {x}, where x \notin S. *) -(* *) -(* *) -(* `. .' 
*) -(***************************************************************************) -THEOREM Fun_NatBijAddElem == - ASSUME NEW S, NEW m \in Nat, ExistsBijection(1..m,S), - NEW x, x \notin S - PROVE ExistsBijection(1..(m+1), S \cup {x}) - -<1>1. PICK F \in Bijection(1..m, S) : TRUE BY DEF ExistsBijection -<1>2. F \in [1..m -> S] BY <1>1 DEF Bijection, Injection -<1>3. \A s \in S : \E i \in 1..m : F[i] = s BY <1>1 DEF Bijection, Surjection -<1>4. \A i,j \in 1..m : F[i] = F[j] => i = j BY <1>1 DEF Bijection, Injection - -<1>. DEFINE G == [i \in 1..m+1 |-> IF i <= m THEN F[i] ELSE x] -<1>10. G \in [1..m+1 -> S \cup {x}] BY SMT, <1>2 -<1>20. ASSUME NEW t \in S \cup {x} PROVE \E i \in 1..m+1 : G[i] = t BY SMT, <1>3 -<1>30. ASSUME NEW i \in 1..m+1, NEW j \in 1..m+1, G[i] = G[j] PROVE i = j - BY SMT, <1>2, <1>4, <1>30 -<1>40. G \in Bijection(1..m+1, S \cup {x}) - BY <1>10, <1>20, <1>30 DEF Bijection, Injection, Surjection -<1>. QED BY <1>40 DEF ExistsBijection - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If there exists a bijection from 1..m to S (for some m \in Nat), then *) -(* there exists a bijection from 1..(m-1) to S \ {x}, where x \in S. *) -(* *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatBijSubElem == - ASSUME NEW S, NEW m \in Nat, ExistsBijection(1..m,S), - NEW x, x \in S - PROVE ExistsBijection(1..(m-1), S \ {x}) - -<1>1. PICK n \in Nat : ExistsBijection(1..n, S \ {x}) BY Fun_NatBijSubset -<1>2. ExistsBijection(1..n+1, (S \ {x}) \cup {x}) BY <1>1, Fun_NatBijAddElem -<1>3. ExistsBijection(1..n+1, S) BY <1>2 -<1>4. n = m-1 BY SMT, <1>3, Fun_NatBijSame -<1>. QED BY <1>1, <1>4 - - - -============================================================================= -\* Modification History -\* Last modified Thu Feb 13 14:51:29 GMT-03:00 2014 by merz -\* Last modified Tue Jun 11 12:30:05 CEST 2013 by bhargav -\* Last modified Fri May 31 15:27:41 CEST 2013 by bhargav -\* Last modified Fri May 03 12:55:32 PDT 2013 by tomr -\* Created Thu Apr 11 10:36:10 PDT 2013 by tomr diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/Functions.tla b/x/ccv/provider/keyguard/prototyping/tla/library/Functions.tla deleted file mode 100644 index a96195acb6..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/Functions.tla +++ /dev/null @@ -1,63 +0,0 @@ ------------------------------- MODULE Functions ----------------------------- -(***************************************************************************) -(* `^{\large\bf \vspace{12pt} *) -(* Notions about functions including injection, surjection, and bijection.*) -(* Originally contributed by Tom Rodeheffer, MSR. *) -(* \vspace{12pt}}^' *) -(***************************************************************************) - - -(***************************************************************************) -(* Restriction of a function to a set (should be a subset of the domain). *) -(***************************************************************************) -Restrict(f,S) == [ x \in S |-> f[x] ] - -(***************************************************************************) -(* Range of a function. *) -(* Note: The image of a set under function f can be defined as *) -(* Range(Restrict(f,S)). 
*) -(***************************************************************************) -Range(f) == { f[x] : x \in DOMAIN f } - - -(***************************************************************************) -(* The inverse of a function. *) -(***************************************************************************) -Inverse(f,S,T) == [t \in T |-> CHOOSE s \in S : t \in Range(f) => f[s] = t] - - -(***************************************************************************) -(* A map is an injection iff each element in the domain maps to a distinct *) -(* element in the range. *) -(***************************************************************************) -Injection(S,T) == { M \in [S -> T] : \A a,b \in S : M[a] = M[b] => a = b } - - -(***************************************************************************) -(* A map is a surjection iff for each element in the range there is some *) -(* element in the domain that maps to it. *) -(***************************************************************************) -Surjection(S,T) == { M \in [S -> T] : \A t \in T : \E s \in S : M[s] = t } - - -(***************************************************************************) -(* A map is a bijection iff it is both an injection and a surjection. *) -(***************************************************************************) -Bijection(S,T) == Injection(S,T) \cap Surjection(S,T) - - -(***************************************************************************) -(* An injection, surjection, or bijection exists if the corresponding set *) -(* is nonempty. *) -(***************************************************************************) -ExistsInjection(S,T) == Injection(S,T) # {} -ExistsSurjection(S,T) == Surjection(S,T) # {} -ExistsBijection(S,T) == Bijection(S,T) # {} - - -============================================================================= -\* Modification History -\* Last modified Wed Jul 10 20:32:37 CEST 2013 by merz -\* Last modified Wed Jun 05 12:14:19 CEST 2013 by bhargav -\* Last modified Fri May 03 12:55:35 PDT 2013 by tomr -\* Created Thu Apr 11 10:30:48 PDT 2013 by tomr diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/JectionThm.tla b/x/ccv/provider/keyguard/prototyping/tla/library/JectionThm.tla deleted file mode 100644 index 1ab1d0cbd4..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/JectionThm.tla +++ /dev/null @@ -1,1130 +0,0 @@ ----------------------------- MODULE JectionThm ------------------------------ -(***************************************************************************) -(* `^{\large\bf \vspace{12pt} *) -(* Facts about injections, surjections, and bijections. *) -(* \vspace{12pt}}^' *) -(***************************************************************************) - -EXTENDS - Naturals, - Jections, - NaturalsInduction, - WellFoundedInduction, - TLAPS, - Sequences - - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Definitions of injections, surjections, bijections restated as facts. *) -(* *) -(* `. .' 
*) -(***************************************************************************) -THEOREM Fun_IsInj == - ASSUME NEW S, NEW T, NEW F \in [S -> T], - \A a,b \in S : F[a] = F[b] => a = b - PROVE F \in Injection(S,T) -BY DEF Injection - - -THEOREM Fun_IsSurj == - ASSUME NEW S, NEW T, NEW F \in [S -> T], - \A t \in T : \E s \in S : F[s] = t - PROVE F \in Surjection(S,T) -BY DEF Surjection - - -THEOREM Fun_IsBij == - ASSUME NEW S, NEW T, NEW F, - \/ F \in Injection(S,T) - \/ (F \in [S -> T] /\ \A a,b \in S : F[a] = F[b] => a = b), - - \/ F \in Surjection(S,T) - \/ (F \in [S -> T] /\ \A t \in T : \E s \in S : F[s] = t) - PROVE F \in Bijection(S,T) -BY DEF Bijection, Injection, Surjection - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Properties of an injection. *) -(* *) -(* `. .' *) -(***************************************************************************) -Fun_InjProp_Qed(S,T,F) == -/\ F \in [S -> T] -/\ \A a,b \in S : F[a] = F[b] => a = b - - -THEOREM Fun_InjProp == - ASSUME NEW S, NEW T, NEW F \in Injection(S,T) - PROVE Fun_InjProp_Qed(S,T,F) -BY DEF Injection, Fun_InjProp_Qed - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Properties of a surjection. *) -(* *) -(* `. .' *) -(***************************************************************************) -Fun_SurjProp_Qed(S,T,F) == -/\ F \in [S -> T] -/\ \A t \in T : \E s \in S : F[s] = t - - -THEOREM Fun_SurjProp == - ASSUME NEW S, NEW T, NEW F \in Surjection(S,T) - PROVE Fun_SurjProp_Qed(S,T,F) -BY DEF Surjection, Fun_SurjProp_Qed - - - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Properties of a bijection. *) -(* *) -(* `. .' *) -(***************************************************************************) -Fun_BijProp_Qed(S,T,F) == -/\ F \in [S -> T] -/\ F \in Injection(S,T) -/\ F \in Surjection(S,T) -/\ \A a,b \in S : F[a] = F[b] => a = b -/\ \A t \in T : \E s \in S : F[s] = t - - -THEOREM Fun_BijProp == - ASSUME NEW S, NEW T, NEW F \in Bijection(S,T) - PROVE Fun_BijProp_Qed(S,T,F) -BY DEF Bijection, Injection, Surjection, Fun_BijProp_Qed - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* A surjection in [S -> T] such that there is no surjection from any *) -(* subset of S to T is a bijection. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_SmallestSurjectionIsBijection == - ASSUME NEW S, NEW T, NEW f \in Surjection(S,T), - \A U \in SUBSET S : U # S => Surjection(U,T) = {} - PROVE f \in Bijection(S,T) -<1>1. f \in [S -> T] - BY Fun_SurjProp DEF Fun_SurjProp_Qed -<1>2. SUFFICES ASSUME f \notin Injection(S,T) PROVE FALSE - BY Fun_IsBij -<1>3. PICK a,b \in S : a # b /\ f[a] = f[b] - BY <1>1, <1>2, Fun_IsInj -<1>. DEFINE U == S \ {b} -<1>4. U \in SUBSET S /\ U # S - OBVIOUS -<1>. DEFINE g == [x \in U |-> f[x]] -<1>5. g \in Surjection(U,T) - <2>1. g \in [U -> T] BY <1>1 - <2>2. ASSUME NEW t \in T PROVE \E u \in U : g[u] = t - <3>1. CASE t = f[b] BY <1>3, <3>1 - <3>2. CASE t # f[b] - <4>1. PICK s \in S : f[s] = t - BY SMT, Fun_SurjProp DEF Fun_SurjProp_Qed \** Zenon fails ?? - <4>2. s \in U BY <3>2, <4>1 - <4>. QED BY <4>1, <4>2 - <3>3. QED BY <3>1, <3>2 - <2>3. QED BY <2>1, <2>2, Fun_IsSurj -<1>. QED BY <1>4, <1>5 - - - -(***************************************************************************) -(* `. .' 
*) -(* *) -(* Transitivity of injections, surjections, bijections. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_InjTransitive == - ASSUME NEW S, NEW T, NEW U, - NEW F \in Injection(S,T), - NEW G \in Injection(T,U) - PROVE [s \in S |-> G[F[s]]] \in Injection(S,U) -BY DEF Injection - - -THEOREM Fun_SurjTransitive == - ASSUME NEW S, NEW T, NEW U, - NEW F \in Surjection(S,T), - NEW G \in Surjection(T,U) - PROVE [s \in S |-> G[F[s]]] \in Surjection(S,U) -BY DEF Surjection - - -THEOREM Fun_BijTransitive == - ASSUME NEW S, NEW T, NEW U, - NEW F \in Bijection(S,T), - NEW G \in Bijection(T,U) - PROVE [s \in S |-> G[F[s]]] \in Bijection(S,U) -BY Fun_SurjTransitive, Fun_InjTransitive DEF Bijection - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* The inverse of a surjection is an injection. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_SurjInverse == - ASSUME NEW S, NEW T, NEW F \in Surjection(S,T) - PROVE JectionInverse(S,T,F) \in Injection(T,S) -BY DEF JectionInverse, Surjection, Injection - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Properties of the inverse of a bijection. *) -(* *) -(* `. .' *) -(***************************************************************************) -Fun_BijInverse_Qed(S,T,F,G) == - /\ G \in Bijection(T,S) - /\ \A s \in S : G[F[s]] = s - /\ \A t \in T : F[G[t]] = t - /\ F = JectionInverse(T,S,G) - - -THEOREM Fun_BijInverse == - ASSUME NEW S, NEW T, - NEW F \in Bijection(S,T), - NEW G, G = JectionInverse(S,T,F) - PROVE Fun_BijInverse_Qed(S,T,F,G) - -<1>1. \A a,b \in S : F[a] = F[b] => a = b BY DEF Bijection, Injection -<1>2. \A t \in T : \E s \in S : F[s] = t BY DEF Bijection, Surjection -<1>3. F \in [S -> T] BY DEF Bijection, Injection - -<1>4. G = [t \in T |-> CHOOSE s \in S : F[s] = t] BY DEF JectionInverse -<1>5. G \in [T -> S] BY <1>2, <1>4 - -<1>6. \A t \in T : F[G[t]] = t BY <1>2, <1>4 -<1>7. \A s \in S : G[F[s]] = s BY <1>1, <1>3, <1>4 - -<1>8. \A a,b \in T : G[a] = G[b] => a = b BY <1>6 -<1>9. \A s \in S : \E t \in T : G[t] = s BY <1>3, <1>7 -<1>10. G \in Bijection(T,S) BY <1>5, <1>8, <1>9, Fun_IsBij - -<1>11. F = JectionInverse(T,S,G) - <2>10. ASSUME NEW s \in S PROVE F[s] = CHOOSE t \in T : G[t] = s - <3>1. PICK a \in T : G[a] = s BY <1>3, <1>7 - <3>2. \A b \in T : G[b] = s => a = b BY <3>1, <1>8 - <3>3. F[s] = a BY <3>1, <1>6 - <3> QED BY <3>1, <3>2, <3>3 - <2> QED BY <2>10, <1>3 DEF JectionInverse - -<1> QED BY <1>6, <1>7, <1>11, <1>10 DEF Fun_BijInverse_Qed - - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Subset of a bijection is a bijection. *) -(* *) -(* `. .' *) -(***************************************************************************) -Fun_BijSubset_Qed(S,T,F,S1,T1,F1) == - /\ T1 \in SUBSET T - /\ F1 \in Bijection(S1,T1) - - -THEOREM Fun_BijSubset == - ASSUME - NEW S, NEW T, NEW F \in Bijection(S,T), - NEW S1 \in SUBSET S - PROVE - LET - T1 == {F[s] : s \in S1} - F1 == [s \in S1 |-> F[s]] - IN - Fun_BijSubset_Qed(S,T,F,S1,T1,F1) -PROOF - <1>1. PICK T1 : T1 = {F[s] : s \in S1} OBVIOUS - <1>2. PICK F1 : F1 = [s \in S1 |-> F[s]] OBVIOUS - - <1> HIDE DEF Fun_BijProp_Qed - <1>3. Fun_BijProp_Qed(S,T,F) BY Fun_BijProp - <1> USE DEF Fun_BijProp_Qed - - <1>4. F \in [S -> T] BY <1>3 - <1>5. \A a,b \in S : F[a] = F[b] => a = b BY <1>3 - <1>6. 
\A t \in T : \E s \in S : F[s] = t BY <1>3 - - <1>7. T1 \in SUBSET T BY <1>1, <1>4 - - <1>8. F1 \in [S1 -> T1] BY <1>1, <1>2 - <1>9. \A a,b \in S1 : F1[a] = F1[b] => a = b BY <1>2, <1>5 - <1>10. \A t \in T1 : \E s \in S1 : F1[s] = t BY <1>1, <1>2, <1>6 - - <1>11. F1 \in Bijection(S1,T1) BY <1>8, <1>9, <1>10, Fun_IsBij - - <1> USE DEF Fun_BijSubset_Qed - <1> QED BY <1>1, <1>2, <1>7, <1>11 - - - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Given F an injection from S to T, then F is a bijection from S to F(S). *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_InjMeansBijImage == - ASSUME NEW S, NEW T, - NEW F \in Injection(S,T), - NEW FS, FS = {F[s] : s \in S} - PROVE F \in Bijection(S,FS) -BY DEF Bijection, Injection, Surjection - - - - - - - - ------------------------------------------------------------------------------ -(***************************************************************************) -(* `^{\large\bf \vspace{12pt} *) -(* Facts about exists jections. *) -(* \vspace{12pt}}^' *) -(***************************************************************************) - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Definitions restated as facts. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_ExistsInj == - \A S,T : ExistsInjection(S,T) <=> Injection(S,T) # {} -BY DEF ExistsInjection - - -THEOREM Fun_ExistsSurj == - \A S,T : ExistsSurjection(S,T) <=> Surjection(S,T) # {} -BY DEF ExistsSurjection - - -THEOREM Fun_ExistsBij == - \A S,T : ExistsBijection(S,T) <=> Bijection(S,T) # {} -BY DEF ExistsBijection - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* There is a surjection from any set S to any non-empty subset T of S. *) -(* (Note that there cannot be a surjection to {} except if S is empty.) *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_ExistsSurjSubset == - ASSUME NEW S, NEW T \in SUBSET S, T # {} - PROVE ExistsSurjection(S,T) -<1>. PICK x \in T : TRUE OBVIOUS -<1>. [s \in S |-> IF s \in T THEN s ELSE x] \in Surjection(S,T) - BY DEF Surjection -<1>. QED BY DEF ExistsSurjection - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If there is a surjection from S to T, then there is an injection from T *) -(* to S. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_ExistsSurjMeansExistsRevInj == - ASSUME NEW S, NEW T - PROVE ExistsSurjection(S,T) => ExistsInjection(T,S) -BY Fun_SurjInverse DEF ExistsSurjection, ExistsInjection - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* ExistsBijection is reflexive, symmetric, and transitive. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_ExistsBijReflexive == - ASSUME NEW S - PROVE ExistsBijection(S,S) -<1>. [s \in S |-> s] \in Bijection(S,S) BY DEF Bijection, Injection, Surjection -<1>. 
QED BY DEF ExistsBijection - - -THEOREM Fun_ExistsBijSymmetric == - ASSUME NEW S, NEW T, ExistsBijection(S,T) - PROVE ExistsBijection(T,S) -BY Fun_BijInverse DEF Fun_BijInverse_Qed, ExistsBijection - - -THEOREM Fun_ExistsBijTransitive == - ASSUME NEW S, NEW T, NEW U, ExistsBijection(S,T), ExistsBijection(T,U) - PROVE ExistsBijection(S,U) -BY Fun_BijTransitive DEF ExistsBijection - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Existence of injections and surjections is reflexive and transitive. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_ExistsInjReflexive == - ASSUME NEW S - PROVE ExistsInjection(S,S) -BY Fun_ExistsBijReflexive DEF ExistsBijection, ExistsInjection, Bijection - - -THEOREM Fun_ExistsSurjReflexive == - ASSUME NEW S - PROVE ExistsSurjection(S,S) -BY Fun_ExistsBijReflexive DEF ExistsBijection, ExistsSurjection, Bijection - - -THEOREM Fun_ExistsInjTransitive == - ASSUME NEW S, NEW T, NEW U, - ExistsInjection(S,T), ExistsInjection(T,U) - PROVE ExistsInjection(S,U) -BY Fun_InjTransitive DEF ExistsInjection - - -THEOREM Fun_ExistsSurjTransitive == - ASSUME NEW S, NEW T, NEW U, - ExistsSurjection(S,T), ExistsSurjection(T,U) - PROVE ExistsSurjection(S,U) -BY Fun_SurjTransitive DEF ExistsSurjection - - ------------------------------------------------------------------------------ -(***************************************************************************) -(* `^{\large\bf \vspace{12pt} *) -(* The Cantor-Bernstein-Schroeder theorem. *) -(* \vspace{12pt}}^' *) -(***************************************************************************) - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If there exists an injection from S to T, where T is a subset of S, *) -(* then there exists a bijection from S to T. *) -(* *) -(* A lemma for the Cantor-Bernstein-Schroeder theorem. *) -(* *) -(* This proof is formalized from *) -(* `^\url{http://www.proofwiki.org/wiki/Cantor-Bernstein-Schroeder_Theorem/Lemma}^' *) -(* retrieved April 29, 2013. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_CantorBernsteinSchroeder_Lemma == - ASSUME NEW S, NEW T, T \subseteq S, ExistsInjection(S,T) - PROVE ExistsBijection(S,T) -PROOF - <1> PICK F \in Injection(S,T) : TRUE BY Fun_ExistsInj - - <1> USE DEF Fun_InjProp_Qed - <1>1. Fun_InjProp_Qed(S,T,F) BY Fun_InjProp - <1> USE DEF Fun_InjProp_Qed - - (*************************************************************************) - (* Pick Y as S excluding T. *) - (*************************************************************************) - <1>2. PICK Y : Y = S \ T OBVIOUS - - (*************************************************************************) - (* Define Ci[0] as Y, and Ci[i+1] as the image of Ci[i] under F. *) - (*************************************************************************) - <1> DEFINE Ci[i \in Nat] == - IF i = 0 THEN Y ELSE {F[s] : s \in Ci[i-1]} - <1> HIDE DEF Ci - - <1>3. \A i \in Nat : Ci[i] = - IF i = 0 THEN Y ELSE {F[s] : s \in Ci[i-1]} - (***********************************************************************) - (* Use NatInductiveDef to prove that Ci equals its definition. 
*) - (***********************************************************************) - <2> DEFINE - f0 == Y - Def(v,i) == {F[s] : s \in v} - f == CHOOSE f : f = [i \in Nat |-> IF i = 0 THEN f0 ELSE Def(f[i-1],i)] - <2> SUFFICES \A i \in Nat : f[i] = IF i = 0 THEN f0 ELSE Def(f[i-1],i) BY DEF Ci - <2> HIDE DEF f0, Def, f - <2> SUFFICES NatInductiveDefConclusion(f,f0,Def) BY DEF NatInductiveDefConclusion - <2> SUFFICES NatInductiveDefHypothesis(f,f0,Def) BY NatInductiveDef - <2> QED BY DEF NatInductiveDefHypothesis, f - - (*************************************************************************) - (* Applying F to an element of Ci[i] produces an element of Ci[i+1]. *) - (*************************************************************************) - <1>4. ASSUME NEW i \in Nat, NEW s \in Ci[i] - PROVE F[s] \in Ci[i+1] - BY <1>3, SMT - - (*************************************************************************) - (* Each element of Ci[i+1] is the application of F to some element in *) - (* Ci[i]. *) - (*************************************************************************) - <1>5. ASSUME NEW i \in Nat, NEW t \in Ci[i+1] - PROVE \E s \in Ci[i] : F[s] = t - BY <1>3, SMT - - (*************************************************************************) - (* Each Ci[i] \subseteq S. *) - (*************************************************************************) - <1>6. \A i \in Nat : Ci[i] \subseteq S - <2> DEFINE Prop(i) == Ci[i] \subseteq S - <2> SUFFICES \A i \in Nat : Prop(i) OBVIOUS - <2>1. Prop(0) BY <1>2, <1>3 - <2>2. ASSUME NEW i \in Nat, Prop(i) PROVE Prop(i+1) - <3> SUFFICES ASSUME NEW t \in Ci[i+1] PROVE t \in S OBVIOUS - <3>1. PICK s \in Ci[i] : F[s] = t BY <1>5 - <3>2. s \in S BY <2>2 - <3> QED BY <3>1, <3>2, <1>1 - <2> HIDE DEF Prop - <2> QED BY <2>1, <2>2, NatInduction, Isa - - (*************************************************************************) - (* Pick C as the union of all Ci[i]. *) - (*************************************************************************) - <1>7. PICK C : C = UNION {Ci[i] : i \in Nat} OBVIOUS - <1>8. C \subseteq S BY <1>6, <1>7 - - (*************************************************************************) - (* Pick FC as the image of C under F. *) - (*************************************************************************) - <1>9. PICK FC : FC = {F[c] : c \in C} OBVIOUS - <1>10. FC \subseteq T BY <1>1, <1>8, <1>9, Isa - - (*************************************************************************) - (* C = Y \cup FC because Ci[0] = Y and Ci[i+1] = image of Ci[i] under F. *) - (*************************************************************************) - <1>11. C = Y \cup FC - <2>1. ASSUME NEW c \in C PROVE c \in Y \cup FC - <3>1. PICK i \in Nat : c \in Ci[i] BY <1>7 - <3>2. CASE i = 0 BY <3>1, <3>2, <1>3 - <3>3. CASE i # 0 - <4>1. PICK s \in Ci[i-1] : F[s] = c BY <3>1, <3>3, <1>5, SMT - <4>2. s \in C BY <3>3, <1>7, SMT - <4> QED BY <4>1, <4>2, <1>9 - <3> QED BY <3>2, <3>3 - <2>2. ASSUME NEW c \in Y \cup FC PROVE c \in C - <3>1. CASE c \in Y BY <3>1, <1>3, <1>7 - <3>2. CASE c \in FC - <4>1. PICK s \in C : F[s] = c BY <3>2, <1>9 - <4>2. PICK i \in Nat : s \in Ci[i] BY <4>1, <1>7 - <4>3. F[s] \in Ci[i+1] BY <4>2, <1>4 - <4> QED BY <4>1, <4>3, <1>7, SMT - <3> QED BY <3>1, <3>2 - <2> QED BY <2>1, <2>2 - - (*************************************************************************) - (* S \ C is the same as T \ FC. *) - (*************************************************************************) - <1>12. 
S \ C = T \ FC BY <1>2, <1>11 - - (*************************************************************************) - (* Pick H as F on C and the identity on S \ C. Since F (restricted to *) - (* C) is a bijection from C to FC and S \ C = T \ FC, this makes H a *) - (* bijection from S to T. *) - (*************************************************************************) - <1>13. PICK H : H = [s \in S |-> IF s \in C THEN F[s] ELSE s] OBVIOUS - <1>14. H \in Bijection(S,T) - (***********************************************************************) - (* A useful lemma. If a \in C and b \notin C, then H[a] # H[b]. *) - (***********************************************************************) - <2>1. ASSUME NEW a \in S, NEW b \in S, a \in C, b \notin C PROVE H[a] # H[b] - <3>1. H[a] \in FC BY <2>1, <1>1, <1>9, <1>13 - <3>2. H[b] \in T \ FC BY <2>1, <1>12, <1>13 - <3> QED BY <3>1, <3>2 - - <2>2. H \in [S -> T] - <3> SUFFICES ASSUME NEW s \in S PROVE H[s] \in T BY <1>13 - <3>1. CASE s \in C BY <3>1, <1>1, <1>10, <1>13 - <3>2. CASE s \notin C BY <3>2, <1>12, <1>13 - <3> QED BY <3>1, <3>2 - - <2>3. ASSUME NEW a \in S, NEW b \in S, H[a] = H[b] PROVE a = b - <3> H[a] = H[b] BY <2>3 - <3>1. CASE a \in C /\ b \in C BY <3>1, <1>1, <1>13 - <3>2. CASE a \in C /\ b \notin C BY <3>2, <2>1 (* impossible by lemma *) - <3>3. CASE a \notin C /\ b \in C BY <3>3, <2>1 (* impossible by lemma *) - <3>4. CASE a \notin C /\ b \notin C BY <3>4, <1>13 - <3> QED BY <3>1, <3>2, <3>3, <3>4 - - <2>4. ASSUME NEW t \in T PROVE \E s \in S : H[s] = t - <3>1. CASE t \in FC BY <3>1, <1>8, <1>9, <1>13 - <3>2. CASE t \notin FC BY <3>2, <1>12, <1>13 - <3> QED BY <3>1, <3>2 - - <2> QED BY <2>2, <2>3, <2>4, Fun_IsBij - - <1> QED BY <1>14, Fun_ExistsBij - - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If an injection exists from S to T and an injection exists from T to S, *) -(* then there is a bijection from S to T. *) -(* *) -(* This is the Cantor-Bernstein-Schroeder theorem. *) -(* *) -(* This proof is formalized from *) -(* `^\url{http://www.proofwiki.org/wiki/Cantor-Bernstein-Schroeder_Theorem/Proof_5}^' *) -(* retrieved April 29, 2013. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_CantorBernsteinSchroeder == - ASSUME NEW S, NEW T, - ExistsInjection(S,T), ExistsInjection(T,S) - PROVE ExistsBijection(S,T) - -<1>1. PICK F : F \in Injection(S,T) BY DEF ExistsInjection -<1>2. PICK G : G \in Injection(T,S) BY DEF ExistsInjection -<1>. DEFINE RngG == {G[t] : t \in T} - GF == [s \in S |-> G[F[s]]] -<1>3. RngG \subseteq S BY <1>2 DEF Injection -<1>4. GF \in Injection(S, RngG) BY <1>1, <1>2 DEF Injection -<1>5. ExistsBijection(S, RngG) BY <1>3, <1>4, Fun_CantorBernsteinSchroeder_Lemma DEF ExistsInjection -<1>6. ExistsBijection(T, RngG) BY <1>2, Fun_InjMeansBijImage DEF ExistsBijection -<1>. QED BY <1>5, <1>6, Fun_ExistsBijSymmetric, Fun_ExistsBijTransitive - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Applications of the Cantor-Bernstein-Schroeder Theorem. *) -(* If there exists an injection f: A->B and a surjection g: A->B, then *) -(* there exists a bijection between A and B. *) -(* Also, if there are surjections between A and B, then there is a *) -(* bijection. *) -(* *) -(* `. .' 
*) -(***************************************************************************) - -THEOREM Fun_ExistInjAndSurjThenBij == - ASSUME NEW S, NEW T, - ExistsInjection(S,T), ExistsSurjection(S,T) - PROVE ExistsBijection(S,T) -<1>. ExistsInjection(T,S) - BY Fun_SurjInverse DEF ExistsInjection, ExistsSurjection -<1>. QED BY Fun_CantorBernsteinSchroeder - - - -THEOREM Fun_ExistSurjAndSurjThenBij == - ASSUME NEW S, NEW T, - ExistsSurjection(S,T), ExistsSurjection(T,S) - PROVE ExistsBijection(S,T) -<1>. ExistsInjection(S,T) - BY Fun_SurjInverse DEF ExistsInjection, ExistsSurjection -<1>2. QED BY Fun_ExistInjAndSurjThenBij - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* Equivalences for ExistsBijection. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_ExistsBijEquiv == - ASSUME NEW S, NEW T - PROVE /\ ExistsBijection(S,T) <=> ExistsBijection(T,S) - /\ ExistsBijection(S,T) <=> ExistsInjection(S,T) /\ ExistsInjection(T,S) - /\ ExistsBijection(S,T) <=> ExistsInjection(S,T) /\ ExistsSurjection(S,T) - /\ ExistsBijection(S,T) <=> ExistsInjection(T,S) /\ ExistsSurjection(T,S) - /\ ExistsBijection(S,T) <=> ExistsSurjection(S,T) /\ ExistsSurjection(T,S) - -<1>1. ExistsBijection(S,T) <=> ExistsBijection(T,S) - BY Fun_ExistsBijSymmetric -<1>2. ExistsInjection(S,T) /\ ExistsInjection(T,S) => ExistsBijection(S,T) - BY Fun_CantorBernsteinSchroeder -<1>3. \A S1, T1 : ExistsBijection(S1,T1) => ExistsSurjection(S1,T1) - BY DEF ExistsBijection, ExistsSurjection, Bijection -<1>4. \A S1,T1 : ExistsSurjection(S1,T1) => ExistsInjection(T1,S1) - BY Fun_ExistsSurjMeansExistsRevInj -<1> QED BY <1>1, <1>2, <1>3, <1>4 - - ------------------------------------------------------------------------------ -(***************************************************************************) -(* `^{\large\bf \vspace{12pt} *) -(* Facts about jections involving 1..n. *) -(* \vspace{12pt}}^' *) -(***************************************************************************) - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* There is an injection from 1..n to 1..m iff n \leq m. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatInjLeq == - ASSUME NEW n \in Nat, NEW m \in Nat - PROVE ExistsInjection(1..n,1..m) <=> n \leq m -PROOF - (*************************************************************************) - (* n \leq m means Injection exists. This part is easy. *) - (*************************************************************************) - <1>1. ASSUME n \leq m PROVE [i \in 1..n |-> i] \in Injection(1..n, 1..m) - BY SMT, <1>1 DEF Injection - - (*************************************************************************) - (* Injection exists means n \leq m. This part is harder. *) - (*************************************************************************) - <1>2. ASSUME ExistsInjection(1..n,1..m) PROVE n \leq m - <2>. DEFINE P(mm) == \A nn \in Nat : nn > mm => Injection(1..nn, 1..mm) = {} - <2>1. SUFFICES \A mm \in Nat : P(mm) BY SMT, <1>2 DEF ExistsInjection - <2>2. P(0) BY Z3 DEF Injection - <2>3. ASSUME NEW mm \in Nat, P(mm) PROVE P(mm+1) - <3>1. SUFFICES ASSUME NEW nn \in Nat, nn > mm+1, - NEW f \in Injection(1..nn, 1..mm+1) - PROVE FALSE - OBVIOUS - <3>2. ASSUME NEW i \in 1..nn, f[i] = mm+1 PROVE FALSE - <4>. DEFINE g == [j \in 1..nn-1 |-> IF j<i THEN f[j] ELSE f[j+1]] - <4>1. nn-1 \in Nat /\ nn-1 > mm BY SMT, <3>1 - <4>2. g \in Injection(1..nn-1, 1..mm) BY SMT, <3>2 DEF Injection - <4>. QED BY <4>1, <4>2, P(mm) DEF Injection - <3>3. ASSUME ~\E i \in 1..nn : f[i] = mm+1 PROVE FALSE - <4>1. f \in Injection(1..nn, 1..mm) BY SMT, <3>3 DEF Injection - <4>. QED BY SMT, <4>1, <3>1, P(mm) - <3>. QED BY <3>2, <3>3 - <2>. QED BY Isa, NatInduction, <2>2, <2>3 - - <1> QED BY <1>1, <1>2 DEF ExistsInjection - - - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If a surjection from 1..n to S exists (for some n \in Nat) then a *) -(* bijection from 1..m to S exists (for some m \in Nat) and m \leq n. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatSurjImpliesNatBij == - ASSUME NEW S, NEW n \in Nat, ExistsSurjection(1..n,S) - PROVE \E m \in Nat : ExistsBijection(1..m,S) /\ m \leq n - - (*************************************************************************) - (* Pick the smallest m \in Nat for which there is a surjection from *) - (* 1..m to S. *) - (*************************************************************************) -<1>1. PICK m \in Nat : - /\ ExistsSurjection(1..m, S) - /\ \A k \in Nat : k < m => ~ExistsSurjection(1..k, S) - <2>. DEFINE NN == { m \in Nat : ExistsSurjection(1..m, S) } - <2>1. PICK m \in NN : \A k \in NN : <<k,m>> \notin OpToRel(<, Nat) - BY WFMin, NatLessThanWellFounded - <2>. QED - BY <2>1 DEF OpToRel - -<1>2. m <= n BY SMT, <1>1 - (*************************************************************************) - (* Any surjection from 1..m to S is bijective. *) - (*************************************************************************) -<1>3. PICK f \in Surjection(1..m, S) : TRUE BY <1>1 DEF ExistsSurjection -<1>4. ASSUME f \notin Injection(1..m, S) PROVE FALSE - <2>1. f \in [1..m -> S] BY <1>3 DEF Surjection - <2>2. PICK i,j \in 1..m : i < j /\ f[i] = f[j] - <3>1. PICK ii,jj \in 1..m : ii # jj /\ f[ii] = f[jj] - BY <2>1, <1>4 DEF Injection - <3>2. CASE ii < jj BY <3>1, <3>2 - <3>3. CASE jj < ii BY <3>1, <3>3 - <3>. QED BY SMT, <3>1, <3>2, <3>3 - <2>3. m-1 \in Nat BY SMT, <2>2 - <2>. DEFINE g == [k \in 1..m-1 |-> IF k=j THEN f[m] ELSE f[k]] - <2>4. g \in Surjection(1..m-1, S) - <3>1. g \in [1..m-1 -> S] BY SMT, <2>1 - <3>2. ASSUME NEW s \in S PROVE \E k \in 1..m-1 : g[k] = s - <4>. PICK l \in 1..m : f[l] = s BY <1>3 DEF Surjection - <4>. QED BY SMT, <2>2 - <3>. QED BY <3>1, <3>2 DEF Surjection - <2>. QED BY SMT, <2>3, <2>4, <1>1 DEF ExistsSurjection - -<1>. QED BY <1>2, <1>3, <1>4 DEF ExistsBijection, Bijection - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* A surjection from some 1..n to S exists iff a bijection from some *) -(* 1..m to S exists. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatSurjEquivNatBij == - ASSUME NEW S - PROVE (\E n \in Nat : ExistsSurjection(1..n,S)) - <=> (\E m \in Nat : ExistsBijection(1..m,S)) -BY Fun_NatSurjImpliesNatBij, Fun_ExistsBijEquiv - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* For any set S, given n, m \in Nat such that bijections exist from 1..n *) -(* to S and from 1..m to S, then it must be the case that n = m. *) -(* *) -(* `. .' 
*) -(***************************************************************************) -THEOREM Fun_NatBijSame == - ASSUME NEW S, - NEW n \in Nat, ExistsBijection(1..n,S), - NEW m \in Nat, ExistsBijection(1..m,S) - PROVE n = m -BY SMT, Fun_NatInjLeq, Fun_ExistsBijEquiv, Fun_ExistsBijTransitive - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* S is empty iff there exists a bijection from 1..0 to S. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatBijEmpty == - ASSUME NEW S - PROVE ExistsBijection(1..0,S) <=> S = {} -<1>1. ASSUME ExistsBijection(1..0, S), S # {} PROVE FALSE - <2>1. ExistsInjection(S, 1..0) BY <1>1, Fun_ExistsBijEquiv - <2>2. QED BY SMT, <1>1, <2>1 DEF ExistsInjection, Injection -<1>2. ASSUME S = {} PROVE ExistsBijection(1..0, S) - BY SMT, <1>2, Fun_ExistsBijReflexive -<1>3. QED BY <1>1, <1>2 - - -(***************************************************************************) -(* `. .' *) -(* *) -(* S is a singleton iff there exists a bijection from 1..1 to S. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatBijSingleton == - ASSUME NEW S - PROVE ExistsBijection(1..1,S) <=> \E s : S = {s} -<1>1. ASSUME NEW f \in Bijection(1..1, S) PROVE \E s : S = {s} - BY SMT DEF Bijection, Injection, Surjection -<1>2. ASSUME NEW s, S = {s} PROVE [i \in 1..1 |-> s] \in Bijection(1..1, S) - BY SMT, <1>2 DEF Bijection, Injection, Surjection -<1>. QED BY <1>1, <1>2 DEF ExistsBijection - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If there exists a bijection from 1..m to S (for some m \in Nat), then *) -(* there exists a bijection from 1..n to T (for some n \in Nat), where T *) -(* is a subset of S. Furthermore n \leq m. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatBijSubset == - ASSUME NEW S, NEW m \in Nat, ExistsBijection(1..m,S), - NEW T \in SUBSET S - PROVE \E n \in Nat : ExistsBijection(1..n,T) /\ n \leq m - -<1>1. CASE T = {} BY Force, <1>1, Fun_NatBijEmpty -<1>2. CASE T # {} - <2>0. ExistsSurjection(1..m, S) BY Fun_ExistsBijEquiv - <2>1. ExistsSurjection(S, T) BY <1>2, Fun_ExistsSurjSubset - <2>2. ExistsSurjection(1..m, T) BY <2>0, <2>1, Fun_ExistsSurjTransitive - <2>. QED BY <2>2, Fun_NatSurjImpliesNatBij -<1> QED BY <1>1, <1>2 - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If there exists a bijection from 1..m to S (for some m \in Nat), then *) -(* there exists a bijection from 1..(m+1) to S \cup {x}, where x \notin S. *) -(* *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatBijAddElem == - ASSUME NEW S, NEW m \in Nat, ExistsBijection(1..m,S), - NEW x, x \notin S - PROVE ExistsBijection(1..(m+1), S \cup {x}) - -<1>1. PICK F \in Bijection(1..m, S) : TRUE BY DEF ExistsBijection -<1>2. F \in [1..m -> S] BY <1>1 DEF Bijection, Injection -<1>3. \A s \in S : \E i \in 1..m : F[i] = s BY <1>1 DEF Bijection, Surjection -<1>4. \A i,j \in 1..m : F[i] = F[j] => i = j BY <1>1 DEF Bijection, Injection - -<1>. DEFINE G == [i \in 1..m+1 |-> IF i <= m THEN F[i] ELSE x] -<1>10. G \in [1..m+1 -> S \cup {x}] BY SMT, <1>2 -<1>20. ASSUME NEW t \in S \cup {x} PROVE \E i \in 1..m+1 : G[i] = t BY SMT, <1>3 -<1>30. 
ASSUME NEW i \in 1..m+1, NEW j \in 1..m+1, G[i] = G[j] PROVE i = j - BY SMT, <1>2, <1>4, <1>30 -<1>40. G \in Bijection(1..m+1, S \cup {x}) - BY <1>10, <1>20, <1>30 DEF Bijection, Injection, Surjection -<1>. QED BY <1>40 DEF ExistsBijection - - - - -(***************************************************************************) -(* `. .' *) -(* *) -(* If there exists a bijection from 1..m to S (for some m \in Nat), then *) -(* there exists a bijection from 1..(m-1) to S \ {x}, where x \in S. *) -(* *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatBijSubElem == - ASSUME NEW S, NEW m \in Nat, ExistsBijection(1..m,S), - NEW x, x \in S - PROVE ExistsBijection(1..(m-1), S \ {x}) - -<1>1. PICK n \in Nat : ExistsBijection(1..n, S \ {x}) BY Fun_NatBijSubset -<1>2. ExistsBijection(1..n+1, (S \ {x}) \cup {x}) BY <1>1, Fun_NatBijAddElem -<1>3. ExistsBijection(1..n+1, S) BY <1>2 -<1>4. n = m-1 BY SMT, <1>3, Fun_NatBijSame -<1>. QED BY <1>1, <1>4 - - - -(* doesn't seem to be used anywhere, and is superseded in practice by cardinality theorems - -(***************************************************************************) -(* `. .' *) -(* *) -(* If there exists a bijection from 1..m to S (for some m \in Nat) and *) -(* there exists a bijection from 1..n to T (for some n \in Nat), and S and *) -(* T are disjoint, then there exists a bijection from 1..(m+n) to S \cup *) -(* T. *) -(* *) -(* `. .' *) -(***************************************************************************) -THEOREM Fun_NatBijDisjointUnion == - ASSUME - NEW S, NEW m \in Nat, ExistsBijection(1..m,S), - NEW T, NEW n \in Nat, ExistsBijection(1..n,T), - S \cap T = {} - PROVE - ExistsBijection(1..(m+n),S \cup T) -PROOF - (*************************************************************************) - (* Restate the assumptions and then remove them from automatic use. It *) - (* seems these assumptions cause some of the SMT appeals to fail. *) - (*************************************************************************) - <1>1. ExistsBijection(1..m,S) OBVIOUS - <1>2. ExistsBijection(1..n,T) OBVIOUS - <1>3. S \cap T = {} OBVIOUS - <1> USE ONLY TRUE - - <1> USE DEF ExistsBijection - <1> USE DEF Fun_BijProp_Qed - - (*************************************************************************) - (* Proof by induction on n. *) - (*************************************************************************) - <1> DEFINE - Prop(i) == - \A T1 : - ExistsBijection(1..i,T1) /\ T1 \cap S = {} => - ExistsBijection(1..(m+i),S \cup T1) - - <1>4. \A i \in Nat : Prop(i) - <2>1. Prop(0) - (*********************************************************************) - (* Base case. *) - (*********************************************************************) - <3>1. SUFFICES ASSUME NEW T1, ExistsBijection(1..0,T1), T1 \cap S = {} - PROVE ExistsBijection(1..(m+0),S \cup T1) - OBVIOUS - <3>2. T1 = {} BY <3>1, Fun_NatBijEmpty - <3>3. m+0 = m BY SMT - <3>4. S \cup T1 = S BY <3>2 - <3> QED BY <3>3, <3>4, <1>1 - - <2>2. ASSUME NEW i \in Nat, Prop(i) PROVE Prop(i+1) - (*********************************************************************) - (* Inductive case. *) - (*********************************************************************) - <3>1. PICK j \in Nat : j = i+1 BY SMT - <3>2. SUFFICES ASSUME NEW T1, ExistsBijection(1..j,T1), T1 \cap S = {} - PROVE ExistsBijection(1..(m+j),S \cup T1) - BY <3>1 - - <3>3. j # 0 BY <3>1, SMT - <3>4. ~ExistsBijection(1..0,T1) BY <3>2, <3>3, Fun_NatBijSame - <3>5. 
T1 # {} BY <3>4, Fun_NatBijEmpty - - (*********************************************************************) - (* Construct T2 by removing element t from T1. *) - (*********************************************************************) - <3>6. PICK t : t \in T1 BY <3>5 - <3>7. t \notin S BY <3>2, <3>6 - <3>8. PICK T2 : T2 = T1 \ {t} OBVIOUS - <3>9. t \notin T2 BY <3>8 - <3>10. T2 \subseteq T1 BY <3>8 - <3>11. T1 = T2 \cup {t} BY <3>6, <3>8 - <3>12. T2 \cap S = {} BY <3>2, <3>8 - - (*********************************************************************) - (* Show that there exists a bijection from 1..i to T2. *) - (*********************************************************************) - <3>13. PICK j2 \in Nat : ExistsBijection(1..j2,T2) BY <3>2, <3>10, Fun_NatBijSubset - <3>14. ExistsBijection(1..(j2+1),T1) BY <3>9, <3>11, <3>13, Fun_NatBijAddElem - <3>15. j2+1 \in Nat BY SMT - <3>16. j = j2 + 1 BY <3>2, <3>14, <3>15, Fun_NatBijSame - <3>17. j2 = i BY <3>1, <3>16, SMT - <3>18. ExistsBijection(1..(m+i),S \cup T2) BY <3>12, <3>13, <3>17, <2>2 - - (*********************************************************************) - (* By the inductive hypothesis, there exists a bijection F from *) - (* 1..(m+i) to S \cup T2. *) - (*********************************************************************) - <3>19. PICK F : F \in Bijection(1..(m+i),S \cup T2) BY <3>18 - <3>20. Fun_BijProp_Qed(1..(m+i),S \cup T2,F) - <4> HIDE DEF Fun_BijProp_Qed - <4> QED BY <3>19, Fun_BijProp - <3>21. F \in [1..(m+i) -> S \cup T2] BY <3>20 - <3>22. \A s \in S \cup T2 : \E k \in 1..(m+i) : F[k] = s BY <3>20 - <3>23. \A a,b \in 1..(m+i) : F[a] = F[b] => a = b BY <3>20 - - (*********************************************************************) - (* Construct G by extending F to cover t. G is a bijection from *) - (* 1..(m+j) to S \cup T1. *) - (*********************************************************************) - <3>24. PICK G : G = [k \in 1..(m+j) |-> IF k \leq (m+i) THEN F[k] ELSE t] OBVIOUS - <3>25. G \in Bijection(1..(m+j),S \cup T1) - <4>1. \A a \in 1..(m+j) : a \leq m+i => a \in 1..(m+i) BY <3>1, SMT - <4>2. \A a,b \in 1..(m+j) : a \leq m+i /\ ~(b \leq m+i) => G[a] # G[b] - BY <4>1, <3>7, <3>9, <3>21, <3>24 - - <4>3. G \in [1..(m+j) -> S \cup T1] - (*****************************************************************) - (* Function. *) - (*****************************************************************) - <5>1. SUFFICES ASSUME NEW k \in 1..(m+j) PROVE G[k] \in S \cup T1 BY <3>24 - <5>2. CASE k \leq (m+i) - <6>1. G[k] = F[k] BY <5>2, <3>24 - <6>2. F[k] \in S \cup T2 BY <5>2, <4>1, <3>21 - <6> QED BY <6>1, <6>2, <3>10 - <5>3. CASE ~(k \leq (m+i)) - <6>1. G[k] = t BY <5>3, <3>24 - <6> QED BY <6>1, <3>6 - <5> QED BY <5>2, <5>3 - <4>4. ASSUME NEW s \in S \cup T1 PROVE \E k \in 1..(m+j) : G[k] = s - (*****************************************************************) - (* Injective. *) - (*****************************************************************) - <5>1. CASE s \in S \cup T2 - <6>1. PICK k \in 1..(m+i) : F[k] = s BY <5>1, <3>22 - <6>2. k \in 1..(m+j) BY <3>1, SMT - <6>3. k \leq m+i BY SMT - <6>4. G[k] = F[k] BY <6>2, <6>3, <3>24 - <6> QED BY <6>1, <6>2, <6>4 - <5>2. CASE s = t - <6>1. m+j \in 1..(m+j) BY <3>3, SMT - <6>2. ~(m+j \leq m+i) BY <3>1, SMT - <6>3. G[m+j] = t BY <6>1, <6>2, <3>24 - <6> QED BY <6>1, <6>3, <5>2 - <5> QED BY <5>1, <5>2, <3>11 - <4>5. ASSUME NEW a \in 1..(m+j), NEW b \in 1..(m+j), G[a] = G[b] PROVE a = b - (*****************************************************************) - (* Surjective. 
*) - (*****************************************************************) - <5> G[a] = G[b] BY <4>5 - <5>1. CASE (a \leq m+i) /\ (b \leq m+i) BY <5>1, <4>1, <3>23, <3>24 - <5>2. CASE (a \leq m+i) /\ ~(b \leq m+i) BY <5>2, <4>2 (* impossible *) - <5>3. CASE ~(a \leq m+i) /\ (b \leq m+i) BY <5>3, <4>2 (* impossible *) - <5>4. CASE ~(a \leq m+i) /\ ~(b \leq m+i) BY <5>4, <3>1, SMT - <5> QED BY <5>1, <5>2, <5>3, <5>4 - <4> QED BY <4>3, <4>4, <4>5, Fun_IsBij - <3> QED BY <3>1, <3>25 - <2> HIDE DEF Prop - <2> QED BY <2>1, <2>2, NatInduction - - <1> QED BY <1>1, <1>2, <1>3, <1>4 - -*) - - - -============================================================================= -\* Modification History -\* Last modified Tue Jul 09 19:00:04 CEST 2013 by merz -\* Last modified Tue Jun 11 12:30:05 CEST 2013 by bhargav -\* Last modified Fri May 31 15:27:41 CEST 2013 by bhargav -\* Last modified Fri May 03 12:55:32 PDT 2013 by tomr -\* Created Thu Apr 11 10:36:10 PDT 2013 by tomr diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/Jections.tla b/x/ccv/provider/keyguard/prototyping/tla/library/Jections.tla deleted file mode 100644 index cb58eac0b4..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/Jections.tla +++ /dev/null @@ -1,48 +0,0 @@ ------------------------------- MODULE Jections ------------------------------ -(***************************************************************************) -(* `^{\large\bf \vspace{12pt} *) -(* Definition of injection, surjection, and bijection. *) -(* \vspace{12pt}}^' *) -(***************************************************************************) - - -(***************************************************************************) -(* A map is an injection iff each element in the domain maps to a distinct *) -(* element in the range. *) -(***************************************************************************) -Injection(S,T) == { M \in [S -> T] : \A a,b \in S : M[a] = M[b] => a = b } - - -(***************************************************************************) -(* A map is a surjection iff for each element in the range there is some *) -(* element in the domain that maps to it. *) -(***************************************************************************) -Surjection(S,T) == { M \in [S -> T] : \A t \in T : \E s \in S : M[s] = t } - - -(***************************************************************************) -(* A map is a bijection iff it is both an injection and a surjection. *) -(***************************************************************************) -Bijection(S,T) == Injection(S,T) \cap Surjection(S,T) - - -(***************************************************************************) -(* An injection, surjection, or bijection exists if the corresponding set *) -(* is nonempty. *) -(***************************************************************************) -ExistsInjection(S,T) == Injection(S,T) # {} -ExistsSurjection(S,T) == Surjection(S,T) # {} -ExistsBijection(S,T) == Bijection(S,T) # {} - - -(***************************************************************************) -(* The inverse of a jection. 
*) -(***************************************************************************) -JectionInverse(S,T,M) == [t \in T |-> CHOOSE s \in S : M[s] = t] - -JectionInverseSets(S, T, M, B) == { s \in S : M[s] \in B } -============================================================================= -\* Modification History -\* Last modified Wed Jun 05 12:14:19 CEST 2013 by bhargav -\* Last modified Fri May 03 12:55:35 PDT 2013 by tomr -\* Created Thu Apr 11 10:30:48 PDT 2013 by tomr diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/NaturalsInduction.tla b/x/ccv/provider/keyguard/prototyping/tla/library/NaturalsInduction.tla deleted file mode 100755 index 219853474c..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/NaturalsInduction.tla +++ /dev/null @@ -1,210 +0,0 @@ -------------------------- MODULE NaturalsInduction ------------------------- -(***************************************************************************) -(* This module contains useful theorems for inductive proofs and recursive *) -(* definitions over the naturals. *) -(* *) -(* Some of the statements of the theorems are decomposed in terms of *) -(* definitions. This is done for two reasons: *) -(* *) -(* - It makes it easier for the backends to instantiate the theorems *) -(* when those definitions are not expanded. *) -(* *) -(* - It can be convenient when writing proofs to use those definitions *) -(* rather than having to write out their expansions. *) -(* *) -(* The proofs of these theorems appear in module NaturalsInduction\_proofs.*) -(***************************************************************************) -EXTENDS Integers, TLAPS - -(***************************************************************************) -(* The following is the simple statement of inductions over the naturals. *) -(* For predicates P defined by a moderately complex operator, it is often *) -(* useful to hide the operator definition before using this theorem. That *) -(* is, you first define a suitable operator P (not necessarily by that *) -(* name), prove the two hypotheses of the theorem, and then hide the *) -(* definition of P when using the theorem. *) -(***************************************************************************) -THEOREM NatInduction == - ASSUME NEW P(_), - P(0), - \A n \in Nat : P(n) => P(n+1) - PROVE \A n \in Nat : P(n) - -(***************************************************************************) -(* A useful corollary of NatInduction *) -(***************************************************************************) -THEOREM DownwardNatInduction == - ASSUME NEW P(_), NEW m \in Nat, P(m), - \A n \in 1 .. m : P(n) => P(n-1) - PROVE P(0) - -(***************************************************************************) -(* The following theorem expresses a stronger induction principle, *) -(* also known as course-of-values induction, where the induction *) -(* hypothesis is available for all strictly smaller natural numbers. *) -(***************************************************************************) -THEOREM GeneralNatInduction == - ASSUME NEW P(_), - \A n \in Nat : (\A m \in 0..(n-1) : P(m)) => P(n) - PROVE \A n \in Nat : P(n) - -(***************************************************************************) -(* The following theorem expresses the ``least-number principle'': *) -(* if P(n) is true for some natural number n then there is a *) -(* smallest natural number for which P is true. 
It could be derived in *) -(* module WellFoundedInduction as a corollary of the fact that the natural *) -(* numbers are well ordered, but we give a direct proof. *) -(***************************************************************************) -THEOREM SmallestNatural == - ASSUME NEW P(_), NEW n \in Nat, P(n) - PROVE \E m \in Nat : /\ P(m) - /\ \A k \in 0 .. m-1 : ~ P(k) - -(***************************************************************************) -(* The following theorem says that a recursively defined function f over *) -(* the natural numbers is well-defined if for every n \in Nat the *) -(* definition of f[n] depends only on arguments smaller than n. *) -(***************************************************************************) -THEOREM RecursiveFcnOfNat == - ASSUME NEW Def(_,_), - ASSUME NEW n \in Nat, NEW g, NEW h, - \A i \in 0..(n-1) : g[i] = h[i] - PROVE Def(g, n) = Def(h, n) - PROVE LET f[n \in Nat] == Def(f, n) - IN f = [n \in Nat |-> Def(f, n)] - - -(***************************************************************************) -(* The following theorem NatInductiveDef is what you use to justify a *) -(* function defined by primitive recursion over the naturals. *) -(***************************************************************************) -NatInductiveDefHypothesis(f, f0, Def(_,_)) == - (f = CHOOSE g : g = [i \in Nat |-> IF i = 0 THEN f0 ELSE Def(g[i-1], i)]) -NatInductiveDefConclusion(f, f0, Def(_,_)) == - f = [i \in Nat |-> IF i = 0 THEN f0 ELSE Def(f[i-1], i)] - -THEOREM NatInductiveDef == - ASSUME NEW Def(_,_), NEW f, NEW f0, - NatInductiveDefHypothesis(f, f0, Def) - PROVE NatInductiveDefConclusion(f, f0, Def) - - -(***************************************************************************) -(* The following two theorems allow you to prove the type of a recursively *) -(* defined function over the natural numbers. *) -(***************************************************************************) -THEOREM RecursiveFcnOfNatType == - ASSUME NEW f, NEW S, NEW Def(_,_), f = [n \in Nat |-> Def(f,n)], - ASSUME NEW n \in Nat, NEW g, \A i \in 0 .. n-1 : g[i] \in S - PROVE Def(g,n) \in S - PROVE f \in [Nat -> S] - -THEOREM NatInductiveDefType == - ASSUME NEW Def(_,_), NEW S, NEW f, NEW f0 \in S, - NatInductiveDefConclusion(f, f0, Def), - f0 \in S, - \A v \in S, n \in Nat \ {0} : Def(v, n) \in S - PROVE f \in [Nat -> S] - -(***************************************************************************) -(* The following theorems show uniqueness of functions recursively defined *) -(* over Nat. *) -(***************************************************************************) -THEOREM RecursiveFcnOfNatUnique == - ASSUME NEW Def(_,_), NEW f, NEW g, - f = [n \in Nat |-> Def(f,n)], - g = [n \in Nat |-> Def(g,n)], - ASSUME NEW n \in Nat, NEW ff, NEW gg, - \A i \in 0..(n-1) : ff[i] = gg[i] - PROVE Def(ff, n) = Def(gg, n) - PROVE f = g - -THEOREM NatInductiveUnique == - ASSUME NEW Def(_,_), NEW f, NEW g, NEW f0, - NatInductiveDefConclusion(f, f0, Def), - NatInductiveDefConclusion(g, f0, Def) - PROVE f = g - -(***************************************************************************) -(* The following theorems are analogous to the preceding ones but for *) -(* functions defined over intervals of natural numbers. 
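\* For example (an illustrative sketch, not part of the original module; the
\* operator name facts10 is assumed only for this aside), a table of
\* factorials over the interval 1 .. 10 fits this scheme with c = 1 and
\* Def(v, i) = i * v:
facts10 == CHOOSE g : g = [i \in 1..10 |-> IF i = 1 THEN 1 ELSE i * g[i-1]]
\* This is precisely FiniteNatInductiveDefHypothesis(facts10, 1, LAMBDA v, i : i * v, 1, 10),
\* so FiniteNatInductiveDef (below) yields the corresponding recursion
\* equation, and FiniteNatInductiveDefType then gives facts10 \in [1..10 -> Nat].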
*) -(***************************************************************************) - -FiniteNatInductiveDefHypothesis(f, c, Def(_,_), m, n) == - (f = CHOOSE g : g = [i \in m..n |-> IF i = m THEN c ELSE Def(g[i-1], i)]) -FiniteNatInductiveDefConclusion(f, c, Def(_,_), m, n) == - f = [i \in m..n |-> IF i = m THEN c ELSE Def(f[i-1], i)] - -THEOREM FiniteNatInductiveDef == - ASSUME NEW Def(_,_), NEW f, NEW c, NEW m \in Nat, NEW n \in Nat, - FiniteNatInductiveDefHypothesis(f, c, Def, m, n) - PROVE FiniteNatInductiveDefConclusion(f, c, Def, m, n) - -THEOREM FiniteNatInductiveDefType == - ASSUME NEW S, NEW Def(_,_), NEW f, NEW c \in S, NEW m \in Nat, NEW n \in Nat, - FiniteNatInductiveDefConclusion(f, c, Def, m, n), - \A v \in S, i \in (m+1) .. n : Def(v,i) \in S - PROVE f \in [m..n -> S] - -THEOREM FiniteNatInductiveUnique == - ASSUME NEW Def(_,_), NEW f, NEW g, NEW c, NEW m \in Nat, NEW n \in Nat, - FiniteNatInductiveDefConclusion(f, c, Def, m, n), - FiniteNatInductiveDefConclusion(g, c, Def, m, n) - PROVE f = g - -============================================================================= -(***************************************************************************) -(* The following theorems are analogous to the preceding ones but for *) -(* functions defined over intervals of natural numbers. *) -(***************************************************************************) - -FiniteNatInductiveDefHypothesis(f, c, Def(_,_), m, n) == - (f = CHOOSE g : g = [i \in m..n |-> IF i = m THEN c ELSE Def(g[i-1], i)]) -FiniteNatInductiveDefConclusion(f, c, Def(_,_), m, n) == - f = [i \in m..n |-> IF i = m THEN c ELSE Def(f[i-1], i)] - -THEOREM FiniteNatInductiveDef == - ASSUME NEW Def(_,_), NEW f, NEW c, NEW m \in Nat, NEW n \in Nat, - FiniteNatInductiveDefHypothesis(f, c, Def, m, n) - PROVE FiniteNatInductiveDefConclusion(f, c, Def, m, n) - -THEOREM FiniteNatInductiveDefType == - ASSUME NEW S, NEW Def(_,_), NEW f, NEW c \in S, NEW m \in Nat, NEW n \in Nat, - FiniteNatInductiveDefConclusion(f, c, Def, m, n), - \A v \in S, i \in (m+1) .. n : Def(v,i) \in S - PROVE f \in [m..n -> S] - -THEOREM FiniteNatInductiveUnique == - ASSUME NEW Def(_,_), NEW f, NEW g, NEW c, NEW m \in Nat, NEW n \in Nat, - FiniteNatInductiveDefConclusion(f, c, Def, m, n), - FiniteNatInductiveDefConclusion(g, c, Def, m, n) - PROVE f = g - -(***************************************************************************) -(* The following example shows how this module is used. *) -(***************************************************************************) - -factorial[n \in Nat] == IF n = 0 THEN 1 ELSE n * factorial[n-1] - -THEOREM FactorialDefConclusion == NatInductiveDefConclusion(factorial, 1, LAMBDA v,n : n*v) -<1>1. NatInductiveDefHypothesis(factorial, 1, LAMBDA v,n : n*v) - BY DEF NatInductiveDefHypothesis, factorial -<1>2. QED - BY <1>1, NatInductiveDef - -THEOREM FactorialDef == \A n \in Nat : factorial[n] = IF n = 0 THEN 1 ELSE n * factorial[n-1] -BY FactorialDefConclusion DEFS NatInductiveDefConclusion - -THEOREM FactorialType == factorial \in [Nat -> Nat] -<1>1. \A v \in Nat, n \in Nat \ {0} : n * v \in Nat - OBVIOUS -<1>2. 
QED - BY <1>1, 1 \in Nat, NatInductiveDefType, FactorialDefConclusion, Isa - -============================================================================= -\* Modification History -\* Last modified Thu May 08 12:29:46 CEST 2014 by merz -\* Last modified Tue Oct 15 12:06:48 CEST 2013 by shaolin -\* Last modified Sat Nov 26 08:49:59 CET 2011 by merz -\* Last modified Mon Nov 07 08:58:05 PST 2011 by lamport -\* Created Mon Oct 31 02:52:05 PDT 2011 by lamport diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/NaturalsInduction_proofs.tla b/x/ccv/provider/keyguard/prototyping/tla/library/NaturalsInduction_proofs.tla deleted file mode 100644 index fa2cb70bef..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/NaturalsInduction_proofs.tla +++ /dev/null @@ -1,454 +0,0 @@ ----------------------- MODULE NaturalsInduction_proofs ---------------------- -(***************************************************************************) -(* This module contains useful theorems for inductive proofs and recursive *) -(* definitions over the naturals. *) -(* *) -(* Some of the statements of the theorems are decomposed in terms of *) -(* definitions. This is done for two reasons: *) -(* *) -(* - It makes it easier for the backends to instantiate the theorems *) -(* when those definitions are not expanded. *) -(* *) -(* - It can be convenient when writing proofs to use those definitions *) -(* rather than having to write out their expansions. *) -(***************************************************************************) -EXTENDS Integers, TLAPS - -(***************************************************************************) -(* The following is the simple statement of inductions over the naturals. *) -(* For predicates P defined by a moderately complex operator, it is often *) -(* useful to hide the operator definition before using this theorem. That *) -(* is, you first define a suitable operator P (not necessarily by that *) -(* name), prove the two hypotheses of the theorem, and then hide the *) -(* definition of P when using the theorem. *) -(***************************************************************************) -THEOREM NatInduction == - ASSUME NEW P(_), - P(0), - \A n \in Nat : P(n) => P(n+1) - PROVE \A n \in Nat : P(n) -BY IsaM("(intro natInduct, auto)") - -(***************************************************************************) -(* A useful corollary of NatInduction *) -(***************************************************************************) -THEOREM DownwardNatInduction == - ASSUME NEW P(_), NEW m \in Nat, P(m), - \A n \in 1 .. m : P(n) => P(n-1) - PROVE P(0) -<1>. DEFINE Q(i) == i \leq m => P(m-i) -<1>1. Q(0) OBVIOUS -<1>2. ASSUME NEW n \in Nat, Q(n) - PROVE Q(n+1) - BY <1>2 -<1>3. \A n \in Nat : Q(n) BY <1>1, <1>2, NatInduction, Isa -<1>. QED BY <1>3, Isa - -(***************************************************************************) -(* The following theorem expresses a stronger induction principle, *) -(* also known as course-of-values induction, where the induction *) -(* hypothesis is available for all strictly smaller natural numbers. *) -(***************************************************************************) -THEOREM GeneralNatInduction == - ASSUME NEW P(_), - \A n \in Nat : (\A m \in 0..(n-1) : P(m)) => P(n) - PROVE \A n \in Nat : P(n) -<1> DEFINE Q(n) == \A m \in 0..n : P(m) -<1>1. Q(0) BY SMT -<1>2. \A n \in Nat : Q(n) => Q(n+1) BY SMT -<1>3. \A n \in Nat : Q(n) BY <1>1, <1>2, NatInduction, Isa -<1>4. 
QED BY ONLY <1>3, SMT - -(***************************************************************************) -(* The following theorem expresses the ``least-number principle'': *) -(* if P(n) is true for some natural number n then there is a *) -(* smallest natural number for which P is true. It could be derived in *) -(* module WellFoundedInduction as a corollary of the fact that the natural *) -(* numbers are well ordered, but we give a direct proof. *) -(***************************************************************************) -THEOREM SmallestNatural == - ASSUME NEW P(_), NEW n \in Nat, P(n) - PROVE \E m \in Nat : /\ P(m) - /\ \A k \in 0 .. m-1 : ~ P(k) -<1>. DEFINE Q(k) == ~ P(k) -<1>. SUFFICES ASSUME \A m \in Nat : P(m) => \E k \in 0 .. m-1 : P(k) - PROVE \A m \in Nat : Q(m) - OBVIOUS -<1>1. ASSUME NEW l \in Nat, \A k \in 0 .. l-1 : Q(k) - PROVE Q(l) - BY <1>1 -<1>. HIDE DEF Q -<1>. QED BY ONLY <1>1, GeneralNatInduction, Isa - -(***************************************************************************) -(* The following theorem says that a recursively defined function f over *) -(* the natural numbers is well-defined if for every n \in Nat the *) -(* definition of f[n] depends only on arguments smaller than n. *) -(***************************************************************************) -THEOREM RecursiveFcnOfNat == - ASSUME NEW Def(_,_), - ASSUME NEW n \in Nat, NEW g, NEW h, - \A i \in 0..(n-1) : g[i] = h[i] - PROVE Def(g, n) = Def(h, n) - PROVE LET f[n \in Nat] == Def(f, n) - IN f = [n \in Nat |-> Def(f, n)] -<1>. SUFFICES \E ff : ff = [n \in Nat |-> Def(ff, n)] - OBVIOUS - (*************************************************************************) - (* The strategy of the proof is to define a sequence F of approximations *) - (* such that F[n] is a function with domain 0 .. n-1 that computes *) - (* F[n][i] by applying the definition to the preceding approximation *) - (* function F[n-1]. *) - (*************************************************************************) -<1>. DEFINE F[n \in Nat] == [i \in 0 .. n-1 |-> Def(F[n-1], i)] - f[n \in Nat] == F[n+1][n] - - (*************************************************************************) - (* We first show that F itself is well-defined by diagonalization *) - (* over functions that are defined over finite intervals of integers. *) - (*************************************************************************) -<1>1. F = [n \in Nat |-> [i \in 0 .. n-1 |-> Def(F[n-1], i)]] - <2>. SUFFICES \E FF : FF = [n \in Nat |-> [i \in 0 .. n-1 |-> Def(FF[n-1], i)]] - OBVIOUS - <2>. DEFINE P(g,k) == g = [n \in 0 .. k |-> [i \in 0 .. n-1 |-> Def(g[n-1], i)]] - G(k) == CHOOSE g : P(g,k) - FF == [n \in Nat |-> [i \in 0 .. n-1 |-> G(n)[n][i] ]] - <2>0. ASSUME NEW g, NEW k \in Nat, P(g,k), - NEW n \in 0 .. k, NEW i \in 0 .. n-1 - PROVE g[n][i] = Def(g[n-1], i) - <3>. DEFINE gg == [m \in 0 .. k |-> [j \in 0 .. m-1 |-> Def(g[m-1], j)]] - <3>1. gg[n][i] = Def(g[n-1],i) OBVIOUS - <3>2. g = gg BY <2>0, Zenon - <3>. QED BY <3>1, <3>2, Zenon - <2>1. \A k \in Nat : \E g : P(g,k) - <3>. DEFINE Q(k) == \E g : P(g,k) - <3>. SUFFICES \A k \in Nat : Q(k) OBVIOUS - <3>1. Q(0) - <4>. DEFINE g0 == [n \in {0} |-> [i \in {} |-> {}]] - <4>1. P(g0, 0) OBVIOUS - <4>. QED BY <4>1 - <3>2. ASSUME NEW k \in Nat, Q(k) - PROVE Q(k+1) - <4>1. PICK g : P(g,k) BY <3>2 - <4>1a. ASSUME NEW n \in 0 .. k, NEW i \in 0 .. n-1 - PROVE g[n][i] = Def(g[n-1], i) - BY <4>1, <2>0 - <4>. DEFINE h == [n \in 0 .. k+1 |-> [i \in 0 .. n-1 |-> Def(g[n-1], i) ]] - <4>2. h = [n \in 0 .. 
k+1 |-> [i \in 0 .. n-1 |-> Def(h[n-1], i)]] - <5>. SUFFICES ASSUME NEW n \in 0 .. k+1, NEW i \in 0 .. n-1 - PROVE h[n][i] = Def(h[n-1], i) - BY Zenon - <5>1. h[n][i] = Def(g[n-1], i) OBVIOUS - <5>2. ASSUME NEW j \in 0 .. i-1 - PROVE g[n-1][j] = h[n-1][j] - BY <4>1a - <5>. HIDE DEF h - <5>3. Def(g[n-1],i) = Def(h[n-1],i) BY <5>2 - <5>. QED BY <5>1, <5>3 - <4>. HIDE DEF h - <4>. QED BY <4>2 - <3>. HIDE DEF Q - <3>. QED BY <3>1, <3>2, NatInduction, Blast - <2>2. \A k \in Nat : P(G(k), k) BY <2>1 - <2>3. \A k \in Nat : \A l \in 0 .. k : \A i \in 0 .. l-1 : \A g,h : - P(g,k) /\ P(h,l) => g[l][i] = h[l][i] - <3>. DEFINE Q(k) == \A l \in 0 .. k : \A i \in 0 .. l-1 : \A g,h : - P(g,k) /\ P(h,l) => g[l][i] = h[l][i] - <3>. SUFFICES \A k \in Nat : Q(k) OBVIOUS - <3>0. Q(0) OBVIOUS - <3>1. ASSUME NEW k \in Nat, Q(k) - PROVE Q(k+1) - <4>. HIDE DEF P - <4>. SUFFICES ASSUME NEW l \in 0 .. k+1, NEW i \in 0 .. l-1, NEW g, NEW h, - P(g,k+1), P(h,l) - PROVE g[l][i] = h[l][i] - OBVIOUS - <4>1. /\ g[l][i] = Def(g[l-1],i) - /\ h[l][i] = Def(h[l-1],i) - BY <2>0 - <4>. DEFINE gg == [nn \in 0 .. k |-> [ii \in 0 .. nn-1 |-> Def(g[nn-1],ii)]] - hh == [nn \in 0 .. l-1 |-> [ii \in 0 .. nn-1 |-> Def(h[nn-1],ii)]] - <4>2. P(gg,k) - <5>1. ASSUME NEW nn \in 0 .. k, NEW j \in 0 .. nn-1 - PROVE gg[nn-1] = g[nn-1] - <6>. /\ nn-1 \in 0 .. k - /\ nn-1 \in 0 .. k+1 - OBVIOUS - <6>1. gg[nn-1] = [ii \in 0 .. nn-2 |-> Def(g[nn-2],ii)] OBVIOUS - <6>2. g[nn-1] = [ii \in 0 .. (nn-1)-1 |-> Def(g[(nn-1)-1],ii)] BY DEF P - <6>. QED BY <6>1, <6>2 - <5>. QED BY <5>1 DEF P - <4>3. P(hh,l-1) - <5>1. ASSUME NEW nn \in 0 .. l-1, NEW j \in 0 .. nn-1 - PROVE hh[nn-1] = h[nn-1] - <6>. /\ nn-1 \in 0 .. l-1 - /\ nn-1 \in 0 .. l - OBVIOUS - <6>1. hh[nn-1] = [ii \in 0 .. nn-2 |-> Def(h[nn-2],ii)] OBVIOUS - <6>2. h[nn-1] = [ii \in 0 .. (nn-1)-1 |-> Def(h[(nn-1)-1],ii)] BY DEF P - <6>. QED BY <6>1, <6>2 - <5>. QED BY <5>1 DEF P - <4>4. \A m \in 0 .. i-1 : gg[l-1][m] = hh[l-1][m] BY <3>1, <4>2, <4>3 - <4>5. \A m \in 0 .. i-1 : g[l-1][m] = gg[l-1][m] BY <2>0 - <4>6. \A m \in 0 .. i-1 : h[l-1][m] = hh[l-1][m] BY <2>0 - <4>7. \A m \in 0 .. i-1 : g[l-1][m] = h[l-1][m] BY <4>4, <4>5, <4>6 - <4>8. Def(g[l-1],i) = Def(h[l-1],i) BY <4>7 - <4>. QED BY <4>8, <2>0 - <3>. HIDE DEF Q - <3>. QED BY <3>0, <3>1, NatInduction, Blast - <2>4. FF = [n \in Nat |-> [i \in 0 .. n-1 |-> Def(FF[n-1], i)]] - <3>. HIDE DEF G - <3>. SUFFICES ASSUME NEW k \in Nat, NEW i \in 0 .. k-1 - PROVE FF[k][i] = Def(FF[k-1], i) - OBVIOUS - <3>1. FF[k][i] = G(k)[k][i] OBVIOUS - <3>2. G(k)[k][i] = Def(G(k)[k-1], i) BY <2>2 - <3>. HIDE DEF P - <3>3. \A j \in 0 .. i-1 : G(k)[k-1][j] = FF[k-1][j] BY <2>2, <2>3 - <3>. HIDE DEF FF - <3>4. Def(G(k)[k-1], i) = Def(FF[k-1], i) BY <3>3 - <3>. QED BY <3>1, <3>2, <3>4 - <2>. QED BY <2>4 - -<1>. HIDE DEF F \* from now on, use step <1>1 rather than the definition - - (*************************************************************************) - (* The following step is a trivial consequence of <1>1 but the backend *) - (* provers are currently unable to prove it directly. *) - (*************************************************************************) -<1>2. ASSUME NEW n \in Nat, NEW i \in 0 .. n-1 - PROVE F[n][i] = Def(F[n-1], i) - <2>. DEFINE G == [m \in Nat |-> [j \in 0 .. m-1 |-> Def(F[m-1],j)]] - <2>1. G[n][i] = Def(F[n-1],i) OBVIOUS - <2>2. F = G BY <1>1, Zenon - <2>. 
QED BY <2>1, <2>2, Zenon - - (*************************************************************************) - (* Any two approximations F[n] and F[m] agree for arguments where they *) - (* are both defined. *) - (*************************************************************************) -<1>. DEFINE P(n) == \A m \in 0 .. n : \A i \in 0 .. m-1 : F[n][i] = F[m][i] -<1>3. \A n \in Nat : P(n) - <2>1. ASSUME NEW n \in Nat, \A k \in 0 .. n-1 : P(k) - PROVE P(n) - <3>. SUFFICES ASSUME NEW m \in 0 .. n, NEW i \in 0 .. m-1 - PROVE F[n][i] = F[m][i] - OBVIOUS - <3>2. CASE m = n BY <3>2 - <3>3. CASE n = 0 BY <3>3, SMT - <3>4. CASE 0 < n /\ m \in 0 .. n-1 - <4>1. F[n][i] = Def(F[n-1],i) BY <1>2 - <4>2. \A j \in 0 .. i-1 : F[n-1][j] = F[m-1][j] BY <2>1, <3>4 - <4>3. Def(F[n-1],i) = Def(F[m-1],i) BY <4>2 - <4>4. Def(F[m-1],i) = F[m][i] BY <1>2 - <4>. QED BY <4>1, <4>3, <4>4 - <3>. QED BY <3>2, <3>3, <3>4, SMT - <2>. HIDE DEF P - <2>. QED BY <2>1, GeneralNatInduction, Blast - - (*************************************************************************) - (* The assertion follows immediately from the two preceding steps. *) - (*************************************************************************) -<1>4. f = [n \in Nat |-> Def(f,n)] - <2>. SUFFICES ASSUME NEW n \in Nat - PROVE f[n] = Def(f,n) - OBVIOUS - <2>1. f[n] = Def(F[n], n) BY <1>2 - <2>2. \A i \in 0 .. n-1 : F[n][i] = f[i] BY <1>3 - <2>3. Def(F[n],n) = Def(f,n) BY <2>2 - <2>. QED BY <2>1, <2>3 - -<1>. QED BY <1>4 - - -(***************************************************************************) -(* The following theorem NatInductiveDef is what you use to justify a *) -(* function defined by primitive recursion over the naturals. *) -(***************************************************************************) -NatInductiveDefHypothesis(f, f0, Def(_,_)) == - (f = CHOOSE g : g = [i \in Nat |-> IF i = 0 THEN f0 ELSE Def(g[i-1], i)]) -NatInductiveDefConclusion(f, f0, Def(_,_)) == - f = [i \in Nat |-> IF i = 0 THEN f0 ELSE Def(f[i-1], i)] - -THEOREM NatInductiveDef == - ASSUME NEW Def(_,_), NEW f, NEW f0, - NatInductiveDefHypothesis(f, f0, Def) - PROVE NatInductiveDefConclusion(f, f0, Def) -<1>. DEFINE PRDef(g,n) == IF n = 0 THEN f0 ELSE Def(g[n-1], n) - ff[n \in Nat] == PRDef(ff,n) -<1>1. ASSUME NEW n \in Nat, NEW g, NEW h, - \A i \in 0 .. n-1 : g[i] = h[i] - PROVE PRDef(g,n) = PRDef(h,n) - BY <1>1, Z3 -<1>. HIDE DEF PRDef -<1>2. ff = [n \in Nat |-> PRDef(ff,n)] BY <1>1, RecursiveFcnOfNat, Isa -<1>. USE DEF PRDef -<1>3. ff = f BY DEF NatInductiveDefHypothesis -<1>. HIDE DEF ff -<1>. QED BY <1>2, <1>3 DEF NatInductiveDefConclusion - -(***************************************************************************) -(* The following two theorems allow you to prove the type of a recursively *) -(* defined function over the natural numbers. *) -(***************************************************************************) -THEOREM RecursiveFcnOfNatType == - ASSUME NEW f, NEW S, NEW Def(_,_), f = [n \in Nat |-> Def(f,n)], - ASSUME NEW n \in Nat, NEW g, \A i \in 0 .. n-1 : g[i] \in S - PROVE Def(g,n) \in S - PROVE f \in [Nat -> S] -<1>1. SUFFICES \A n \in Nat : f[n] \in S - OBVIOUS -<1>2. ASSUME NEW n \in Nat, \A i \in 0 .. n-1 : f[i] \in S - PROVE f[n] \in S - BY <1>2, Zenon -<1>. QED BY <1>2, GeneralNatInduction, Isa - -THEOREM NatInductiveDefType == - ASSUME NEW Def(_,_), NEW S, NEW f, NEW f0 \in S, - NatInductiveDefConclusion(f, f0, Def), - f0 \in S, - \A v \in S, n \in Nat \ {0} : Def(v, n) \in S - PROVE f \in [Nat -> S] -<1>. 
USE DEF NatInductiveDefConclusion -<1> SUFFICES \A n \in Nat : f[n] \in S - OBVIOUS -<1>1. f[0] \in S OBVIOUS -<1>2. ASSUME NEW n \in Nat, f[n] \in S - PROVE f[n+1] \in S - <2>1. /\ n+1 \in Nat \ {0} - /\ (n+1)-1 = n - OBVIOUS - <2>. QED BY <2>1, <1>2 -<1>. QED BY <1>1, <1>2, NatInduction, Isa - -(***************************************************************************) -(* The following theorems show uniqueness of functions recursively defined *) -(* over Nat. *) -(***************************************************************************) -THEOREM RecursiveFcnOfNatUnique == - ASSUME NEW Def(_,_), NEW f, NEW g, - f = [n \in Nat |-> Def(f,n)], - g = [n \in Nat |-> Def(g,n)], - ASSUME NEW n \in Nat, NEW ff, NEW gg, - \A i \in 0..(n-1) : ff[i] = gg[i] - PROVE Def(ff, n) = Def(gg, n) - PROVE f = g -<1>1. SUFFICES \A n \in Nat : f[n] = g[n] - OBVIOUS -<1>2. ASSUME NEW n \in Nat, \A i \in 0 .. n-1 : f[i] = g[i] - PROVE f[n] = g[n] - <2>1. Def(f,n) = Def(g,n) BY <1>2 - <2>. QED BY <2>1, Zenon -<1>. QED - BY <1>2, GeneralNatInduction, Isa - -THEOREM NatInductiveUnique == - ASSUME NEW Def(_,_), NEW f, NEW g, NEW f0, - NatInductiveDefConclusion(f, f0, Def), - NatInductiveDefConclusion(g, f0, Def) - PROVE f = g -<1>. USE DEF NatInductiveDefConclusion -<1>1. SUFFICES \A n \in Nat : f[n] = g[n] - OBVIOUS -<1>2. f[0] = g[0] OBVIOUS -<1>3. ASSUME NEW n \in Nat, f[n] = g[n] - PROVE f[n+1] = g[n+1] - BY <1>3 -<1>. QED - BY <1>2, <1>3, NatInduction, Isa - -(***************************************************************************) -(* The following theorems are analogous to the preceding ones but for *) -(* functions defined over intervals of natural numbers. *) -(***************************************************************************) - -FiniteNatInductiveDefHypothesis(f, c, Def(_,_), m, n) == - (f = CHOOSE g : g = [i \in m..n |-> IF i = m THEN c ELSE Def(g[i-1], i)]) -FiniteNatInductiveDefConclusion(f, c, Def(_,_), m, n) == - f = [i \in m..n |-> IF i = m THEN c ELSE Def(f[i-1], i)] - -THEOREM FiniteNatInductiveDef == - ASSUME NEW Def(_,_), NEW f, NEW c, NEW m \in Nat, NEW n \in Nat, - FiniteNatInductiveDefHypothesis(f, c, Def, m, n) - PROVE FiniteNatInductiveDefConclusion(f, c, Def, m, n) -<1>. DEFINE PRDef(g,i) == IF i <= m THEN c ELSE Def(g[i-1], i) - ff[i \in Nat] == PRDef(ff,i) - gg == [i \in m..n |-> ff[i]] -<1>1. ASSUME NEW i \in Nat, NEW g, NEW h, - \A j \in 0 .. i-1 : g[j] = h[j] - PROVE PRDef(g,i) = PRDef(h,i) - BY <1>1, Z3 -<1>. HIDE DEF PRDef -<1>2. ff = [i \in Nat |-> PRDef(ff,i)] - BY <1>1, RecursiveFcnOfNat, Isa -<1>. HIDE DEF ff -<1>. USE DEF PRDef -<1>3. gg = [i \in m..n |-> IF i=m THEN c ELSE Def(gg[i-1],i)] - BY <1>2, Z3 -<1>. HIDE DEF gg -<1>. QED - BY <1>3 DEF FiniteNatInductiveDefHypothesis, FiniteNatInductiveDefConclusion - -THEOREM FiniteNatInductiveDefType == - ASSUME NEW S, NEW Def(_,_), NEW f, NEW c \in S, NEW m \in Nat, NEW n \in Nat, - FiniteNatInductiveDefConclusion(f, c, Def, m, n), - \A v \in S, i \in (m+1) .. n : Def(v,i) \in S - PROVE f \in [m..n -> S] -<1>. USE DEF FiniteNatInductiveDefConclusion -<1>. DEFINE P(i) == i \in m..n => f[i] \in S -<1>1. SUFFICES \A i \in Nat : P(i) - OBVIOUS -<1>2. P(0) - OBVIOUS -<1>3. ASSUME NEW i \in Nat, P(i) - PROVE P(i+1) - BY <1>3 -<1>. QED - BY <1>2, <1>3, NatInduction, Isa - -THEOREM FiniteNatInductiveUnique == - ASSUME NEW Def(_,_), NEW f, NEW g, NEW c, NEW m \in Nat, NEW n \in Nat, - FiniteNatInductiveDefConclusion(f, c, Def, m, n), - FiniteNatInductiveDefConclusion(g, c, Def, m, n) - PROVE f = g -<1>. 
USE DEF FiniteNatInductiveDefConclusion -<1>. DEFINE P(i) == i \in m..n => f[i] = g[i] -<1>1. SUFFICES \A i \in Nat : P(i) - BY m..n \subseteq Nat -<1>2. P(0) - OBVIOUS -<1>3. ASSUME NEW i \in Nat, P(i) - PROVE P(i+1) - BY <1>3 -<1>. QED - BY <1>2, <1>3, NatInduction, Isa - -============================================================================= - -(***************************************************************************) -(* The following example shows how this module is used. *) -(***************************************************************************) - -factorial[n \in Nat] == IF n = 0 THEN 1 ELSE n * factorial[n-1] - -THEOREM FactorialDefConclusion == NatInductiveDefConclusion(factorial, 1, LAMBDA v,n : n*v) -<1>1. NatInductiveDefHypothesis(factorial, 1, LAMBDA v,n : n*v) - BY DEF NatInductiveDefHypothesis, factorial -<1>2. QED - BY <1>1, NatInductiveDef - -THEOREM FactorialDef == \A n \in Nat : factorial[n] = IF n = 0 THEN 1 ELSE n * factorial[n-1] -BY FactorialDefConclusion DEFS NatInductiveDefConclusion - -THEOREM FactorialType == factorial \in [Nat -> Nat] -<1>1. \A v \in Nat, n \in Nat \ {0} : n * v \in Nat - BY SMT -<1>2. QED - BY <1>1, 1 \in Nat, NatInductiveDefType, FactorialDefConclusion, Auto - -\* Modification History -\* Last modified Mon Oct 20 09:16:03 CEST 2014 by merz -\* Last modified Tue Oct 15 12:06:48 CEST 2013 by shaolin -\* Last modified Sat Nov 26 08:49:59 CET 2011 by merz -\* Last modified Mon Nov 07 08:58:05 PST 2011 by lamport -\* Created Mon Oct 31 02:52:05 PDT 2011 by lamport diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/RealTime.tla b/x/ccv/provider/keyguard/prototyping/tla/library/RealTime.tla deleted file mode 100644 index 1026c66a4b..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/RealTime.tla +++ /dev/null @@ -1,22 +0,0 @@ ------------------------------ MODULE RealTime ------------------------------- -EXTENDS Reals -VARIABLE now - -RTBound(A, v, D, E) == - LET TNext(t) == t' = IF <>_v \/ ~(ENABLED <>_v)' - THEN 0 - ELSE t + (now'-now) - - Timer(t) == (t=0) /\ [][TNext(t)]_<> - - MaxTime(t) == [](t \leq E) - - MinTime(t) == [][A => t \geq D]_v - IN \EE t : Timer(t) /\ MaxTime(t) /\ MinTime(t) ------------------------------------------------------------------------------ -RTnow(v) == LET NowNext == /\ now' \in {r \in Real : r > now} - /\ UNCHANGED v - IN /\ now \in Real - /\ [][NowNext]_now - /\ \A r \in Real : WF_now(NowNext /\ (now'>r)) -============================================================================= diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/SequenceTheorems.tla b/x/ccv/provider/keyguard/prototyping/tla/library/SequenceTheorems.tla deleted file mode 100644 index 790578210f..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/SequenceTheorems.tla +++ /dev/null @@ -1,636 +0,0 @@ ------------------------ MODULE SequenceTheorems ----------------------------- -(***************************************************************************) -(* This module contains a library of theorems about sequences and the *) -(* corresponding operations. 
*) -(***************************************************************************) -EXTENDS Sequences, Integers, WellFoundedInduction, Functions, TLAPS - - -(***************************************************************************) -(* Elementary properties about Seq(S) *) -(***************************************************************************) - -LEMMA SeqDef == \A S : Seq(S) = UNION {[1..n -> S] : n \in Nat} - -THEOREM ElementOfSeq == - ASSUME NEW S, NEW seq \in Seq(S), - NEW n \in 1..Len(seq) - PROVE seq[n] \in S - -THEOREM EmptySeq == - ASSUME NEW S - PROVE /\ << >> \in Seq(S) - /\ \A seq \in Seq(S) : (seq = << >>) <=> (Len(seq) = 0) - -THEOREM LenProperties == - ASSUME NEW S, NEW seq \in Seq(S) - PROVE /\ Len(seq) \in Nat - /\ seq \in [1..Len(seq) -> S] - /\ DOMAIN seq = 1 .. Len(seq) - -THEOREM ExceptSeq == - ASSUME NEW S, NEW seq \in Seq(S), NEW i \in 1 .. Len(seq), NEW e \in S - PROVE /\ [seq EXCEPT ![i] = e] \in Seq(S) - /\ Len([seq EXCEPT ![i] = e]) = Len(seq) - /\ \A j \in 1 .. Len(seq) : [seq EXCEPT ![i] = e][j] = IF j=i THEN e ELSE seq[j] - -THEOREM IsASeq == - ASSUME NEW n \in Nat, NEW e(_), NEW S, - \A i \in 1..n : e(i) \in S - PROVE [i \in 1..n |-> e(i)] \in Seq(S) - -THEOREM SeqEqual == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), - Len(s) = Len(t), \A i \in 1 .. Len(s) : s[i] = t[i] - PROVE s = t - -(*************************************************************************** - Concatenation (\o) And Properties -***************************************************************************) - -THEOREM ConcatProperties == - ASSUME NEW S, NEW s1 \in Seq(S), NEW s2 \in Seq(S) - PROVE /\ s1 \o s2 \in Seq(S) - /\ Len(s1 \o s2) = Len(s1) + Len(s2) - /\ \A i \in 1 .. Len(s1) + Len(s2) : (s1 \o s2)[i] = - IF i <= Len(s1) THEN s1[i] ELSE s2[i - Len(s1)] - -THEOREM ConcatEmptySeq == - ASSUME NEW S, NEW seq \in Seq(S) - PROVE /\ seq \o << >> = seq - /\ << >> \o seq = seq - -THEOREM ConcatAssociative == - ASSUME NEW S, NEW s1 \in Seq(S), NEW s2 \in Seq(S), NEW s3 \in Seq(S) - PROVE (s1 \o s2) \o s3 = s1 \o (s2 \o s3) - -THEOREM ConcatSimplifications == - ASSUME NEW S - PROVE /\ \A s,t \in Seq(S) : s \o t = s <=> t = <<>> - /\ \A s,t \in Seq(S) : s \o t = t <=> s = <<>> - /\ \A s,t \in Seq(S) : s \o t = <<>> <=> s = <<>> /\ t = <<>> - /\ \A s,t,u \in Seq(S) : s \o t = s \o u <=> t = u - /\ \A s,t,u \in Seq(S) : s \o u = t \o u <=> s = t - -(***************************************************************************) -(* SubSeq, Head and Tail *) -(***************************************************************************) - -THEOREM SubSeqProperties == - ASSUME NEW S, - NEW s \in Seq(S), - NEW m \in 1 .. Len(s)+1, - NEW n \in m-1 .. Len(s) - PROVE /\ SubSeq(s,m,n) \in Seq(S) - /\ Len(SubSeq(s, m, n)) = n-m+1 - /\ \A i \in 1 .. n-m+1 : SubSeq(s,m,n)[i] = s[m+i-1] - -THEOREM SubSeqEmpty == - ASSUME NEW s, NEW m \in Int, NEW n \in Int, n < m - PROVE SubSeq(s,m,n) = << >> - -THEOREM HeadTailProperties == - ASSUME NEW S, - NEW seq \in Seq(S), seq # << >> - PROVE /\ Head(seq) \in S - /\ Tail(seq) \in Seq(S) - /\ Len(Tail(seq)) = Len(seq)-1 - /\ \A i \in 1 .. Len(Tail(seq)) : Tail(seq)[i] = seq[i+1] - -THEOREM TailIsSubSeq == - ASSUME NEW S, - NEW seq \in Seq(S), seq # << >> - PROVE Tail(seq) = SubSeq(seq, 2, Len(seq)) - -THEOREM SubSeqRestrict == - ASSUME NEW S, NEW seq \in Seq(S), NEW n \in 0 .. Len(seq) - PROVE SubSeq(seq, 1, n) = Restrict(seq, 1 .. n) - -THEOREM HeadTailOfSubSeq == - ASSUME NEW S, NEW seq \in Seq(S), - NEW m \in 1 .. Len(seq), NEW n \in m .. 
Len(seq) - PROVE /\ Head(SubSeq(seq,m,n)) = seq[m] - /\ Tail(SubSeq(seq,m,n)) = SubSeq(seq, m+1, n) - -THEOREM SubSeqRecursiveFirst == - ASSUME NEW S, NEW seq \in Seq(S), - NEW m \in 1 .. Len(seq), NEW n \in m .. Len(seq) - PROVE SubSeq(seq, m, n) = << seq[m] >> \o SubSeq(seq, m+1, n) - -THEOREM SubSeqRecursiveSecond == - ASSUME NEW S, NEW seq \in Seq(S), - NEW m \in 1 .. Len(seq), NEW n \in m .. Len(seq) - PROVE SubSeq(seq, m, n) = SubSeq(seq, m, n-1) \o << seq[n] >> - -THEOREM SubSeqFull == - ASSUME NEW S, NEW seq \in Seq(S) - PROVE SubSeq(seq, 1, Len(seq)) = seq - -(*****************************************************************************) -(* Adjacent subsequences can be concatenated to obtain a longer subsequence. *) -(*****************************************************************************) -THEOREM ConcatAdjacentSubSeq == - ASSUME NEW S, NEW seq \in Seq(S), - NEW m \in 1 .. Len(seq)+1, - NEW k \in m-1 .. Len(seq), - NEW n \in k .. Len(seq) - PROVE SubSeq(seq, m, k) \o SubSeq(seq, k+1, n) = SubSeq(seq, m, n) - -(***************************************************************************) -(* Append, InsertAt, Cons & RemoveAt *) -(* Append(seq, elt) appends element elt at the end of sequence seq *) -(* Cons(elt, seq) prepends element elt at the beginning of sequence seq *) -(* InsertAt(seq, i, elt) inserts element elt in the position i and pushes *) -(* the *) -(* original element at i to i+1 and so on *) -(* RemoveAt(seq, i) removes the element at position i *) -(***************************************************************************) - -THEOREM AppendProperties == - ASSUME NEW S, NEW seq \in Seq(S), NEW elt \in S - PROVE /\ Append(seq, elt) \in Seq(S) - /\ Append(seq, elt) # << >> - /\ Len(Append(seq, elt)) = Len(seq)+1 - /\ \A i \in 1.. Len(seq) : Append(seq, elt)[i] = seq[i] - /\ Append(seq, elt)[Len(seq)+1] = elt - -THEOREM AppendIsConcat == - ASSUME NEW S, NEW seq \in Seq(S), NEW elt \in S - PROVE Append(seq, elt) = seq \o <> - -THEOREM HeadTailAppend == - ASSUME NEW S, NEW seq \in Seq(S), NEW elt - PROVE /\ Head(Append(seq, elt)) = IF seq = <<>> THEN elt ELSE Head(seq) - /\ Tail(Append(seq, elt)) = IF seq = <<>> THEN <<>> ELSE Append(Tail(seq), elt) - -Cons(elt, seq) == <> \o seq - -THEOREM ConsProperties == - ASSUME NEW S, NEW seq \in Seq(S), NEW elt \in S - PROVE /\ Cons(elt, seq) \in Seq(S) - /\ Cons(elt, seq) # <<>> - /\ Len(Cons(elt, seq)) = Len(seq)+1 - /\ Head(Cons(elt, seq)) = elt - /\ Tail(Cons(elt, seq)) = seq - /\ Cons(elt, seq)[1] = elt - /\ \A i \in 1 .. Len(seq) : Cons(elt, seq)[i+1] = seq[i] - -THEOREM ConsEmpty == - \A x : Cons(x, << >>) = << x >> - -THEOREM ConsHeadTail == - ASSUME NEW S, NEW seq \in Seq(S), seq # << >> - PROVE Cons(Head(seq), Tail(seq)) = seq - -THEOREM ConsAppend == - ASSUME NEW S, NEW seq \in Seq(S), NEW x \in S, NEW y \in S - PROVE Cons(x, Append(seq, y)) = Append(Cons(x,seq), y) - -THEOREM ConsInjective == - ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW f \in S, NEW t \in Seq(S) - PROVE Cons(e,s) = Cons(f,t) <=> e = f /\ s = t - -InsertAt(seq,i,elt) == SubSeq(seq, 1, i-1) \o <> \o SubSeq(seq, i, Len(seq)) - -THEOREM InsertAtProperties == - ASSUME NEW S, NEW seq \in Seq(S), NEW i \in 1 .. Len(seq)+1, NEW elt \in S - PROVE /\ InsertAt(seq,i,elt) \in Seq(S) - /\ Len(InsertAt(seq,i,elt)) = Len(seq)+1 - /\ \A j \in 1 .. Len(seq)+1 : InsertAt(seq,i,elt)[j] = - IF j> THEN 0 ELSE Len(seq)-1 - /\ \A i \in 1 .. 
Len(seq)-1 : Front(seq)[i] = seq[i] - -THEOREM FrontOfEmpty == Front(<< >>) = << >> - -THEOREM LastProperties == - ASSUME NEW S, NEW seq \in Seq(S), seq # << >> - PROVE /\ Last(seq) \in S - /\ Append(Front(seq), Last(seq)) = seq - -THEOREM FrontLastOfSubSeq == - ASSUME NEW S, NEW seq \in Seq(S), - NEW m \in 1 .. Len(seq), NEW n \in m .. Len(seq) - PROVE /\ Front(SubSeq(seq,m,n)) = SubSeq(seq, m, n-1) - /\ Last(SubSeq(seq,m,n)) = seq[n] - -THEOREM FrontLastAppend == - ASSUME NEW S, NEW seq \in Seq(S), NEW e \in S - PROVE /\ Front(Append(seq, e)) = seq - /\ Last(Append(seq, e)) = e - -THEOREM AppendInjective == - ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW f \in S, NEW t \in Seq(S) - PROVE Append(s,e) = Append(t,f) <=> s = t /\ e = f - -(***************************************************************************) -(* As a corollary of the previous theorems it follows that a sequence is *) -(* either empty or can be obtained by appending an element to a sequence. *) -(***************************************************************************) -THEOREM SequenceEmptyOrAppend == - ASSUME NEW S, NEW seq \in Seq(S), seq # << >> - PROVE \E s \in Seq(S), elt \in S : seq = Append(s, elt) - -(***************************************************************************) -(* REVERSE SEQUENCE And Properties *) -(* Reverse(seq) --> Reverses the sequence seq *) -(***************************************************************************) - -Reverse(seq) == [j \in 1 .. Len(seq) |-> seq[Len(seq)-j+1] ] - -THEOREM ReverseProperties == - ASSUME NEW S, NEW seq \in Seq(S) - PROVE /\ Reverse(seq) \in Seq(S) - /\ Len(Reverse(seq)) = Len(seq) - /\ Reverse(Reverse(seq)) = seq - -THEOREM ReverseEmpty == Reverse(<< >>) = << >> - -THEOREM ReverseEqual == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), Reverse(s) = Reverse(t) - PROVE s = t - -THEOREM ReverseEmptyIffEmpty == - ASSUME NEW S, NEW seq \in Seq(S), Reverse(seq) = <<>> - PROVE seq = <<>> - -THEOREM ReverseConcat == - ASSUME NEW S, NEW s1 \in Seq(S), NEW s2 \in Seq(S) - PROVE Reverse(s1 \o s2) = Reverse(s2) \o Reverse(s1) - -THEOREM ReverseAppend == - ASSUME NEW S, NEW seq \in Seq(S), NEW e \in S - PROVE Reverse(Append(seq,e)) = Cons(e, Reverse(seq)) - -THEOREM ReverseCons == - ASSUME NEW S, NEW seq \in Seq(S), NEW e \in S - PROVE Reverse(Cons(e,seq)) = Append(Reverse(seq), e) - -THEOREM ReverseSingleton == \A x : Reverse(<< x >>) = << x >> - -THEOREM ReverseSubSeq == - ASSUME NEW S, NEW seq \in Seq(S), - NEW m \in 1..Len(seq), NEW n \in 1..Len(seq) - PROVE Reverse(SubSeq(seq, m , n)) = SubSeq(Reverse(seq), Len(seq)-n+1, Len(seq)-m+1) - -THEOREM ReversePalindrome == - ASSUME NEW S, NEW seq \in Seq(S), - Reverse(seq) = seq - PROVE Reverse(seq \o seq) = seq \o seq - -THEOREM LastEqualsHeadReverse == - ASSUME NEW S, NEW seq \in Seq(S), seq # << >> - PROVE Last(seq) = Head(Reverse(seq)) - -THEOREM ReverseFrontEqualsTailReverse == - ASSUME NEW S, NEW seq \in Seq(S), seq # << >> - PROVE Reverse(Front(seq)) = Tail(Reverse(seq)) - -(***************************************************************************) -(* Induction principles for sequences *) -(***************************************************************************) - -THEOREM SequencesInductionAppend == - ASSUME NEW P(_), NEW S, - P(<< >>), - \A s \in Seq(S), e \in S : P(s) => P(Append(s,e)) - PROVE \A seq \in Seq(S) : P(seq) - -THEOREM SequencesInductionCons == - ASSUME NEW P(_), NEW S, - P(<< >>), - \A s \in Seq(S), e \in S : P(s) => P(Cons(e,s)) - PROVE \A seq \in Seq(S) : P(seq) - 
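\* An illustrative use of SequencesInductionAppend (a sketch, not part of the
\* original module; the theorem name is assumed and the BY hints are
\* indicative only):
THEOREM NatSeqElementsNonNegative ==
    \A seq \in Seq(Nat) : \A i \in 1 .. Len(seq) : seq[i] >= 0
<1>. DEFINE P(s) == \A i \in 1 .. Len(s) : s[i] >= 0
<1>1. P(<< >>)
      OBVIOUS
<1>2. ASSUME NEW s \in Seq(Nat), NEW e \in Nat, P(s)
      PROVE  P(Append(s, e))
      BY <1>2, AppendProperties
<1>3. \A seq \in Seq(Nat) : P(seq)
      BY <1>1, <1>2, SequencesInductionAppend, Isa
<1>. QED
     BY <1>3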
-(***************************************************************************) -(* RANGE OF SEQUENCE *) -(***************************************************************************) - -THEOREM RangeOfSeq == - ASSUME NEW S, NEW seq \in Seq(S) - PROVE Range(seq) \in SUBSET S - -THEOREM RangeEquality == - ASSUME NEW S, NEW seq \in Seq(S) - PROVE Range(seq) = { seq[i] : i \in 1 .. Len(seq) } - -(* The range of the reverse sequence equals that of the original one. *) -THEOREM RangeReverse == - ASSUME NEW S, NEW seq \in Seq(S) - PROVE Range(Reverse(seq)) = Range(seq) - -(* Range of concatenation of sequences is the union of the ranges *) -THEOREM RangeConcatenation == - ASSUME NEW S, NEW s1 \in Seq(S), NEW s2 \in Seq(S) - PROVE Range(s1 \o s2) = Range(s1) \cup Range(s2) - -(***************************************************************************) -(* Prefixes and suffixes of sequences. *) -(***************************************************************************) - -IsPrefix(s,t) == \E u \in Seq(Range(t)) : t = s \o u -IsStrictPrefix(s,t) == IsPrefix(s,t) /\ s # t - -IsSuffix(s,t) == \E u \in Seq(Range(t)) : t = u \o s -IsStrictSuffix(s,t) == IsSuffix(s,t) /\ s # t - -(***************************************************************************) -(* The following theorem gives three alternative characterizations of *) -(* prefixes. It also implies that any prefix of a sequence t is at most *) -(* as long as t. *) -(***************************************************************************) -THEOREM IsPrefixProperties == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) - PROVE /\ IsPrefix(s,t) <=> \E u \in Seq(S) : t = s \o u - /\ IsPrefix(s,t) <=> Len(s) <= Len(t) /\ s = SubSeq(t, 1, Len(s)) - /\ IsPrefix(s,t) <=> Len(s) <= Len(t) /\ s = Restrict(t, DOMAIN s) - -THEOREM IsStrictPrefixProperties == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) - PROVE /\ IsStrictPrefix(s,t) <=> \E u \in Seq(S) : u # << >> /\ t = s \o u - /\ IsStrictPrefix(s,t) <=> Len(s) < Len(t) /\ s = SubSeq(t, 1, Len(s)) - /\ IsStrictPrefix(s,t) <=> Len(s) < Len(t) /\ s = Restrict(t, DOMAIN s) - /\ IsStrictPrefix(s,t) <=> IsPrefix(s,t) /\ Len(s) < Len(t) - -THEOREM IsPrefixElts == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW i \in 1 .. Len(s), - IsPrefix(s,t) - PROVE s[i] = t[i] - -THEOREM EmptyIsPrefix == - ASSUME NEW S, NEW s \in Seq(S) - PROVE /\ IsPrefix(<<>>, s) - /\ IsPrefix(s, <<>>) <=> s = <<>> - /\ IsStrictPrefix(<<>>, s) <=> s # <<>> - /\ ~ IsStrictPrefix(s, <<>>) - -THEOREM IsPrefixConcat == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) - PROVE IsPrefix(s, s \o t) - -THEOREM IsPrefixAppend == - ASSUME NEW S, NEW s \in Seq(S), NEW e \in S - PROVE IsPrefix(s, Append(s,e)) - -THEOREM FrontIsPrefix == - ASSUME NEW S, NEW s \in Seq(S) - PROVE /\ IsPrefix(Front(s), s) - /\ s # <<>> => IsStrictPrefix(Front(s), s) - -(***************************************************************************) -(* (Strict) prefixes on sequences form a (strict) partial order, and *) -(* the strict ordering is well-founded. 
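\* Concretely (an illustrative aside, not part of the original module; the
\* theorem name is assumed and the proof hint is indicative only), the
\* characterizations above yield simple facts such as:
THEOREM PrefixSuffixExamples ==
    /\ IsPrefix(<<1, 2>>, <<1, 2, 3>>)
    /\ ~ IsPrefix(<<2, 3>>, <<1, 2, 3>>)
    /\ IsStrictPrefix(<< >>, <<1, 2, 3>>)
BY IsPrefixProperties, IsStrictPrefixProperties, EmptyIsPrefix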
*) -(***************************************************************************) -THEOREM IsPrefixPartialOrder == - ASSUME NEW S - PROVE /\ \A s \in Seq(S) : IsPrefix(s,s) - /\ \A s,t \in Seq(S) : IsPrefix(s,t) /\ IsPrefix(t,s) => s = t - /\ \A s,t,u \in Seq(S) : IsPrefix(s,t) /\ IsPrefix(t,u) => IsPrefix(s,u) - -THEOREM ConcatIsPrefix == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S), - IsPrefix(s \o t, u) - PROVE IsPrefix(s, u) - -THEOREM ConcatIsPrefixCancel == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S) - PROVE IsPrefix(s \o t, s \o u) <=> IsPrefix(t, u) - -THEOREM ConsIsPrefixCancel == - ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW t \in Seq(S) - PROVE IsPrefix(Cons(e,s), Cons(e,t)) <=> IsPrefix(s,t) - -THEOREM ConsIsPrefix == - ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW u \in Seq(S), - IsPrefix(Cons(e,s), u) - PROVE /\ e = Head(u) - /\ IsPrefix(s, Tail(u)) - -THEOREM IsStrictPrefixStrictPartialOrder == - ASSUME NEW S - PROVE /\ \A s \in Seq(S) : ~ IsStrictPrefix(s,s) - /\ \A s,t \in Seq(S) : IsStrictPrefix(s,t) => ~ IsStrictPrefix(t,s) - /\ \A s,t,u \in Seq(S) : IsStrictPrefix(s,t) /\ IsStrictPrefix(t,u) => IsStrictPrefix(s,u) - -THEOREM IsStrictPrefixWellFounded == - ASSUME NEW S - PROVE IsWellFoundedOn(OpToRel(IsStrictPrefix, Seq(S)), Seq(S)) - -THEOREM SeqStrictPrefixInduction == - ASSUME NEW P(_), NEW S, - \A t \in Seq(S) : (\A s \in Seq(S) : IsStrictPrefix(s,t) => P(s)) => P(t) - PROVE \A s \in Seq(S) : P(s) - -(***************************************************************************) -(* Similar theorems about suffixes. *) -(***************************************************************************) - -THEOREM IsSuffixProperties == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) - PROVE /\ IsSuffix(s,t) <=> \E u \in Seq(S) : t = u \o s - /\ IsSuffix(s,t) <=> Len(s) <= Len(t) /\ s = SubSeq(t, Len(t)-Len(s)+1, Len(t)) - /\ IsSuffix(s,t) <=> IsPrefix(Reverse(s), Reverse(t)) - -THEOREM IsStrictSuffixProperties == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) - PROVE /\ IsStrictSuffix(s,t) <=> \E u \in Seq(S) : u # << >> /\ t = u \o s - /\ IsStrictSuffix(s,t) <=> Len(s) < Len(t) /\ IsSuffix(s,t) - /\ IsStrictSuffix(s,t) <=> Len(s) < Len(t) /\ s = SubSeq(t, Len(t)-Len(s)+1, Len(t)) - /\ IsStrictSuffix(s,t) <=> IsStrictPrefix(Reverse(s), Reverse(t)) - -THEOREM IsSuffixElts == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW i \in 1 .. 
Len(s), - IsSuffix(s,t) - PROVE s[i] = t[Len(t) - Len(s) + i] - -THEOREM EmptyIsSuffix == - ASSUME NEW S, NEW s \in Seq(S) - PROVE /\ IsSuffix(<<>>, s) - /\ IsSuffix(s, <<>>) <=> s = <<>> - /\ IsStrictSuffix(<<>>, s) <=> s # <<>> - /\ ~ IsStrictSuffix(s, <<>>) - -THEOREM IsSuffixConcat == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) - PROVE IsSuffix(s, t \o s) - -THEOREM IsStrictSuffixCons == - ASSUME NEW S, NEW s \in Seq(S), NEW e \in S - PROVE IsStrictSuffix(s, Cons(e,s)) - -THEOREM TailIsSuffix == - ASSUME NEW S, NEW s \in Seq(S) - PROVE /\ IsSuffix(Tail(s), s) - /\ s # <<>> => IsStrictSuffix(Tail(s), s) - -THEOREM IsSuffixPartialOrder == - ASSUME NEW S - PROVE /\ \A s \in Seq(S) : IsSuffix(s,s) - /\ \A s,t \in Seq(S) : IsSuffix(s,t) /\ IsSuffix(t,s) => s = t - /\ \A s,t,u \in Seq(S) : IsSuffix(s,t) /\ IsSuffix(t,u) => IsSuffix(s,u) - -THEOREM ConcatIsSuffix == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S), - IsSuffix(s \o t, u) - PROVE IsSuffix(t, u) - -THEOREM ConcatIsSuffixCancel == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S) - PROVE IsSuffix(s \o t, u \o t) <=> IsSuffix(s, u) - -THEOREM AppendIsSuffixCancel == - ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW t \in Seq(S) - PROVE IsSuffix(Append(s,e), Append(t,e)) <=> IsSuffix(s,t) - -THEOREM AppendIsSuffix == - ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW u \in Seq(S), - IsSuffix(Append(s,e), u) - PROVE /\ e = Last(u) - /\ IsSuffix(s, Front(u)) - -THEOREM IsStrictSuffixStrictPartialOrder == - ASSUME NEW S - PROVE /\ \A s \in Seq(S) : ~ IsStrictSuffix(s,s) - /\ \A s,t \in Seq(S) : IsStrictSuffix(s,t) => ~ IsStrictSuffix(t,s) - /\ \A s,t,u \in Seq(S) : IsStrictSuffix(s,t) /\ IsStrictSuffix(t,u) => IsStrictSuffix(s,u) - -THEOREM IsStrictSuffixWellFounded == - ASSUME NEW S - PROVE IsWellFoundedOn(OpToRel(IsStrictSuffix, Seq(S)), Seq(S)) - -THEOREM SeqStrictSuffixInduction == - ASSUME NEW P(_), NEW S, - \A t \in Seq(S) : (\A s \in Seq(S) : IsStrictSuffix(s,t) => P(s)) => P(t) - PROVE \A s \in Seq(S) : P(s) - -(***************************************************************************) -(* Since the (strict) prefix and suffix orderings on sequences are *) -(* well-founded, they can be used for defining recursive functions. *) -(* The operators OpDefinesFcn, WFInductiveDefines, and WFInductiveUnique *) -(* are defined in module WellFoundedInduction. 
*) -(***************************************************************************) - -StrictPrefixesDetermineDef(S, Def(_,_)) == - \A g,h : \A seq \in Seq(S) : - (\A pre \in Seq(S) : IsStrictPrefix(pre,seq) => g[pre] = h[pre]) - => Def(g, seq) = Def(h, seq) - -LEMMA StrictPrefixesDetermineDef_WFDefOn == - ASSUME NEW S, NEW Def(_,_), StrictPrefixesDetermineDef(S, Def) - PROVE WFDefOn(OpToRel(IsStrictPrefix, Seq(S)), Seq(S), Def) - -THEOREM PrefixRecursiveSequenceFunctionUnique == - ASSUME NEW S, NEW Def(_,_), StrictPrefixesDetermineDef(S, Def) - PROVE WFInductiveUnique(Seq(S), Def) - -THEOREM PrefixRecursiveSequenceFunctionDef == - ASSUME NEW S, NEW Def(_,_), NEW f, - StrictPrefixesDetermineDef(S, Def), - OpDefinesFcn(f, Seq(S), Def) - PROVE WFInductiveDefines(f, Seq(S), Def) - -THEOREM PrefixRecursiveSequenceFunctionType == - ASSUME NEW S, NEW T, NEW Def(_,_), NEW f, - T # {}, - StrictPrefixesDetermineDef(S, Def), - WFInductiveDefines(f, Seq(S), Def), - \A g \in [Seq(S) -> T], s \in Seq(S) : Def(g,s) \in T - PROVE f \in [Seq(S) -> T] - -StrictSuffixesDetermineDef(S, Def(_,_)) == - \A g,h : \A seq \in Seq(S) : - (\A suf \in Seq(S) : IsStrictSuffix(suf,seq) => g[suf] = h[suf]) - => Def(g, seq) = Def(h, seq) - -LEMMA StrictSuffixesDetermineDef_WFDefOn == - ASSUME NEW S, NEW Def(_,_), StrictSuffixesDetermineDef(S, Def) - PROVE WFDefOn(OpToRel(IsStrictSuffix, Seq(S)), Seq(S), Def) - -THEOREM SuffixRecursiveSequenceFunctionUnique == - ASSUME NEW S, NEW Def(_,_), StrictSuffixesDetermineDef(S, Def) - PROVE WFInductiveUnique(Seq(S), Def) - -THEOREM SuffixRecursiveSequenceFunctionDef == - ASSUME NEW S, NEW Def(_,_), NEW f, - StrictSuffixesDetermineDef(S, Def), - OpDefinesFcn(f, Seq(S), Def) - PROVE WFInductiveDefines(f, Seq(S), Def) - -THEOREM SuffixRecursiveSequenceFunctionType == - ASSUME NEW S, NEW T, NEW Def(_,_), NEW f, - T # {}, - StrictSuffixesDetermineDef(S, Def), - WFInductiveDefines(f, Seq(S), Def), - \A g \in [Seq(S) -> T], s \in Seq(S) : Def(g,s) \in T - PROVE f \in [Seq(S) -> T] - -(***************************************************************************) -(* The following theorems justify ``primitive recursive'' functions over *) -(* sequences, with a base case for the empty sequence and recursion along *) -(* either the Tail or the Front of a non-empty sequence. 
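\* For instance (an illustrative sketch, not part of the original module; the
\* operator name SeqSum is assumed only for this aside), summing a sequence of
\* naturals recurses along Tail with base case 0:
SeqSum[s \in Seq(Nat)] == IF s = << >> THEN 0 ELSE Head(s) + SeqSum[Tail(s)]
\* This is an instance of TailInductiveDefHypothesis (below) with f0 = 0 and
\* Def(v, s) = Head(s) + v; TailInductiveDef then yields the expected recursion
\* equation, and TailInductiveDefType gives SeqSum \in [Seq(Nat) -> Nat], since
\* 0 \in Nat and Head(s) + v \in Nat for any non-empty s \in Seq(Nat) and v \in Nat.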
*) -(***************************************************************************) - -TailInductiveDefHypothesis(f, S, f0, Def(_,_)) == - f = CHOOSE g : g = [s \in Seq(S) |-> IF s = <<>> THEN f0 ELSE Def(g[Tail(s)], s)] - -TailInductiveDefConclusion(f, S, f0, Def(_,_)) == - f = [s \in Seq(S) |-> IF s = <<>> THEN f0 ELSE Def(f[Tail(s)], s)] - -THEOREM TailInductiveDef == - ASSUME NEW S, NEW Def(_,_), NEW f, NEW f0, - TailInductiveDefHypothesis(f, S, f0, Def) - PROVE TailInductiveDefConclusion(f, S, f0, Def) - -THEOREM TailInductiveDefType == - ASSUME NEW S, NEW Def(_,_), NEW f, NEW f0, NEW T, - TailInductiveDefConclusion(f, S, f0, Def), - f0 \in T, - \A v \in T, s \in Seq(S) : s # <<>> => Def(v,s) \in T - PROVE f \in [Seq(S) -> T] - -FrontInductiveDefHypothesis(f, S, f0, Def(_,_)) == - f = CHOOSE g : g = [s \in Seq(S) |-> IF s = <<>> THEN f0 ELSE Def(g[Front(s)], s)] - -FrontInductiveDefConclusion(f, S, f0, Def(_,_)) == - f = [s \in Seq(S) |-> IF s = <<>> THEN f0 ELSE Def(f[Front(s)], s)] - -THEOREM FrontInductiveDef == - ASSUME NEW S, NEW Def(_,_), NEW f, NEW f0, - FrontInductiveDefHypothesis(f, S, f0, Def) - PROVE FrontInductiveDefConclusion(f, S, f0, Def) - -THEOREM FrontInductiveDefType == - ASSUME NEW S, NEW Def(_,_), NEW f, NEW f0, NEW T, - FrontInductiveDefConclusion(f, S, f0, Def), - f0 \in T, - \A v \in T, s \in Seq(S) : s # <<>> => Def(v,s) \in T - PROVE f \in [Seq(S) -> T] - -============================================================================= diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/SequenceTheorems_proofs.tla b/x/ccv/provider/keyguard/prototyping/tla/library/SequenceTheorems_proofs.tla deleted file mode 100644 index f639a4c6d1..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/SequenceTheorems_proofs.tla +++ /dev/null @@ -1,1446 +0,0 @@ ------------------------ MODULE SequenceTheorems_proofs ---------------------- -(***************************************************************************) -(* This module contains the proofs for theorems about sequences and the *) -(* corresponding operations. *) -(***************************************************************************) -EXTENDS Sequences, Integers, WellFoundedInduction, Functions, TLAPS - - -(***************************************************************************) -(* Elementary properties about Seq(S) *) -(***************************************************************************) - -LEMMA SeqDef == \A S : Seq(S) = UNION {[1..n -> S] : n \in Nat} -OBVIOUS - -THEOREM ElementOfSeq == - ASSUME NEW S, NEW seq \in Seq(S), - NEW n \in 1..Len(seq) - PROVE seq[n] \in S -OBVIOUS - -THEOREM EmptySeq == - ASSUME NEW S - PROVE /\ << >> \in Seq(S) - /\ \A seq \in Seq(S) : (seq = << >>) <=> (Len(seq) = 0) -OBVIOUS - -THEOREM LenProperties == - ASSUME NEW S, NEW seq \in Seq(S) - PROVE /\ Len(seq) \in Nat - /\ seq \in [1..Len(seq) -> S] - /\ DOMAIN seq = 1 .. Len(seq) -OBVIOUS - -THEOREM ExceptSeq == - ASSUME NEW S, NEW seq \in Seq(S), NEW i \in 1 .. Len(seq), NEW e \in S - PROVE /\ [seq EXCEPT ![i] = e] \in Seq(S) - /\ Len([seq EXCEPT ![i] = e]) = Len(seq) - /\ \A j \in 1 .. Len(seq) : [seq EXCEPT ![i] = e][j] = IF j=i THEN e ELSE seq[j] -<1>. DEFINE exc == [seq EXCEPT ![i] = e] -<1>1. \A j \in 1 .. Len(seq) : exc[j] = IF j=i THEN e ELSE seq[j] - BY DOMAIN exc = 1 .. Len(seq), Zenon -<1>. 
QED - BY <1>1 - -THEOREM IsASeq == - ASSUME NEW n \in Nat, NEW e(_), NEW S, - \A i \in 1..n : e(i) \in S - PROVE [i \in 1..n |-> e(i)] \in Seq(S) -OBVIOUS - -THEOREM SeqEqual == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), - Len(s) = Len(t), \A i \in 1 .. Len(s) : s[i] = t[i] - PROVE s = t -<1>1. /\ DOMAIN s = 1 .. Len(s) - /\ DOMAIN t = 1 .. Len(s) - /\ s = [i \in DOMAIN s |-> s[i]] - /\ t = [i \in DOMAIN t |-> t[i]] - OBVIOUS -<1>. QED - BY <1>1, Zenon - -(*************************************************************************** - Concatenation (\o) And Properties -***************************************************************************) - -THEOREM ConcatProperties == - ASSUME NEW S, NEW s1 \in Seq(S), NEW s2 \in Seq(S) - PROVE /\ s1 \o s2 \in Seq(S) - /\ Len(s1 \o s2) = Len(s1) + Len(s2) - /\ \A i \in 1 .. Len(s1) + Len(s2) : (s1 \o s2)[i] = - IF i <= Len(s1) THEN s1[i] ELSE s2[i - Len(s1)] -OBVIOUS - -THEOREM ConcatEmptySeq == - ASSUME NEW S, NEW seq \in Seq(S) - PROVE /\ seq \o << >> = seq - /\ << >> \o seq = seq -OBVIOUS - -THEOREM ConcatAssociative == - ASSUME NEW S, NEW s1 \in Seq(S), NEW s2 \in Seq(S), NEW s3 \in Seq(S) - PROVE (s1 \o s2) \o s3 = s1 \o (s2 \o s3) -OBVIOUS - -THEOREM ConcatSimplifications == - ASSUME NEW S - PROVE /\ \A s,t \in Seq(S) : s \o t = s <=> t = <<>> - /\ \A s,t \in Seq(S) : s \o t = t <=> s = <<>> - /\ \A s,t \in Seq(S) : s \o t = <<>> <=> s = <<>> /\ t = <<>> - /\ \A s,t,u \in Seq(S) : s \o t = s \o u <=> t = u - /\ \A s,t,u \in Seq(S) : s \o u = t \o u <=> s = t -<1>1. /\ \A s,t \in Seq(S) : s \o t = s <=> t = <<>> - /\ \A s,t \in Seq(S) : s \o t = t <=> s = <<>> - /\ \A s,t \in Seq(S) : s \o t = <<>> <=> s = <<>> /\ t = <<>> - OBVIOUS -<1>2. \A s,t,u \in Seq(S) : s \o t = s \o u <=> t = u - <2>. SUFFICES ASSUME NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S), - s \o t = s \o u - PROVE t = u - BY Zenon - <2>1. Len(t) = Len(u) OBVIOUS - <2>2. \A i \in 1 .. Len(t) : t[i] = (s \o t)[i + Len(s)] OBVIOUS - <2>3. \A i \in 1 .. Len(u) : u[i] = (s \o u)[i + Len(s)] OBVIOUS - <2>. QED BY <2>1, <2>2, <2>3, SeqEqual -<1>3. \A s,t,u \in Seq(S) : s \o u = t \o u <=> s = t - <2>. SUFFICES ASSUME NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S), - s \o u = t \o u - PROVE s = t - BY Zenon - <2>1. Len(s) = Len(t) OBVIOUS - <2>2. \A i \in 1 .. Len(s) : s[i] = (s \o u)[i] OBVIOUS - <2>3. \A i \in 1 .. Len(t) : t[i] = (t \o u)[i] OBVIOUS - <2>. QED BY <2>1, <2>2, <2>3, SeqEqual -<1>. QED BY <1>1, <1>2, <1>3, Zenon - -(***************************************************************************) -(* SubSeq, Head and Tail *) -(***************************************************************************) - -THEOREM SubSeqProperties == - ASSUME NEW S, - NEW s \in Seq(S), - NEW m \in 1 .. Len(s)+1, - NEW n \in m-1 .. Len(s) - PROVE /\ SubSeq(s,m,n) \in Seq(S) - /\ Len(SubSeq(s, m, n)) = n-m+1 - /\ \A i \in 1 .. n-m+1 : SubSeq(s,m,n)[i] = s[m+i-1] -<1>1. CASE n \in m .. Len(s) - BY <1>1 -<1>2. CASE n = m-1 - <2>. DEFINE sub == SubSeq(s,m,m-1) - <2>1. /\ sub = << >> - /\ n-m+1 = 0 - /\ \A i \in 1 .. n-m+1 : sub[i] \in S /\ SubSeq(s,m,n)[i] = s[m+i-1] - BY <1>2 - <2>2. Len(sub) = n-m+1 - BY <2>1, Zenon - <2>. QED - BY <1>2, <2>1, <2>2, Isa -<1>. 
QED - BY <1>1, <1>2 - -THEOREM SubSeqEmpty == - ASSUME NEW s, NEW m \in Int, NEW n \in Int, n < m - PROVE SubSeq(s,m,n) = << >> -OBVIOUS - -THEOREM HeadTailProperties == - ASSUME NEW S, - NEW seq \in Seq(S), seq # << >> - PROVE /\ Head(seq) \in S - /\ Tail(seq) \in Seq(S) - /\ Len(Tail(seq)) = Len(seq)-1 - /\ \A i \in 1 .. Len(Tail(seq)) : Tail(seq)[i] = seq[i+1] -OBVIOUS - - -THEOREM TailIsSubSeq == - ASSUME NEW S, - NEW seq \in Seq(S), seq # << >> - PROVE Tail(seq) = SubSeq(seq, 2, Len(seq)) -OBVIOUS - -THEOREM SubSeqRestrict == - ASSUME NEW S, NEW seq \in Seq(S), NEW n \in 0 .. Len(seq) - PROVE SubSeq(seq, 1, n) = Restrict(seq, 1 .. n) -BY DEF Restrict - -THEOREM HeadTailOfSubSeq == - ASSUME NEW S, NEW seq \in Seq(S), - NEW m \in 1 .. Len(seq), NEW n \in m .. Len(seq) - PROVE /\ Head(SubSeq(seq,m,n)) = seq[m] - /\ Tail(SubSeq(seq,m,n)) = SubSeq(seq, m+1, n) -OBVIOUS - -THEOREM SubSeqRecursiveFirst == - ASSUME NEW S, NEW seq \in Seq(S), - NEW m \in 1 .. Len(seq), NEW n \in m .. Len(seq) - PROVE SubSeq(seq, m, n) = << seq[m] >> \o SubSeq(seq, m+1, n) -<1>. DEFINE lhs == SubSeq(seq, m, n) - rhs == << seq[m] >> \o SubSeq(seq, m+1, n) -<1>1. /\ lhs \in Seq(S) - /\ rhs \in Seq(S) - OBVIOUS -<1>2. Len(lhs) = Len(rhs) - <2>1. Len(lhs) = n-m+1 - BY SubSeqProperties - <2>2. /\ m+1 \in 1 .. Len(seq)+1 - /\ n \in (m+1)-1 .. Len(seq) - OBVIOUS - <2>3. Len(SubSeq(seq, m+1, n)) = n - (m+1) + 1 - BY <2>2, SubSeqProperties, Zenon - <2>. QED - BY <2>1, <2>3 -<1>3. ASSUME NEW i \in 1 .. Len(lhs) - PROVE lhs[i] = rhs[i] - OBVIOUS -<1>. QED - BY <1>1, <1>2, <1>3, SeqEqual - -THEOREM SubSeqRecursiveSecond == - ASSUME NEW S, NEW seq \in Seq(S), - NEW m \in 1 .. Len(seq), NEW n \in m .. Len(seq) - PROVE SubSeq(seq, m, n) = SubSeq(seq, m, n-1) \o << seq[n] >> -<1>. DEFINE lhs == SubSeq(seq, m, n) - mid == SubSeq(seq, m, n-1) - rhs == mid \o << seq[n] >> -<1>1. /\ lhs \in Seq(S) - /\ mid \in Seq(S) - /\ rhs \in Seq(S) - /\ <> \in Seq(S) - OBVIOUS -<1>2. Len(lhs) = n-m+1 - BY SubSeqProperties -<1>3. Len(mid) = (n-1) - m + 1 - BY m \in 1 .. Len(seq)+1, n-1 \in m-1 .. Len(seq), SubSeqProperties -<1>4. Len(lhs) = Len(rhs) - BY <1>2, <1>3 -<1>5. ASSUME NEW i \in 1 .. Len(lhs) - PROVE lhs[i] = rhs[i] - <2>1. lhs[i] = seq[m+i-1] - OBVIOUS - <2>2. rhs[i] = seq[m+i-1] - <3>1. i \in 1 .. (Len(mid) + Len(<>)) - BY <1>4, <1>5 - <3>2. CASE i \in 1 .. (Len(lhs)-1) - BY <3>2 - <3>3. CASE ~(i \in 1 .. (Len(lhs)-1)) - <4>1. i = Len(lhs) /\ ~(i <= Len(mid)) - BY <3>3, <1>2, <1>3 - <4>2. rhs[i] = <>[i - Len(mid)] - BY <1>1, <3>1, <4>1, ConcatProperties, Zenon - <4>3. /\ i - Len(mid) = 1 - /\ n = m+i-1 - BY <4>1, <1>2, <1>3 - <4>. QED - BY <4>2, <4>3, Isa - <3>. QED - BY <3>2, <3>3 - <2>. QED - BY <2>1, <2>2 -<1>. QED - BY <1>1, <1>4, <1>5, SeqEqual - -THEOREM SubSeqFull == - ASSUME NEW S, NEW seq \in Seq(S) - PROVE SubSeq(seq, 1, Len(seq)) = seq -OBVIOUS - -(*****************************************************************************) -(* Adjacent subsequences can be concatenated to obtain a longer subsequence. *) -(*****************************************************************************) -THEOREM ConcatAdjacentSubSeq == - ASSUME NEW S, NEW seq \in Seq(S), - NEW m \in 1 .. Len(seq)+1, - NEW k \in m-1 .. Len(seq), - NEW n \in k .. Len(seq) - PROVE SubSeq(seq, m, k) \o SubSeq(seq, k+1, n) = SubSeq(seq, m, n) -<1>. DEFINE lhs == SubSeq(seq, m, k) \o SubSeq(seq, k+1, n) -<1>. /\ SubSeq(seq, m, k) \in Seq(S) - /\ SubSeq(seq, k+1, n) \in Seq(S) - /\ SubSeq(seq, m, n) \in Seq(S) - /\ lhs \in Seq(S) - OBVIOUS -<1>1. 
Len(SubSeq(seq, m, k)) = k-m+1 - BY SubSeqProperties -<1>2. Len(SubSeq(seq, k+1,n)) = n-k - BY k+1 \in 1 .. Len(seq)+1, n \in (k+1)-1 .. Len(seq), n-k = n-(k+1)+1, SubSeqProperties -<1>3. Len(SubSeq(seq, m, n)) = n-m+1 - BY n \in m-1 .. Len(seq), SubSeqProperties -<1>4. Len(lhs) = Len(SubSeq(seq, m, n)) - BY <1>1, <1>2, <1>3 -<1>5. ASSUME NEW i \in 1 .. Len(lhs) - PROVE lhs[i] = SubSeq(seq, m, n)[i] - <2>0. 1 .. Len(lhs) = (1 .. k-m+1) \cup (k-m+2 .. n-m+1) - BY <1>4, <1>3 - <2>1. CASE i \in 1 .. k-m+1 - <3>1. lhs[i] = SubSeq(seq, m, k)[i] - BY <2>1, <1>1, <1>2, ConcatProperties, i <= Len(SubSeq(seq, m, k)) - <3>2. SubSeq(seq, m, k)[i] = seq[m+i-1] BY <2>1, SubSeqProperties - <3>3. SubSeq(seq, m, n)[i] = seq[m+i-1] BY <2>1, SubSeqProperties - <3>. QED BY <3>1, <3>2, <3>3 - <2>2. CASE i \in k-m+2 .. n-m+1 - <3>1. /\ i \in 1 .. Len(SubSeq(seq,m,k)) + Len(SubSeq(seq,k+1,n)) - /\ ~(i <= Len(SubSeq(seq, m, k))) - BY <1>1, <1>2, <2>2 - <3>2. lhs[i] = SubSeq(seq, k+1, n)[i - Len(SubSeq(seq,m,k))] - BY <3>1, ConcatProperties - <3>3. i - Len(SubSeq(seq,m,k)) \in 1 .. n-k - BY <2>2, <1>1 - <3>4. SubSeq(seq, k+1, n)[i - Len(SubSeq(seq,m,k))] = seq[m+i-1] - BY <3>3, <1>1, SubSeqProperties - <3>5. SubSeq(seq, m, n)[i] = seq[m+i-1] - BY <1>4, <1>3, SubSeqProperties - <3>. QED BY <3>2, <3>4, <3>5 - <2>. QED BY <2>0, <2>1, <2>2 -<1>. QED BY <1>4, <1>5, SeqEqual - -(***************************************************************************) -(* Append, InsertAt, Cons & RemoveAt *) -(* Append(seq, elt) appends element elt at the end of sequence seq *) -(* Cons(elt, seq) prepends element elt at the beginning of sequence seq *) -(* InsertAt(seq, i, elt) inserts element elt in the position i and pushes *) -(* the *) -(* original element at i to i+1 and so on *) -(* RemoveAt(seq, i) removes the element at position i *) -(***************************************************************************) - -THEOREM AppendProperties == - ASSUME NEW S, NEW seq \in Seq(S), NEW elt \in S - PROVE /\ Append(seq, elt) \in Seq(S) - /\ Append(seq, elt) # << >> - /\ Len(Append(seq, elt)) = Len(seq)+1 - /\ \A i \in 1.. Len(seq) : Append(seq, elt)[i] = seq[i] - /\ Append(seq, elt)[Len(seq)+1] = elt -OBVIOUS - -THEOREM AppendIsConcat == - ASSUME NEW S, NEW seq \in Seq(S), NEW elt \in S - PROVE Append(seq, elt) = seq \o <<elt>> -OBVIOUS - -THEOREM HeadTailAppend == - ASSUME NEW S, NEW seq \in Seq(S), NEW elt - PROVE /\ Head(Append(seq, elt)) = IF seq = <<>> THEN elt ELSE Head(seq) - /\ Tail(Append(seq, elt)) = IF seq = <<>> THEN <<>> ELSE Append(Tail(seq), elt) -<1>1. CASE seq = <<>> - <2>1. Append(seq, elt) = <<elt>> BY <1>1 - <2>. QED BY <1>1, <2>1 -<1>2. CASE seq # <<>> - <2>1. Head(Append(seq, elt)) = Head(seq) BY <1>2 - <2>2. Tail(Append(seq, elt)) = Append(Tail(seq), elt) BY <1>2 - <2>. QED BY <2>1, <2>2, <1>2 -<1>. QED BY <1>1, <1>2 - -Cons(elt, seq) == <<elt>> \o seq - -THEOREM ConsProperties == - ASSUME NEW S, NEW seq \in Seq(S), NEW elt \in S - PROVE /\ Cons(elt, seq) \in Seq(S) - /\ Cons(elt, seq) # <<>> - /\ Len(Cons(elt, seq)) = Len(seq)+1 - /\ Head(Cons(elt, seq)) = elt - /\ Tail(Cons(elt, seq)) = seq - /\ Cons(elt, seq)[1] = elt - /\ \A i \in 1 .. 
Len(seq) : Cons(elt, seq)[i+1] = seq[i] -BY DEF Cons - -THEOREM ConsEmpty == - \A x : Cons(x, << >>) = << x >> -BY DEF Cons - -THEOREM ConsHeadTail == - ASSUME NEW S, NEW seq \in Seq(S), seq # << >> - PROVE Cons(Head(seq), Tail(seq)) = seq -BY DEF Cons - -THEOREM ConsAppend == - ASSUME NEW S, NEW seq \in Seq(S), NEW x \in S, NEW y \in S - PROVE Cons(x, Append(seq, y)) = Append(Cons(x,seq), y) -BY AppendIsConcat DEF Cons - -THEOREM ConsInjective == - ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW f \in S, NEW t \in Seq(S) - PROVE Cons(e,s) = Cons(f,t) <=> e = f /\ s = t -<1>1. SUFFICES ASSUME Cons(e,s) = Cons(f,t) PROVE e=f /\ s=t - OBVIOUS -<1>2. /\ Head(Cons(e,s)) = Head(Cons(f,t)) - /\ Tail(Cons(e,s)) = Tail(Cons(f,t)) - BY <1>1 -<1>. QED BY ONLY <1>2, ConsProperties, Isa - -InsertAt(seq,i,elt) == SubSeq(seq, 1, i-1) \o <<elt>> \o SubSeq(seq, i, Len(seq)) - -THEOREM InsertAtProperties == - ASSUME NEW S, NEW seq \in Seq(S), NEW i \in 1 .. Len(seq)+1, NEW elt \in S - PROVE /\ InsertAt(seq,i,elt) \in Seq(S) - /\ Len(InsertAt(seq,i,elt)) = Len(seq)+1 - /\ \A j \in 1 .. Len(seq)+1 : InsertAt(seq,i,elt)[j] = - IF j<i THEN seq[j] ELSE IF j=i THEN elt ELSE seq[j-1] -<1>. DEFINE left == SubSeq(seq, 1, i-1) - mid == <<elt>> - right == SubSeq(seq, i, Len(seq)) -<1>1. /\ left \in Seq(S) - /\ mid \in Seq(S) - /\ right \in Seq(S) - /\ InsertAt(seq,i,elt) \in Seq(S) - BY DEF InsertAt -<1>l. Len(left) = (i-1) - 1 + 1 - BY 1 \in 1 .. (Len(seq)+1), i-1 \in (1-1) .. Len(seq), SubSeqProperties, Zenon -<1>r. Len(right) = Len(seq) - i + 1 - BY Len(seq) \in (i-1) .. Len(seq), SubSeqProperties, Zenon -<1>2. Len(InsertAt(seq,i,elt)) = Len(seq)+1 - BY <1>l, <1>r DEF InsertAt -<1>3. ASSUME NEW j \in 1 .. Len(seq)+1 - PROVE InsertAt(seq,i,elt)[j] = IF j<i THEN seq[j] ELSE IF j=i THEN elt ELSE seq[j-1] - <2>1. CASE j \in 1 .. i-1 - BY <2>1 DEF InsertAt - <2>2. CASE j = i - <3>1. /\ j \in 1 .. Len(left) + Len(mid) - /\ ~(j <= Len(left)) - /\ <<elt>>[j - Len(left)] = elt - BY <2>2, <1>l - <3>2. (left \o mid)[j] = elt - BY <1>1, <3>1, ConcatProperties - <3>3. /\ j \in 1 .. (Len(left \o mid) + Len(right)) - /\ j <= Len(left \o mid) - /\ left \o mid \in Seq(S) - BY <2>2, <1>l, <1>r - <3>4. ((left \o mid) \o right)[j] = (left \o mid)[j] - BY <1>1, <3>3, ConcatProperties DEF InsertAt - <3>. QED - BY <3>4, <3>2, <2>2 DEF InsertAt - <2>3. CASE j \in i+1 .. Len(seq)+1 - <3>1. ~(j < i) /\ j # i - BY <2>3 - <3>2. /\ j \in 1 .. (Len(left \o mid) + Len(right)) - /\ ~(j <= Len(left \o mid)) - /\ left \o mid \in Seq(S) - BY <1>l, <1>r, <2>3 - <3>3. ((left \o mid) \o right)[j] = right[j - Len(left \o mid)] - BY <1>1, <3>2, ConcatProperties - <3>4. /\ Len(seq) \in i-1 .. Len(seq) - /\ j - Len(left \o mid) \in 1 .. (Len(seq) - i + 1) - BY <2>3, <1>l - <3>5. right[j - Len(left \o mid)] = seq[i + (j - Len(left \o mid)) - 1] - BY <3>4, SubSeqProperties - <3>6. right[j - Len(left \o mid)] = seq[j-1] - BY <3>5, <1>l - <3>. QED - BY <3>1, <3>3, <3>6 DEF InsertAt - <2>. QED - BY <2>1, <2>2, <2>3 -<1>. QED - BY <1>1, <1>2, <1>3 - -RemoveAt(seq, i) == SubSeq(seq, 1, i-1) \o SubSeq(seq, i+1, Len(seq)) - -THEOREM RemoveAtProperties == - ASSUME NEW S, NEW seq \in Seq(S), - NEW i \in 1..Len(seq) - PROVE /\ RemoveAt(seq,i) \in Seq(S) - /\ Len(RemoveAt(seq,i)) = Len(seq) - 1 - /\ \A j \in 1 .. Len(seq)-1 : RemoveAt(seq,i)[j] = IF j<i THEN seq[j] ELSE seq[j+1] -<1>. DEFINE left == SubSeq(seq, 1, i-1) - right == SubSeq(seq, i+1, Len(seq)) -<1>1. Len(left) = i-1 - BY 1 \in 1 .. Len(seq)+1, i-1 \in (1-1) .. Len(seq), (i-1) - 1 + 1 = i-1, - SubSeqProperties, Zenon -<1>2. Len(right) = Len(seq) - i - BY i+1 \in 1 .. Len(seq)+1, Len(seq) \in (i+1)-1 .. 
Len(seq), Len(seq) - (i+1) + 1 = Len(seq)-i, - SubSeqProperties, Zenon -<1>3. /\ left \in Seq(S) - /\ right \in Seq(S) - /\ RemoveAt(seq,i) \in Seq(S) - BY DEF RemoveAt -<1>4. Len(RemoveAt(seq,i)) = Len(seq) - 1 - BY <1>1, <1>2 DEF RemoveAt -<1>5. ASSUME NEW j \in 1 .. Len(seq)-1 - PROVE RemoveAt(seq,i)[j] = IF j<i THEN seq[j] ELSE seq[j+1] - <2>1. CASE j \in 1 .. i-1 - BY <2>1 DEF RemoveAt - <2>2. CASE j \in i .. Len(seq)-1 - <3>1. /\ j \in 1 .. Len(left) + Len(right) - /\ ~(j <= Len(left)) - BY <2>2, <1>1, <1>2 - <3>2. RemoveAt(seq,i)[j] = right[j - Len(left)] - BY <1>3, <3>1, ConcatProperties, Zenon DEF RemoveAt - <3>3. /\ i+1 \in 1 .. Len(seq)+1 - /\ Len(seq) \in (i+1)-1 .. Len(seq) - /\ j - (i-1) \in 1 .. Len(seq) - (i+1) + 1 - BY <2>2 - <3>4. right[j - (i-1)] = seq[(i+1) + (j - (i-1)) - 1] - BY <3>3, SubSeqProperties, Zenon - <3>. QED - BY <3>2, <3>4, <2>2, <1>1 - <2>. QED - BY <2>1, <2>2 -<1>. QED - BY <1>3, <1>4, <1>5 - -(***************************************************************************) -(* Front & Last *) -(* *) -(* Front(seq) sequence formed by removing the last element *) -(* Last(seq) last element of the sequence *) -(* *) -(* These operators are to Append what Head and Tail are to Cons. *) -(***************************************************************************) - -Front(seq) == SubSeq(seq, 1, Len(seq)-1) -Last(seq) == seq[Len(seq)] - -THEOREM FrontProperties == - ASSUME NEW S, NEW seq \in Seq(S) - PROVE /\ Front(seq) \in Seq(S) - /\ Len(Front(seq)) = IF seq = << >> THEN 0 ELSE Len(seq)-1 - /\ \A i \in 1 .. Len(seq)-1 : Front(seq)[i] = seq[i] -<1>1. CASE seq = << >> - <2>1. /\ Len(seq) = 0 - /\ Front(seq) = << >> - BY <1>1 DEF Front - <2>. QED BY <2>1 -<1>2. CASE seq # << >> - <2>1. /\ 1 \in 1 .. (Len(seq)+1) - /\ Len(seq)-1 \in (1-1) .. Len(seq) - BY <1>2 - <2>2. /\ SubSeq(seq, 1, Len(seq)-1) \in Seq(S) - /\ Len(SubSeq(seq, 1, Len(seq)-1)) = Len(seq)-1-1+1 - /\ \A i \in 1 .. Len(seq)-1-1+1 : SubSeq(seq,1,Len(seq)-1)[i] = seq[1+i-1] - BY <2>1, SubSeqProperties, Zenon - <2>. QED - BY <1>2, <2>2 DEF Front -<1>. QED BY <1>1, <1>2 - -THEOREM FrontOfEmpty == Front(<< >>) = << >> -BY SubSeqEmpty DEF Front - -THEOREM LastProperties == - ASSUME NEW S, NEW seq \in Seq(S), seq # << >> - PROVE /\ Last(seq) \in S - /\ Append(Front(seq), Last(seq)) = seq -<1>1. Last(seq) \in S - BY DEF Last -<1>2. Append(Front(seq), Last(seq)) = seq - <2>1. /\ 1 \in 1 .. Len(seq) - /\ Len(seq) \in 1 .. Len(seq) - OBVIOUS - <2>2. Front(seq) \o << Last(seq) >> = SubSeq(seq, 1, Len(seq)) - BY <2>1, SubSeqRecursiveSecond, Zenon DEF Front, Last - <2>. QED BY <2>2 -<1>. QED BY <1>1, <1>2 - -THEOREM FrontLastOfSubSeq == - ASSUME NEW S, NEW seq \in Seq(S), - NEW m \in 1 .. Len(seq), NEW n \in m .. Len(seq) - PROVE /\ Front(SubSeq(seq,m,n)) = SubSeq(seq, m, n-1) - /\ Last(SubSeq(seq,m,n)) = seq[n] -BY DEF Front, Last - -THEOREM FrontLastAppend == - ASSUME NEW S, NEW seq \in Seq(S), NEW e \in S - PROVE /\ Front(Append(seq, e)) = seq - /\ Last(Append(seq, e)) = e -BY DEF Front, Last - -THEOREM AppendInjective == - ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW f \in S, NEW t \in Seq(S) - PROVE Append(s,e) = Append(t,f) <=> s = t /\ e = f -<1>1. SUFFICES ASSUME Append(s,e) = Append(t,f) PROVE s=t /\ e=f - OBVIOUS -<1>2. /\ Front(Append(s,e)) = Front(Append(t,f)) - /\ Last(Append(s,e)) = Last(Append(t,f)) - BY <1>1 -<1>. 
QED - BY ONLY <1>2, FrontLastAppend - -(***************************************************************************) -(* As a corollary of the previous theorems it follows that a sequence is *) -(* either empty or can be obtained by appending an element to a sequence. *) -(***************************************************************************) -THEOREM SequenceEmptyOrAppend == - ASSUME NEW S, NEW seq \in Seq(S), seq # << >> - PROVE \E s \in Seq(S), elt \in S : seq = Append(s, elt) -BY FrontProperties, LastProperties - -(***************************************************************************) -(* REVERSE SEQUENCE And Properties *) -(* Reverse(seq) --> Reverses the sequence seq *) -(***************************************************************************) - -Reverse(seq) == [j \in 1 .. Len(seq) |-> seq[Len(seq)-j+1] ] - -THEOREM ReverseProperties == - ASSUME NEW S, NEW seq \in Seq(S) - PROVE /\ Reverse(seq) \in Seq(S) - /\ Len(Reverse(seq)) = Len(seq) - /\ Reverse(Reverse(seq)) = seq -BY DEF Reverse - -THEOREM ReverseEmpty == Reverse(<< >>) = << >> -BY DEF Reverse - -THEOREM ReverseEqual == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), Reverse(s) = Reverse(t) - PROVE s = t -<1>1. Len(s) = Len(t) BY DEF Reverse -<1>2. ASSUME NEW i \in 1 .. Len(s) - PROVE s[i] = t[i] - <2>1. Reverse(s)[Len(s)-i+1] = Reverse(t)[Len(s)-i+1] OBVIOUS - <2>. QED BY <2>1 DEF Reverse -<1>. QED BY <1>1, <1>2, SeqEqual - -THEOREM ReverseEmptyIffEmpty == - ASSUME NEW S, NEW seq \in Seq(S), Reverse(seq) = <<>> - PROVE seq = <<>> -BY <<>> \in Seq(S), ReverseEmpty, ReverseEqual, Zenon - -THEOREM ReverseConcat == - ASSUME NEW S, NEW s1 \in Seq(S), NEW s2 \in Seq(S) - PROVE Reverse(s1 \o s2) = Reverse(s2) \o Reverse(s1) -BY DEF Reverse - -THEOREM ReverseAppend == - ASSUME NEW S, NEW seq \in Seq(S), NEW e \in S - PROVE Reverse(Append(seq,e)) = Cons(e, Reverse(seq)) -BY DEF Reverse, Cons - -THEOREM ReverseCons == - ASSUME NEW S, NEW seq \in Seq(S), NEW e \in S - PROVE Reverse(Cons(e,seq)) = Append(Reverse(seq), e) -BY DEF Reverse, Cons - -THEOREM ReverseSingleton == \A x : Reverse(<< x >>) = << x >> -BY DEF Reverse - -THEOREM ReverseSubSeq == - ASSUME NEW S, NEW seq \in Seq(S), - NEW m \in 1..Len(seq), NEW n \in 1..Len(seq) - PROVE Reverse(SubSeq(seq, m , n)) = SubSeq(Reverse(seq), Len(seq)-n+1, Len(seq)-m+1) -BY DEF Reverse - -THEOREM ReversePalindrome == - ASSUME NEW S, NEW seq \in Seq(S), - Reverse(seq) = seq - PROVE Reverse(seq \o seq) = seq \o seq -BY ReverseConcat, Zenon - -THEOREM LastEqualsHeadReverse == - ASSUME NEW S, NEW seq \in Seq(S), seq # << >> - PROVE Last(seq) = Head(Reverse(seq)) -BY DEF Last, Reverse - -THEOREM ReverseFrontEqualsTailReverse == - ASSUME NEW S, NEW seq \in Seq(S), seq # << >> - PROVE Reverse(Front(seq)) = Tail(Reverse(seq)) -<1>. DEFINE lhs == Reverse(Front(seq)) - rhs == Tail(Reverse(seq)) -<1>1. /\ lhs \in Seq(S) - /\ rhs \in Seq(S) - /\ Len(lhs) = Len(seq) - 1 - /\ Len(rhs) = Len(seq) - 1 - BY FrontProperties, ReverseProperties -<1>3. ASSUME NEW i \in 1 .. Len(seq)-1 - PROVE lhs[i] = rhs[i] - <2>1. /\ Len(Front(seq)) = Len(seq)-1 - /\ i \in 1 .. Len(Front(seq)) - BY FrontProperties - <2>2. lhs[i] = Front(seq)[Len(seq)-i] - BY <2>1 DEF Reverse - <2>4. Front(seq)[Len(seq)-i] = seq[Len(seq)-i] - BY FrontProperties - <2>5. rhs[i] = seq[Len(seq)-i] - BY DEF Reverse - <2>. QED - BY <2>2, <2>4, <2>5 -<1>. 
QED - BY <1>1, <1>3, SeqEqual - - -(***************************************************************************) -(* Induction principles for sequences *) -(***************************************************************************) - -THEOREM SequencesInductionAppend == - ASSUME NEW P(_), NEW S, - P(<< >>), - \A s \in Seq(S), e \in S : P(s) => P(Append(s,e)) - PROVE \A seq \in Seq(S) : P(seq) -<1>. DEFINE Q(n) == \A seq \in Seq(S) : Len(seq) = n => P(seq) -<1>1. SUFFICES \A k \in Nat : Q(k) - OBVIOUS -<1>2. Q(0) - OBVIOUS -<1>3. ASSUME NEW n \in Nat, Q(n) - PROVE Q(n+1) - <2>1. ASSUME NEW s \in Seq(S), Len(s) = n+1 - PROVE P(s) - <3>1. /\ Front(s) \in Seq(S) - /\ Last(s) \in S - /\ Len(Front(s)) = n - /\ Append(Front(s), Last(s)) = s - BY <2>1, FrontProperties, LastProperties - <3>2. P(Front(s)) - BY <1>3, <3>1 - <3>3. QED - BY <3>1, <3>2, Zenon - <2>. QED - BY <2>1 -<1>4. QED - BY <1>2, <1>3, NatInduction, Isa - -THEOREM SequencesInductionCons == - ASSUME NEW P(_), NEW S, - P(<< >>), - \A s \in Seq(S), e \in S : P(s) => P(Cons(e,s)) - PROVE \A seq \in Seq(S) : P(seq) -<1>. DEFINE Q(n) == \A seq \in Seq(S) : Len(seq) = n => P(seq) -<1>1. SUFFICES \A k \in Nat : Q(k) - OBVIOUS -<1>2. Q(0) - OBVIOUS -<1>3. ASSUME NEW n \in Nat, Q(n) - PROVE Q(n+1) - <2>1. ASSUME NEW s \in Seq(S), Len(s) = n+1 - PROVE P(s) - <3>1. /\ Tail(s) \in Seq(S) - /\ Head(s) \in S - /\ Len(Tail(s)) = n - /\ Cons(Head(s), Tail(s)) = s - BY <2>1, ConsHeadTail - <3>2. P(Tail(s)) - BY <1>3, <3>1, Zenon - <3>3. QED - BY <3>1, <3>2, Zenon - <2>. QED - BY <2>1 -<1>4. QED - BY <1>2, <1>3, NatInduction, Isa - -(***************************************************************************) -(* RANGE OF SEQUENCE *) -(***************************************************************************) - -THEOREM RangeOfSeq == - ASSUME NEW S, NEW seq \in Seq(S) - PROVE Range(seq) \in SUBSET S -BY DEF Range - -THEOREM RangeEquality == - ASSUME NEW S, NEW seq \in Seq(S) - PROVE Range(seq) = { seq[i] : i \in 1 .. Len(seq) } -<1>1. DOMAIN seq = 1 .. Len(seq) - OBVIOUS -<1>2. QED - BY <1>1, Zenon DEF Range - -(* The range of the reverse sequence equals that of the original one. *) -THEOREM RangeReverse == - ASSUME NEW S, NEW seq \in Seq(S) - PROVE Range(Reverse(seq)) = Range(seq) -<1>1. Range(Reverse(seq)) \subseteq Range(seq) - BY RangeEquality DEF Reverse -<1>2. Range(seq) \subseteq Range(Reverse(seq)) - BY RangeEquality DEF Reverse -<1>3. QED - BY <1>1, <1>2, Zenon - -(* Range of concatenation of sequences is the union of the ranges *) -THEOREM RangeConcatenation == - ASSUME NEW S, NEW s1 \in Seq(S), NEW s2 \in Seq(S) - PROVE Range(s1 \o s2) = Range(s1) \cup Range(s2) -<1>1. Range(s1) \subseteq Range(s1 \o s2) - BY DEF Range -<1>2. Range(s2) \subseteq Range(s1 \o s2) - <2>1. SUFFICES ASSUME NEW i \in 1 .. Len(s2) - PROVE s2[i] \in Range(s1 \o s2) - BY RangeEquality - <2>2. /\ Len(s1)+i \in 1 .. Len(s1 \o s2) - /\ (s1 \o s2)[Len(s1)+i] = s2[i] - OBVIOUS - <2>. QED - BY <2>2, RangeEquality -<1>3. Range(s1 \o s2) \subseteq Range(s1) \cup Range(s2) - <2>1. SUFFICES ASSUME NEW i \in 1 .. Len(s1 \o s2) - PROVE (s1 \o s2)[i] \in Range(s1) \cup Range(s2) - BY LenProperties, ConcatProperties, Zenon DEF Range - <2>2. CASE i \in 1 .. Len(s1) - BY RangeEquality - <2>3. CASE i \in Len(s1)+1 .. Len(s1 \o s2) - BY RangeEquality - <2>. QED - BY <2>2, <2>3 -<1>. QED - BY <1>1, <1>2, <1>3, Zenon - -(***************************************************************************) -(* Prefixes and suffixes of sequences. 
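For example, IsPrefix(<<1,2>>, <<1,2,3>>) holds because <<1,2,3>> = <<1,2>> \o <<3>>, and IsSuffix(<<3>>, <<1,2,3>>) holds by the same decomposition. 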
*) -(***************************************************************************) - -IsPrefix(s,t) == \E u \in Seq(Range(t)) : t = s \o u -IsStrictPrefix(s,t) == IsPrefix(s,t) /\ s # t - -IsSuffix(s,t) == \E u \in Seq(Range(t)) : t = u \o s -IsStrictSuffix(s,t) == IsSuffix(s,t) /\ s # t - -(***************************************************************************) -(* The following theorem gives three alternative characterizations of *) -(* prefixes. It also implies that any prefix of a sequence t is at most *) -(* as long as t. *) -(***************************************************************************) -THEOREM IsPrefixProperties == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) - PROVE /\ IsPrefix(s,t) <=> \E u \in Seq(S) : t = s \o u - /\ IsPrefix(s,t) <=> Len(s) <= Len(t) /\ s = SubSeq(t, 1, Len(s)) - /\ IsPrefix(s,t) <=> Len(s) <= Len(t) /\ s = Restrict(t, DOMAIN s) -<1>1. ASSUME IsPrefix(s,t) PROVE Len(s) <= Len(t) - BY <1>1 DEF IsPrefix -<1>2. IsPrefix(s,t) <=> \E u \in Seq(S) : t = s \o u - <2>1. ASSUME NEW u \in Seq(Range(t)), t = s \o u - PROVE u \in Seq(S) - BY DEF Range - <2>2. ASSUME NEW u \in Seq(S), t = s \o u - PROVE u \in Seq(Range(t)) - <3>1. \A i \in 1 .. Len(u) : u[i] \in Range(u) - BY DOMAIN u = 1 .. Len(u) DEF Range - <3>2. \A i \in 1 .. Len(u) : u[i] \in Range(t) - BY <2>2, <3>1, RangeConcatenation - <3>. QED BY <3>2 - <2>. QED BY <2>1, <2>2 DEF IsPrefix -<1>3. IsPrefix(s,t) <=> Len(s) <= Len(t) /\ s = SubSeq(t, 1, Len(s)) - <2>1. ASSUME IsPrefix(s,t) - PROVE Len(s) <= Len(t) /\ s = SubSeq(t, 1, Len(s)) - <3>1. Len(s) <= Len(t) BY <2>1, <1>1 - <3>2. /\ 1 \in 1 .. Len(t)+1 - /\ Len(s) \in 1-1 .. Len(t) - /\ Len(s) = Len(s) - 1 + 1 - BY <3>1 - <3>3. Len(s) = Len(SubSeq(t, 1, Len(s))) - BY <3>2, SubSeqProperties, Zenon - <3>4. ASSUME NEW i \in 1 .. Len(s) - PROVE s[i] = SubSeq(t, 1, Len(s))[i] - BY <3>2, <2>1, SubSeqProperties DEF IsPrefix - <3>. QED BY <3>1, <3>3, <3>4, SeqEqual - <2>2. ASSUME Len(s) <= Len(t), s = SubSeq(t, 1, Len(s)) - PROVE IsPrefix(s,t) - <3>1. /\ 1 \in 1 .. Len(t)+1 - /\ Len(s) \in 1-1 .. Len(t) - /\ Len(t) \in Len(s) .. Len(t) - BY <2>2 - <3>2. t = s \o SubSeq(t, Len(s)+1, Len(t)) - BY <2>2, <3>1, ConcatAdjacentSubSeq, SubSeqFull, Zenon - <3>3. SubSeq(t, Len(s)+1, Len(t)) \in Seq(S) OBVIOUS - <3>. QED BY <3>2, <3>3, <1>2 - <2>. QED BY <2>1, <2>2 -<1>4. IsPrefix(s,t) <=> Len(s) <= Len(t) /\ s = Restrict(t, DOMAIN s) - <2>1. /\ DOMAIN s = 1 .. Len(s) - /\ Len(s) <= Len(t) <=> Len(s) \in 0 .. Len(t) - OBVIOUS - <2>. QED - BY <1>3, <2>1, SubSeqRestrict, Zenon -<1>. QED BY <1>2, <1>3, <1>4 - -THEOREM IsStrictPrefixProperties == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) - PROVE /\ IsStrictPrefix(s,t) <=> \E u \in Seq(S) : u # << >> /\ t = s \o u - /\ IsStrictPrefix(s,t) <=> Len(s) < Len(t) /\ s = SubSeq(t, 1, Len(s)) - /\ IsStrictPrefix(s,t) <=> Len(s) < Len(t) /\ s = Restrict(t, DOMAIN s) - /\ IsStrictPrefix(s,t) <=> IsPrefix(s,t) /\ Len(s) < Len(t) -BY IsPrefixProperties DEF IsStrictPrefix - -THEOREM IsPrefixElts == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW i \in 1 .. 
Len(s), - IsPrefix(s,t) - PROVE s[i] = t[i] -BY IsPrefixProperties - -THEOREM EmptyIsPrefix == - ASSUME NEW S, NEW s \in Seq(S) - PROVE /\ IsPrefix(<<>>, s) - /\ IsPrefix(s, <<>>) <=> s = <<>> - /\ IsStrictPrefix(<<>>, s) <=> s # <<>> - /\ ~ IsStrictPrefix(s, <<>>) -BY IsPrefixProperties, IsStrictPrefixProperties - -THEOREM IsPrefixConcat == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) - PROVE IsPrefix(s, s \o t) -BY IsPrefixProperties, ConcatProperties, Zenon - -THEOREM IsPrefixAppend == - ASSUME NEW S, NEW s \in Seq(S), NEW e \in S - PROVE IsPrefix(s, Append(s,e)) -<1>1. /\ <<e>> \in Seq(S) - /\ Append(s,e) = s \o <<e>> - OBVIOUS -<1>. QED BY <1>1, IsPrefixConcat, Zenon - -THEOREM FrontIsPrefix == - ASSUME NEW S, NEW s \in Seq(S) - PROVE /\ IsPrefix(Front(s), s) - /\ s # <<>> => IsStrictPrefix(Front(s), s) -<1>1. CASE s = << >> - BY <1>1, FrontOfEmpty, EmptyIsPrefix -<1>2. CASE s # << >> - BY <1>2, IsPrefixProperties, FrontProperties DEF Front, IsStrictPrefix -<1>. QED BY <1>1, <1>2 - -(***************************************************************************) -(* (Strict) prefixes on sequences form a (strict) partial order, and *) -(* the strict ordering is well-founded. *) -(***************************************************************************) -THEOREM IsPrefixPartialOrder == - ASSUME NEW S - PROVE /\ \A s \in Seq(S) : IsPrefix(s,s) - /\ \A s,t \in Seq(S) : IsPrefix(s,t) /\ IsPrefix(t,s) => s = t - /\ \A s,t,u \in Seq(S) : IsPrefix(s,t) /\ IsPrefix(t,u) => IsPrefix(s,u) -BY IsPrefixProperties - -THEOREM ConcatIsPrefix == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S), - IsPrefix(s \o t, u) - PROVE IsPrefix(s, u) -<1>1. /\ s \o t \in Seq(S) - /\ IsPrefix(s, s \o t) - BY IsPrefixConcat -<1>. QED BY <1>1, IsPrefixPartialOrder, Zenon - -THEOREM ConcatIsPrefixCancel == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S) - PROVE IsPrefix(s \o t, s \o u) <=> IsPrefix(t, u) -<1>1. ASSUME IsPrefix(t,u) PROVE IsPrefix(s \o t, s \o u) - <2>1. PICK v \in Seq(S) : u = t \o v BY <1>1, IsPrefixProperties - <2>2. s \o u = (s \o t) \o v BY <2>1 - <2>. QED BY s \o t \in Seq(S), s \o u \in Seq(S), <2>2, IsPrefixProperties, Zenon -<1>2. ASSUME IsPrefix(s \o t, s \o u) PROVE IsPrefix(t,u) - <2>1. PICK v \in Seq(S) : s \o u = (s \o t) \o v - BY <1>2, s \o t \in Seq(S), s \o u \in Seq(S), IsPrefixProperties, Isa - <2>2. s \o u = s \o (t \o v) - BY <2>1 - <2>3. u = t \o v - BY t \o v \in Seq(S), <2>2, ConcatSimplifications, IsaM("blast") - <2>. QED BY <2>3, IsPrefixProperties, Zenon -<1>. QED BY <1>1, <1>2 - -THEOREM ConsIsPrefixCancel == - ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW t \in Seq(S) - PROVE IsPrefix(Cons(e,s), Cons(e,t)) <=> IsPrefix(s,t) -BY <<e>> \in Seq(S), ConcatIsPrefixCancel, Zenon DEF Cons - -THEOREM ConsIsPrefix == - ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW u \in Seq(S), - IsPrefix(Cons(e,s), u) - PROVE /\ e = Head(u) - /\ IsPrefix(s, Tail(u)) -<1>. <<e>> \in Seq(S) - OBVIOUS -<1>1. IsPrefix(<<e>>, u) - BY ConcatIsPrefix, Zenon DEF Cons -<1>2. PICK v \in Seq(S) : u = Cons(e, v) - BY <1>1, IsPrefixProperties, Isa DEF Cons -<1>3. /\ e = Head(u) - /\ v = Tail(u) - /\ IsPrefix(Cons(e,s), Cons(e, Tail(u))) - BY <1>2, ConsProperties, Isa -<1>. 
QED - BY <1>3, ConsIsPrefixCancel, Zenon - -THEOREM IsStrictPrefixStrictPartialOrder == - ASSUME NEW S - PROVE /\ \A s \in Seq(S) : ~ IsStrictPrefix(s,s) - /\ \A s,t \in Seq(S) : IsStrictPrefix(s,t) => ~ IsStrictPrefix(t,s) - /\ \A s,t,u \in Seq(S) : IsStrictPrefix(s,t) /\ IsStrictPrefix(t,u) => IsStrictPrefix(s,u) -BY IsStrictPrefixProperties - -THEOREM IsStrictPrefixWellFounded == - ASSUME NEW S - PROVE IsWellFoundedOn(OpToRel(IsStrictPrefix, Seq(S)), Seq(S)) -<1>1. IsWellFoundedOn(PreImage(Len, Seq(S), OpToRel(<, Nat)), Seq(S)) - BY NatLessThanWellFounded, PreImageWellFounded, \A s \in Seq(S) : Len(s) \in Nat, Blast -<1>2. OpToRel(IsStrictPrefix, Seq(S)) \subseteq PreImage(Len, Seq(S), OpToRel(<, Nat)) - BY IsStrictPrefixProperties DEF PreImage, OpToRel -<1>. QED - BY <1>1, <1>2, IsWellFoundedOnSubrelation, Zenon - -THEOREM SeqStrictPrefixInduction == - ASSUME NEW P(_), NEW S, - \A t \in Seq(S) : (\A s \in Seq(S) : IsStrictPrefix(s,t) => P(s)) => P(t) - PROVE \A s \in Seq(S) : P(s) -<1>1. \A t \in Seq(S) : - (\A s \in SetLessThan(t, OpToRel(IsStrictPrefix, Seq(S)), Seq(S)) : P(s)) - => P(t) - BY DEF SetLessThan, OpToRel -<1>. QED BY WFInduction, IsStrictPrefixWellFounded, <1>1, Blast - -(***************************************************************************) -(* Similar theorems about suffixes. *) -(***************************************************************************) - -THEOREM IsSuffixProperties == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) - PROVE /\ IsSuffix(s,t) <=> \E u \in Seq(S) : t = u \o s - /\ IsSuffix(s,t) <=> Len(s) <= Len(t) /\ s = SubSeq(t, Len(t)-Len(s)+1, Len(t)) - /\ IsSuffix(s,t) <=> IsPrefix(Reverse(s), Reverse(t)) -<1>1. IsSuffix(s,t) <=> \E u \in Seq(S) : t = u \o s - <2>1. ASSUME NEW u \in Seq(Range(t)), t = u \o s - PROVE u \in Seq(S) - BY DEF Range - <2>2. ASSUME NEW u \in Seq(S), t = u \o s - PROVE u \in Seq(Range(t)) - <3>1. \A i \in 1 .. Len(u) : u[i] \in Range(u) - BY DOMAIN u = 1 .. Len(u) DEF Range - <3>2. \A i \in 1 .. Len(u) : u[i] \in Range(t) - BY <2>2, <3>1, RangeConcatenation - <3>. QED BY <3>2 - <2>. QED BY <2>1, <2>2 DEF IsSuffix -<1>2. IsSuffix(s,t) <=> Len(s) <= Len(t) /\ s = SubSeq(t, Len(t)-Len(s)+1, Len(t)) - <2>1. ASSUME IsSuffix(s,t) - PROVE Len(s) <= Len(t) /\ s = SubSeq(t, Len(t)-Len(s)+1, Len(t)) - <3>1. Len(s) <= Len(t) - BY <2>1 DEF IsSuffix - <3>2. /\ Len(t) - Len(s) + 1 \in 1 .. Len(t)+1 - /\ Len(t) \in (Len(t) - Len(s) + 1) - 1 .. Len(t) - /\ Len(t) - (Len(t) - Len(s) + 1) + 1 = Len(s) - BY <3>1 - <3>3. Len(s) = Len(SubSeq(t, Len(t)-Len(s)+1, Len(t))) - BY <3>2, SubSeqProperties, Zenon - <3>4. ASSUME NEW i \in 1 .. Len(s) - PROVE s[i] = SubSeq(t, Len(t)-Len(s)+1, Len(t))[i] - BY <3>2, <2>1, SubSeqProperties DEF IsSuffix - <3>. QED BY <3>1, <3>3, <3>4, SeqEqual - <2>2. ASSUME Len(s) <= Len(t), s = SubSeq(t, Len(t)-Len(s)+1, Len(t)) - PROVE IsSuffix(s,t) - <3>1. /\ 1 \in 1 .. Len(t)+1 - /\ Len(t)-Len(s) \in 1-1 .. Len(t) - /\ Len(t) \in Len(t)-Len(s) .. Len(t) - BY <2>2 - <3>2. t = SubSeq(t, 1, Len(t) - Len(s)) \o s - BY <2>2, <3>1, ConcatAdjacentSubSeq, SubSeqFull, Zenon - <3>3. SubSeq(t, 1, Len(t) - Len(s)) \in Seq(S) OBVIOUS - <3>. QED BY <3>2, <3>3, <1>1 - <2>. QED BY <2>1, <2>2 -<1>3. IsSuffix(s,t) <=> IsPrefix(Reverse(s), Reverse(t)) - <2>. /\ Reverse(s) \in Seq(S) - /\ Reverse(t) \in Seq(S) - BY ReverseProperties - <2>1. ASSUME IsSuffix(s,t) - PROVE IsPrefix(Reverse(s), Reverse(t)) - <3>1. PICK u \in Seq(S) : t = u \o s - BY <2>1, <1>1 - <3>2. 
/\ Reverse(u) \in Seq(S) - /\ Reverse(t) = Reverse(s) \o Reverse(u) - BY <3>1, ReverseProperties, ReverseConcat, Zenon - <3>. QED BY <3>2, IsPrefixProperties, Zenon - <2>2. ASSUME IsPrefix(Reverse(s), Reverse(t)) - PROVE IsSuffix(s,t) - <3>1. PICK u \in Seq(S) : Reverse(t) = Reverse(s) \o u - BY <2>2, IsPrefixProperties - <3>2. /\ Reverse(u) \in Seq(S) - /\ Reverse(Reverse(t)) = Reverse(u) \o Reverse(Reverse(s)) - BY <3>1, ReverseProperties, ReverseConcat, Zenon - <3>. QED BY <3>2, <1>1, ReverseProperties, Zenon - <2>. QED BY <2>1, <2>2 -<1>. QED BY <1>1, <1>2, <1>3 - -THEOREM IsStrictSuffixProperties == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) - PROVE /\ IsStrictSuffix(s,t) <=> \E u \in Seq(S) : u # << >> /\ t = u \o s - /\ IsStrictSuffix(s,t) <=> Len(s) < Len(t) /\ IsSuffix(s,t) - /\ IsStrictSuffix(s,t) <=> Len(s) < Len(t) /\ s = SubSeq(t, Len(t)-Len(s)+1, Len(t)) - /\ IsStrictSuffix(s,t) <=> IsStrictPrefix(Reverse(s), Reverse(t)) -<1>1. ASSUME IsStrictSuffix(s,t) - PROVE /\ \E u \in Seq(S) : u # << >> /\ t = u \o s - /\ Len(s) < Len(t) - /\ IsSuffix(s,t) - /\ s = SubSeq(t, Len(t)-Len(s)+1, Len(t)) - /\ IsStrictPrefix(Reverse(s), Reverse(t)) - <2>1. IsSuffix(s,t) /\ s # t - BY <1>1 DEF IsStrictSuffix - <2>2. PICK u \in Seq(S) : t = u \o s - BY <2>1, IsSuffixProperties - <2>3. u # << >> - BY <2>2, <1>1 DEF IsStrictSuffix - <2>4. Len(s) < Len(t) - BY <2>2, <2>3 - <2>5. s = SubSeq(t, Len(t)-Len(s)+1, Len(t)) - BY <2>1, IsSuffixProperties - <2>6. IsStrictPrefix(Reverse(s), Reverse(t)) - BY <2>1, IsSuffixProperties, ReverseEqual DEF IsStrictPrefix - <2>. QED BY <2>1, <2>2, <2>3, <2>4, <2>5, <2>6 -<1>2. ASSUME NEW u \in Seq(S), u # << >>, t = u \o s - PROVE IsStrictSuffix(s,t) - <2>1. IsSuffix(s,t) BY <1>2, IsSuffixProperties, Zenon - <2>2. s # t BY <1>2 - <2>. QED BY <2>1, <2>2 DEF IsStrictSuffix -<1>3. ASSUME Len(s) < Len(t), IsSuffix(s,t) - PROVE IsStrictSuffix(s,t) - BY <1>3, IsSuffixProperties DEF IsStrictSuffix -<1>4. ASSUME Len(s) < Len(t), s = SubSeq(t, Len(t)-Len(s)+1, Len(t)) - PROVE IsStrictSuffix(s,t) - BY <1>4, IsSuffixProperties DEF IsStrictSuffix -<1>5. ASSUME IsStrictPrefix(Reverse(s), Reverse(t)) - PROVE IsStrictSuffix(s,t) - BY <1>5, IsSuffixProperties DEF IsStrictPrefix, IsStrictSuffix -<1>. QED BY <1>1, <1>2, <1>3, <1>4, <1>5 - -THEOREM IsSuffixElts == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW i \in 1 .. Len(s), - IsSuffix(s,t) - PROVE s[i] = t[Len(t) - Len(s) + i] -BY IsSuffixProperties - -THEOREM EmptyIsSuffix == - ASSUME NEW S, NEW s \in Seq(S) - PROVE /\ IsSuffix(<<>>, s) - /\ IsSuffix(s, <<>>) <=> s = <<>> - /\ IsStrictSuffix(<<>>, s) <=> s # <<>> - /\ ~ IsStrictSuffix(s, <<>>) -BY IsSuffixProperties, IsStrictSuffixProperties - -THEOREM IsSuffixConcat == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S) - PROVE IsSuffix(s, t \o s) -BY IsSuffixProperties, ConcatProperties, Zenon - -THEOREM IsStrictSuffixCons == - ASSUME NEW S, NEW s \in Seq(S), NEW e \in S - PROVE IsStrictSuffix(s, Cons(e,s)) -BY IsStrictSuffixProperties DEF Cons - -THEOREM TailIsSuffix == - ASSUME NEW S, NEW s \in Seq(S) - PROVE /\ IsSuffix(Tail(s), s) - /\ s # <<>> => IsStrictSuffix(Tail(s), s) -<1>1. CASE s = <<>> - BY <1>1, Tail(<<>>) = <<>>, EmptyIsSuffix -<1>2. CASE s # <<>> - <2>. Head(s) \in S /\ Tail(s) \in Seq(S) - BY <1>2 - <2>1. IsStrictSuffix(Tail(s), Cons(Head(s), Tail(s))) - BY IsStrictSuffixCons, Zenon - <2>. QED BY <1>2, <2>1, ConsHeadTail DEF IsStrictSuffix -<1>. 
QED BY <1>1, <1>2 - -THEOREM IsSuffixPartialOrder == - ASSUME NEW S - PROVE /\ \A s \in Seq(S) : IsSuffix(s,s) - /\ \A s,t \in Seq(S) : IsSuffix(s,t) /\ IsSuffix(t,s) => s = t - /\ \A s,t,u \in Seq(S) : IsSuffix(s,t) /\ IsSuffix(t,u) => IsSuffix(s,u) -<1>1. ASSUME NEW s \in Seq(S) PROVE IsSuffix(s,s) - BY IsSuffixProperties -<1>2. ASSUME NEW s \in Seq(S), NEW t \in Seq(S), IsSuffix(s,t), IsSuffix(t,s) - PROVE s = t - <2>1. PICK v \in Seq(S) : t = v \o s - BY <1>2, IsSuffixProperties - <2>2. PICK w \in Seq(S) : s = w \o t - BY <1>2, IsSuffixProperties - <2>3. /\ v \o w \in Seq(S) - /\ (v \o w) \o t = t - BY <2>1, <2>2 - <2>. QED BY <2>2, <2>3 -<1>3. ASSUME NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S), - IsSuffix(s,t), IsSuffix(t,u) - PROVE IsSuffix(s,u) - <2>1. PICK v \in Seq(S) : t = v \o s - BY <1>3, IsSuffixProperties - <2>2. PICK w \in Seq(S) : u = w \o t - BY <1>3, IsSuffixProperties - <2>3. /\ w \o v \in Seq(S) - /\ u = (w \o v) \o s - BY <2>1, <2>2 - <2>. QED BY <2>3, IsSuffixProperties, Zenon -<1>. QED BY <1>1, <1>2, <1>3 - -THEOREM ConcatIsSuffix == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S), - IsSuffix(s \o t, u) - PROVE IsSuffix(t, u) -<1>1. /\ s \o t \in Seq(S) - /\ IsSuffix(t, s \o t) - BY IsSuffixConcat -<1>. QED BY <1>1, IsSuffixPartialOrder, Zenon - -THEOREM ConcatIsSuffixCancel == - ASSUME NEW S, NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S) - PROVE IsSuffix(s \o t, u \o t) <=> IsSuffix(s, u) -<1>1. ASSUME IsSuffix(s, u) PROVE IsSuffix(s \o t, u \o t) - <2>1. PICK v \in Seq(S) : u = v \o s BY <1>1, IsSuffixProperties - <2>2. u \o t = v \o (s \o t) BY <2>1 - <2>. QED BY s \o t \in Seq(S), u \o t \in Seq(S), <2>2, IsSuffixProperties, ZenonT(20) -<1>2. ASSUME IsSuffix(s \o t, u \o t) PROVE IsSuffix(s, u) - <2>1. PICK v \in Seq(S) : u \o t = v \o (s \o t) - BY <1>2, s \o t \in Seq(S), u \o t \in Seq(S), IsSuffixProperties, Isa - <2>2. u \o t = (v \o s) \o t - BY <2>1 - <2>3. u = v \o s - BY v \o s \in Seq(S), <2>2, ConcatSimplifications - <2>. QED BY <2>3, IsSuffixProperties, Zenon -<1>. QED BY <1>1, <1>2 - -THEOREM AppendIsSuffixCancel == - ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW t \in Seq(S) - PROVE IsSuffix(Append(s,e), Append(t,e)) <=> IsSuffix(s,t) -BY <<e>> \in Seq(S), ConcatIsSuffixCancel, AppendIsConcat, Isa - -THEOREM AppendIsSuffix == - ASSUME NEW S, NEW e \in S, NEW s \in Seq(S), NEW u \in Seq(S), - IsSuffix(Append(s,e), u) - PROVE /\ e = Last(u) - /\ IsSuffix(s, Front(u)) -<1>. <<e>> \in Seq(S) - OBVIOUS -<1>1. IsSuffix(<<e>>, u) - BY ConcatIsSuffix, AppendIsConcat, Isa -<1>2. PICK v \in Seq(S) : u = Append(v,e) - BY <1>1, IsSuffixProperties, AppendIsConcat, Isa -<1>3. /\ e = Last(u) - /\ v = Front(u) - /\ IsSuffix(Append(s,e), Append(Front(u),e)) - BY <1>2, FrontLastAppend -<1>. QED - BY <1>3, AppendIsSuffixCancel, Zenon - -THEOREM IsStrictSuffixStrictPartialOrder == - ASSUME NEW S - PROVE /\ \A s \in Seq(S) : ~ IsStrictSuffix(s,s) - /\ \A s,t \in Seq(S) : IsStrictSuffix(s,t) => ~ IsStrictSuffix(t,s) - /\ \A s,t,u \in Seq(S) : IsStrictSuffix(s,t) /\ IsStrictSuffix(t,u) => IsStrictSuffix(s,u) -<1>1. ASSUME NEW s \in Seq(S) PROVE ~ IsStrictSuffix(s,s) - BY DEF IsStrictSuffix -<1>2. ASSUME NEW s \in Seq(S), NEW t \in Seq(S), IsStrictSuffix(s,t) - PROVE ~ IsStrictSuffix(t,s) - BY <1>2, IsSuffixPartialOrder DEF IsStrictSuffix -<1>3. ASSUME NEW s \in Seq(S), NEW t \in Seq(S), NEW u \in Seq(S), - IsStrictSuffix(s,t), IsStrictSuffix(t,u) - PROVE IsStrictSuffix(s,u) - <2>1. 
/\ IsSuffix(s,t) /\ Len(s) < Len(t) - /\ IsSuffix(t,u) /\ Len(t) < Len(u) - BY <1>3, IsStrictSuffixProperties - <2>2. IsSuffix(s,u) - BY <2>1, IsSuffixPartialOrder, Zenon - <2>3. Len(s) < Len(u) - BY <2>1 - <2>. QED BY <2>2, <2>3, IsStrictSuffixProperties -<1>4. QED BY <1>1, <1>2, <1>3 - -THEOREM IsStrictSuffixWellFounded == - ASSUME NEW S - PROVE IsWellFoundedOn(OpToRel(IsStrictSuffix, Seq(S)), Seq(S)) -<1>1. IsWellFoundedOn(PreImage(Len, Seq(S), OpToRel(<, Nat)), Seq(S)) - BY NatLessThanWellFounded, PreImageWellFounded, \A s \in Seq(S) : Len(s) \in Nat, Blast -<1>2. OpToRel(IsStrictSuffix, Seq(S)) \subseteq PreImage(Len, Seq(S), OpToRel(<, Nat)) - BY IsStrictSuffixProperties DEF PreImage, OpToRel -<1>. QED - BY <1>1, <1>2, IsWellFoundedOnSubrelation, Zenon - -THEOREM SeqStrictSuffixInduction == - ASSUME NEW P(_), NEW S, - \A t \in Seq(S) : (\A s \in Seq(S) : IsStrictSuffix(s,t) => P(s)) => P(t) - PROVE \A s \in Seq(S) : P(s) -<1>1. \A t \in Seq(S) : - (\A s \in SetLessThan(t, OpToRel(IsStrictSuffix, Seq(S)), Seq(S)) : P(s)) - => P(t) - BY DEF SetLessThan, OpToRel -<1>. QED BY WFInduction, IsStrictSuffixWellFounded, <1>1, Blast - -(***************************************************************************) -(* Since the (strict) prefix and suffix orderings on sequences are *) -(* well-founded, they can be used for defining recursive functions. *) -(* The operators OpDefinesFcn, WFInductiveDefines, and WFInductiveUnique *) -(* are defined in module WellFoundedInduction. *) -(***************************************************************************) - -StrictPrefixesDetermineDef(S, Def(_,_)) == - \A g,h : \A seq \in Seq(S) : - (\A pre \in Seq(S) : IsStrictPrefix(pre,seq) => g[pre] = h[pre]) - => Def(g, seq) = Def(h, seq) - -LEMMA StrictPrefixesDetermineDef_WFDefOn == - ASSUME NEW S, NEW Def(_,_), StrictPrefixesDetermineDef(S, Def) - PROVE WFDefOn(OpToRel(IsStrictPrefix, Seq(S)), Seq(S), Def) -BY Isa DEF StrictPrefixesDetermineDef, WFDefOn, OpToRel, SetLessThan - -THEOREM PrefixRecursiveSequenceFunctionUnique == - ASSUME NEW S, NEW Def(_,_), StrictPrefixesDetermineDef(S, Def) - PROVE WFInductiveUnique(Seq(S), Def) -BY StrictPrefixesDetermineDef_WFDefOn, IsStrictPrefixWellFounded, WFDefOnUnique - -THEOREM PrefixRecursiveSequenceFunctionDef == - ASSUME NEW S, NEW Def(_,_), NEW f, - StrictPrefixesDetermineDef(S, Def), - OpDefinesFcn(f, Seq(S), Def) - PROVE WFInductiveDefines(f, Seq(S), Def) -BY StrictPrefixesDetermineDef_WFDefOn, IsStrictPrefixWellFounded, WFInductiveDef - -THEOREM PrefixRecursiveSequenceFunctionType == - ASSUME NEW S, NEW T, NEW Def(_,_), NEW f, - T # {}, - StrictPrefixesDetermineDef(S, Def), - WFInductiveDefines(f, Seq(S), Def), - \A g \in [Seq(S) -> T], s \in Seq(S) : Def(g,s) \in T - PROVE f \in [Seq(S) -> T] -<1>1. IsWellFoundedOn(OpToRel(IsStrictPrefix, Seq(S)), Seq(S)) - BY IsStrictPrefixWellFounded -<1>2. WFDefOn(OpToRel(IsStrictPrefix, Seq(S)), Seq(S), Def) - BY StrictPrefixesDetermineDef_WFDefOn -<1>. 
QED - BY <1>1, <1>2, WFInductiveDefType, Isa - -StrictSuffixesDetermineDef(S, Def(_,_)) == - \A g,h : \A seq \in Seq(S) : - (\A suf \in Seq(S) : IsStrictSuffix(suf,seq) => g[suf] = h[suf]) - => Def(g, seq) = Def(h, seq) - -LEMMA StrictSuffixesDetermineDef_WFDefOn == - ASSUME NEW S, NEW Def(_,_), StrictSuffixesDetermineDef(S, Def) - PROVE WFDefOn(OpToRel(IsStrictSuffix, Seq(S)), Seq(S), Def) -BY Isa DEF StrictSuffixesDetermineDef, WFDefOn, OpToRel, SetLessThan - -THEOREM SuffixRecursiveSequenceFunctionUnique == - ASSUME NEW S, NEW Def(_,_), StrictSuffixesDetermineDef(S, Def) - PROVE WFInductiveUnique(Seq(S), Def) -BY StrictSuffixesDetermineDef_WFDefOn, IsStrictSuffixWellFounded, WFDefOnUnique - -THEOREM SuffixRecursiveSequenceFunctionDef == - ASSUME NEW S, NEW Def(_,_), NEW f, - StrictSuffixesDetermineDef(S, Def), - OpDefinesFcn(f, Seq(S), Def) - PROVE WFInductiveDefines(f, Seq(S), Def) -BY StrictSuffixesDetermineDef_WFDefOn, IsStrictSuffixWellFounded, WFInductiveDef - -THEOREM SuffixRecursiveSequenceFunctionType == - ASSUME NEW S, NEW T, NEW Def(_,_), NEW f, - T # {}, - StrictSuffixesDetermineDef(S, Def), - WFInductiveDefines(f, Seq(S), Def), - \A g \in [Seq(S) -> T], s \in Seq(S) : Def(g,s) \in T - PROVE f \in [Seq(S) -> T] -<1>1. IsWellFoundedOn(OpToRel(IsStrictSuffix, Seq(S)), Seq(S)) - BY IsStrictSuffixWellFounded -<1>2. WFDefOn(OpToRel(IsStrictSuffix, Seq(S)), Seq(S), Def) - BY StrictSuffixesDetermineDef_WFDefOn -<1>. QED - BY <1>1, <1>2, WFInductiveDefType, Isa - -(***************************************************************************) -(* The following theorems justify ``primitive recursive'' functions over *) -(* sequences, with a base case for the empty sequence and recursion along *) -(* either the Tail or the Front of a non-empty sequence. *) -(***************************************************************************) - -TailInductiveDefHypothesis(f, S, f0, Def(_,_)) == - f = CHOOSE g : g = [s \in Seq(S) |-> IF s = <<>> THEN f0 ELSE Def(g[Tail(s)], s)] - -TailInductiveDefConclusion(f, S, f0, Def(_,_)) == - f = [s \in Seq(S) |-> IF s = <<>> THEN f0 ELSE Def(f[Tail(s)], s)] - -THEOREM TailInductiveDef == - ASSUME NEW S, NEW Def(_,_), NEW f, NEW f0, - TailInductiveDefHypothesis(f, S, f0, Def) - PROVE TailInductiveDefConclusion(f, S, f0, Def) -<1>. DEFINE Op(h,s) == IF s = <<>> THEN f0 ELSE Def(h[Tail(s)], s) -<1>1. StrictSuffixesDetermineDef(S, Op) - <2>. SUFFICES ASSUME NEW g, NEW h, NEW seq \in Seq(S), - \A suf \in Seq(S) : IsStrictSuffix(suf, seq) => g[suf] = h[suf] - PROVE Op(g, seq) = Op(h, seq) - BY DEF StrictSuffixesDetermineDef, Zenon - <2>1. CASE seq = <<>> - BY <2>1 - <2>2. CASE seq # <<>> - <3>1. /\ Tail(seq) \in Seq(S) - /\ IsStrictSuffix(Tail(seq), seq) - BY <2>2, TailIsSuffix - <3>2. g[Tail(seq)] = h[Tail(seq)] - BY <3>1, Zenon - <3>. QED - BY <2>2, <3>2 - <2>. QED BY <2>1, <2>2 -<1>2. OpDefinesFcn(f, Seq(S), Op) - BY DEF OpDefinesFcn, TailInductiveDefHypothesis -<1>3. WFInductiveDefines(f, Seq(S), Op) - BY <1>1, <1>2, SuffixRecursiveSequenceFunctionDef -<1>. QED BY <1>3 DEF WFInductiveDefines, TailInductiveDefConclusion - -THEOREM TailInductiveDefType == - ASSUME NEW S, NEW Def(_,_), NEW f, NEW f0, NEW T, - TailInductiveDefConclusion(f, S, f0, Def), - f0 \in T, - \A v \in T, s \in Seq(S) : s # <<>> => Def(v,s) \in T - PROVE f \in [Seq(S) -> T] -<1>. SUFFICES \A s \in Seq(S) : f[s] \in T - BY DEF TailInductiveDefConclusion -<1>1. f[<<>>] \in T - BY <<>> \in Seq(S) DEF TailInductiveDefConclusion -<1>2. 
ASSUME NEW seq \in Seq(S), NEW e \in S, f[seq] \in T - PROVE f[Cons(e, seq)] \in T - <2>1. /\ Cons(e, seq) \in Seq(S) - /\ Cons(e, seq) # <<>> - /\ Tail(Cons(e, seq)) = seq - BY ConsProperties - <2>2. f[Cons(e, seq)] = Def(f[seq], Cons(e,seq)) - BY <2>1 DEF TailInductiveDefConclusion - <2>. QED BY <1>2, <2>1, <2>2 -<1>. QED BY <1>1, <1>2, SequencesInductionCons, Isa - -FrontInductiveDefHypothesis(f, S, f0, Def(_,_)) == - f = CHOOSE g : g = [s \in Seq(S) |-> IF s = <<>> THEN f0 ELSE Def(g[Front(s)], s)] - -FrontInductiveDefConclusion(f, S, f0, Def(_,_)) == - f = [s \in Seq(S) |-> IF s = <<>> THEN f0 ELSE Def(f[Front(s)], s)] - -THEOREM FrontInductiveDef == - ASSUME NEW S, NEW Def(_,_), NEW f, NEW f0, - FrontInductiveDefHypothesis(f, S, f0, Def) - PROVE FrontInductiveDefConclusion(f, S, f0, Def) -<1>. DEFINE Op(h,s) == IF s = <<>> THEN f0 ELSE Def(h[Front(s)], s) -<1>1. StrictPrefixesDetermineDef(S, Op) - <2>. SUFFICES ASSUME NEW g, NEW h, NEW seq \in Seq(S), - \A pre \in Seq(S) : IsStrictPrefix(pre, seq) => g[pre] = h[pre] - PROVE Op(g, seq) = Op(h, seq) - BY DEF StrictPrefixesDetermineDef, Zenon - <2>1. CASE seq = <<>> - BY <2>1 - <2>2. CASE seq # <<>> - <3>1. /\ Front(seq) \in Seq(S) - /\ IsStrictPrefix(Front(seq), seq) - BY <2>2, FrontProperties, FrontIsPrefix - <3>2. g[Front(seq)] = h[Front(seq)] - BY <3>1, Zenon - <3>. QED - BY <2>2, <3>2 - <2>. QED BY <2>1, <2>2 -<1>2. OpDefinesFcn(f, Seq(S), Op) - BY DEF OpDefinesFcn, FrontInductiveDefHypothesis -<1>3. WFInductiveDefines(f, Seq(S), Op) - BY <1>1, <1>2, PrefixRecursiveSequenceFunctionDef -<1>. QED BY <1>3 DEF WFInductiveDefines, FrontInductiveDefConclusion - -THEOREM FrontInductiveDefType == - ASSUME NEW S, NEW Def(_,_), NEW f, NEW f0, NEW T, - FrontInductiveDefConclusion(f, S, f0, Def), - f0 \in T, - \A v \in T, s \in Seq(S) : s # <<>> => Def(v,s) \in T - PROVE f \in [Seq(S) -> T] -<1>. SUFFICES \A s \in Seq(S) : f[s] \in T - BY DEF FrontInductiveDefConclusion -<1>1. f[<<>>] \in T - BY <<>> \in Seq(S) DEF FrontInductiveDefConclusion -<1>2. ASSUME NEW seq \in Seq(S), NEW e \in S, f[seq] \in T - PROVE f[Append(seq, e)] \in T - <2>1. /\ Append(seq, e) \in Seq(S) - /\ Append(seq, e) # <<>> - /\ Front(Append(seq, e)) = seq - BY AppendProperties, FrontLastAppend - <2>2. f[Append(seq, e)] = Def(f[seq], Append(seq, e)) - BY <2>1 DEF FrontInductiveDefConclusion - <2>. QED BY <1>2, <2>1, <2>2 -<1>. QED BY <1>1, <1>2, SequencesInductionAppend, Isa - -============================================================================= diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/TLAPS.tla b/x/ccv/provider/keyguard/prototyping/tla/library/TLAPS.tla deleted file mode 100644 index 3abf4b1b2e..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/TLAPS.tla +++ /dev/null @@ -1,411 +0,0 @@ -------------------------------- MODULE TLAPS -------------------------------- - -(* Backend pragmas. *) - - -(***************************************************************************) -(* Each of these pragmas can be cited with a BY or a USE. The pragma that *) -(* is added to the context of an obligation most recently is the one whose *) -(* effects are triggered. *) -(***************************************************************************) - -(***************************************************************************) -(* The following pragmas should be used only as a last resource. They are *) -(* dependent upon the particular backend provers, and are unlikely to have *) -(* any effect if the set of backend provers changes. 
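For instance, a step closed by BY Z3 alone may need a different pragma if Z3 is ever removed from the set of backends. 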
Moreover, they are *) -(* meaningless to a reader of the proof. *) -(***************************************************************************) - - -(**************************************************************************) -(* Backend pragma: use the SMT solver for arithmetic. *) -(* *) -(* This method exists under this name for historical reasons. *) -(**************************************************************************) - -SimpleArithmetic == TRUE (*{ by (prover:"smt3") }*) - - -(**************************************************************************) -(* Backend pragma: SMT solver *) -(* *) -(* This method translates the proof obligation to SMTLIB2. The supported *) -(* fragment includes first-order logic, set theory, functions and *) -(* records. *) -(* SMT calls the smt-solver with the default timeout of 5 seconds *) -(* while SMTT(n) calls the smt-solver with a timeout of n seconds. *) -(**************************************************************************) - -SMT == TRUE (*{ by (prover:"smt3") }*) -SMTT(X) == TRUE (*{ by (prover:"smt3"; timeout:@) }*) - - -(**************************************************************************) -(* Backend pragma: CVC3 SMT solver *) -(* *) -(* CVC3 is used by default but you can also explicitly call it. *) -(**************************************************************************) - -CVC3 == TRUE (*{ by (prover: "cvc33") }*) -CVC3T(X) == TRUE (*{ by (prover:"cvc33"; timeout:@) }*) - -(**************************************************************************) -(* Backend pragma: Yices SMT solver *) -(* *) -(* This method translates the proof obligation to Yices native language. *) -(**************************************************************************) - -Yices == TRUE (*{ by (prover: "yices3") }*) -YicesT(X) == TRUE (*{ by (prover:"yices3"; timeout:@) }*) - -(**************************************************************************) -(* Backend pragma: veriT SMT solver *) -(* *) -(* This method translates the proof obligation to SMTLIB2 and calls veriT.*) -(**************************************************************************) - -veriT == TRUE (*{ by (prover: "verit") }*) -veriTT(X) == TRUE (*{ by (prover:"verit"; timeout:@) }*) - -(**************************************************************************) -(* Backend pragma: Z3 SMT solver *) -(* *) -(* This method translates the proof obligation to SMTLIB2 and calls Z3. *) -(**************************************************************************) - -Z3 == TRUE (*{ by (prover: "z33") }*) -Z3T(X) == TRUE (*{ by (prover:"z33"; timeout:@) }*) - -(**************************************************************************) -(* Backend pragma: SPASS superposition prover *) -(* *) -(* This method translates the proof obligation to the DFG format language *) -(* supported by the ATP SPASS. The translation is based on the SMT one. *) -(**************************************************************************) - -Spass == TRUE (*{ by (prover: "spass") }*) -SpassT(X) == TRUE (*{ by (prover:"spass"; timeout:@) }*) - -(**************************************************************************) -(* Backend pragma: The PTL propositional linear time temporal logic *) -(* prover. It currently is the LS4 backend. 
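A temporal leaf obligation is typically discharged by citing this pragma, as in BY PTL. 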
*) -(* *) -(* This method translates the negation of the proof obligation to *) -(* Separated Normal Form (TRP++ format) and checks for unsatisfiability *) -(**************************************************************************) - -LS4 == TRUE (*{ by (prover: "ls4") }*) -PTL == TRUE (*{ by (prover: "ls4") }*) - -(**************************************************************************) -(* Backend pragma: Zenon with different timeouts (default is 10 seconds) *) -(* *) -(**************************************************************************) - -Zenon == TRUE (*{ by (prover:"zenon") }*) -ZenonT(X) == TRUE (*{ by (prover:"zenon"; timeout:@) }*) - -(********************************************************************) -(* Backend pragma: Isabelle with different timeouts and tactics *) -(* (default is 30 seconds/auto) *) -(********************************************************************) - -Isa == TRUE (*{ by (prover:"isabelle") }*) -IsaT(X) == TRUE (*{ by (prover:"isabelle"; timeout:@) }*) -IsaM(X) == TRUE (*{ by (prover:"isabelle"; tactic:@) }*) -IsaMT(X,Y) == TRUE (*{ by (prover:"isabelle"; tactic:@; timeout:@) }*) - -(***************************************************************************) -(* The following theorem expresses the (useful implication of the) law of *) -(* set extensionality, which can be written as *) -(* *) -(* THEOREM \A S, T : (S = T) <=> (\A x : (x \in S) <=> (x \in T)) *) -(* *) -(* Theorem SetExtensionality is sometimes required by the SMT backend for *) -(* reasoning about sets. It is usually counterproductive to include *) -(* theorem SetExtensionality in a BY clause for the Zenon or Isabelle *) -(* backends. Instead, use the pragma IsaWithSetExtensionality to instruct *) -(* the Isabelle backend to use the rule of set extensionality. 
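For example, a goal of the form S = T, where \A x : x \in S <=> x \in T is known, is more likely to be proved by BY IsaWithSetExtensionality than by BY Isa, SetExtensionality. 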
*) -(***************************************************************************) -IsaWithSetExtensionality == TRUE - (*{ by (prover:"isabelle"; tactic:"(auto intro: setEqualI)")}*) - -THEOREM SetExtensionality == \A S,T : (\A x : x \in S <=> x \in T) => S = T -OBVIOUS - -(***************************************************************************) -(* The following theorem is needed to deduce NotInSetS \notin SetS from *) -(* the definition *) -(* *) -(* NotInSetS == CHOOSE v : v \notin SetS *) -(***************************************************************************) -THEOREM NoSetContainsEverything == \A S : \E x : x \notin S -OBVIOUS (*{by (isabelle "(auto intro: inIrrefl)")}*) ----------------------------------------------------------------------------- - - - -(********************************************************************) -(********************************************************************) -(********************************************************************) - - -(********************************************************************) -(* Old versions of Zenon and Isabelle pragmas below *) -(* (kept for compatibility) *) -(********************************************************************) - - -(**************************************************************************) -(* Backend pragma: Zenon with different timeouts (default is 10 seconds) *) -(* *) -(**************************************************************************) - -SlowZenon == TRUE (*{ by (prover:"zenon"; timeout:20) }*) -SlowerZenon == TRUE (*{ by (prover:"zenon"; timeout:40) }*) -VerySlowZenon == TRUE (*{ by (prover:"zenon"; timeout:80) }*) -SlowestZenon == TRUE (*{ by (prover:"zenon"; timeout:160) }*) - - - -(********************************************************************) -(* Backend pragma: Isabelle's automatic search ("auto") *) -(* *) -(* This pragma bypasses Zenon. It is useful in situations involving *) -(* essentially simplification and equational reasoning. *) -(* Default timeout for all isabelle tactics is 30 seconds. *) -(********************************************************************) -Auto == TRUE (*{ by (prover:"isabelle"; tactic:"auto") }*) -SlowAuto == TRUE (*{ by (prover:"isabelle"; tactic:"auto"; timeout:120) }*) -SlowerAuto == TRUE (*{ by (prover:"isabelle"; tactic:"auto"; timeout:480) }*) -SlowestAuto == TRUE (*{ by (prover:"isabelle"; tactic:"auto"; timeout:960) }*) - -(********************************************************************) -(* Backend pragma: Isabelle's "force" tactic *) -(* *) -(* This pragma bypasses Zenon. It is useful in situations involving *) -(* quantifier reasoning. *) -(********************************************************************) -Force == TRUE (*{ by (prover:"isabelle"; tactic:"force") }*) -SlowForce == TRUE (*{ by (prover:"isabelle"; tactic:"force"; timeout:120) }*) -SlowerForce == TRUE (*{ by (prover:"isabelle"; tactic:"force"; timeout:480) }*) -SlowestForce == TRUE (*{ by (prover:"isabelle"; tactic:"force"; timeout:960) }*) - -(***********************************************************************) -(* Backend pragma: Isabelle's "simplification" tactics *) -(* *) -(* These tactics simplify the goal before running one of the automated *) -(* tactics. They are often necessary for obligations involving record *) -(* or tuple projections. 
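A goal such as [r EXCEPT !.a = 1].a = 1, for a record r with an a field, is a typical example. 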
Use the SimplifyAndSolve tactic unless you're *) -(* sure you can get away with just Simplification *) -(***********************************************************************) -SimplifyAndSolve == TRUE - (*{ by (prover:"isabelle"; tactic:"clarsimp auto?") }*) -SlowSimplifyAndSolve == TRUE - (*{ by (prover:"isabelle"; tactic:"clarsimp auto?"; timeout:120) }*) -SlowerSimplifyAndSolve == TRUE - (*{ by (prover:"isabelle"; tactic:"clarsimp auto?"; timeout:480) }*) -SlowestSimplifyAndSolve == TRUE - (*{ by (prover:"isabelle"; tactic:"clarsimp auto?"; timeout:960) }*) - -Simplification == TRUE (*{ by (prover:"isabelle"; tactic:"clarsimp") }*) -SlowSimplification == TRUE - (*{ by (prover:"isabelle"; tactic:"clarsimp"; timeout:120) }*) -SlowerSimplification == TRUE - (*{ by (prover:"isabelle"; tactic:"clarsimp"; timeout:480) }*) -SlowestSimplification == TRUE - (*{ by (prover:"isabelle"; tactic:"clarsimp"; timeout:960) }*) - -(**************************************************************************) -(* Backend pragma: Isabelle's tableau prover ("blast") *) -(* *) -(* This pragma bypasses Zenon and uses Isabelle's built-in theorem *) -(* prover, Blast. It is almost never better than Zenon by itself, but *) -(* becomes very useful in combination with the Auto pragma above. The *) -(* AutoBlast pragma first attempts Auto and then uses Blast to prove what *) -(* Auto could not prove. (There is currently no way to use Zenon on the *) -(* results left over from Auto.) *) -(**************************************************************************) -Blast == TRUE (*{ by (prover:"isabelle"; tactic:"blast") }*) -SlowBlast == TRUE (*{ by (prover:"isabelle"; tactic:"blast"; timeout:120) }*) -SlowerBlast == TRUE (*{ by (prover:"isabelle"; tactic:"blast"; timeout:480) }*) -SlowestBlast == TRUE (*{ by (prover:"isabelle"; tactic:"blast"; timeout:960) }*) - -AutoBlast == TRUE (*{ by (prover:"isabelle"; tactic:"auto, blast") }*) - - -(**************************************************************************) -(* Backend pragmas: multi-back-ends *) -(* *) -(* These pragmas just run a bunch of back-ends one after the other in the *) -(* hope that one will succeed. This saves time and effort for the user at *) -(* the expense of computation time. *) -(**************************************************************************) - -(* CVC3 goes first because it's bundled with TLAPS, then the other SMT - solvers are unlikely to succeed if CVC3 fails, so we run zenon and - Isabelle before them. 
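When it is unclear which backend will succeed, a leaf step can simply be closed with BY AllProvers, or with BY AllProversT(60) to give each backend a longer timeout. 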
*) -AllProvers == TRUE (*{ - by (prover:"cvc33") - by (prover:"zenon") - by (prover:"isabelle"; tactic:"auto") - by (prover:"spass") - by (prover:"smt3") - by (prover:"yices3") - by (prover:"verit") - by (prover:"z33") - by (prover:"isabelle"; tactic:"force") - by (prover:"isabelle"; tactic:"(auto intro: setEqualI)") - by (prover:"isabelle"; tactic:"clarsimp auto?") - by (prover:"isabelle"; tactic:"clarsimp") - by (prover:"isabelle"; tactic:"auto, blast") - }*) -AllProversT(X) == TRUE (*{ - by (prover:"cvc33"; timeout:@) - by (prover:"zenon"; timeout:@) - by (prover:"isabelle"; tactic:"auto"; timeout:@) - by (prover:"spass"; timeout:@) - by (prover:"smt3"; timeout:@) - by (prover:"yices3"; timeout:@) - by (prover:"verit"; timeout:@) - by (prover:"z33"; timeout:@) - by (prover:"isabelle"; tactic:"force"; timeout:@) - by (prover:"isabelle"; tactic:"(auto intro: setEqualI)"; timeout:@) - by (prover:"isabelle"; tactic:"clarsimp auto?"; timeout:@) - by (prover:"isabelle"; tactic:"clarsimp"; timeout:@) - by (prover:"isabelle"; tactic:"auto, blast"; timeout:@) - }*) - -AllSMT == TRUE (*{ - by (prover:"cvc33") - by (prover:"smt3") - by (prover:"yices3") - by (prover:"verit") - by (prover:"z33") - }*) -AllSMTT(X) == TRUE (*{ - by (prover:"cvc33"; timeout:@) - by (prover:"smt3"; timeout:@) - by (prover:"yices3"; timeout:@) - by (prover:"verit"; timeout:@) - by (prover:"z33"; timeout:@) - }*) - -AllIsa == TRUE (*{ - by (prover:"isabelle"; tactic:"auto") - by (prover:"isabelle"; tactic:"force") - by (prover:"isabelle"; tactic:"(auto intro: setEqualI)") - by (prover:"isabelle"; tactic:"clarsimp auto?") - by (prover:"isabelle"; tactic:"clarsimp") - by (prover:"isabelle"; tactic:"auto, blast") - }*) -AllIsaT(X) == TRUE (*{ - by (prover:"isabelle"; tactic:"auto"; timeout:@) - by (prover:"isabelle"; tactic:"force"; timeout:@) - by (prover:"isabelle"; tactic:"(auto intro: setEqualI)"; timeout:@) - by (prover:"isabelle"; tactic:"clarsimp auto?"; timeout:@) - by (prover:"isabelle"; tactic:"clarsimp"; timeout:@) - by (prover:"isabelle"; tactic:"auto, blast"; timeout:@) - }*) - ----------------------------------------------------------------------------- -(***************************************************************************) -(* TEMPORAL LOGIC *) -(* *) -(* The following rules are intended to be used when TLAPS handles temporal *) -(* logic. They will not work now. Moreover when temporal reasoning is *) -(* implemented, these rules may be changed or omitted, and additional *) -(* rules will probably be added. However, they are included mainly so *) -(* their names will be defined, preventing the use of identifiers that are *) -(* likely to produce name clashes with future versions of this module. *) -(***************************************************************************) - - -(***************************************************************************) -(* The following proof rules (and their names) are from the paper "The *) -(* Temporal Logic of Actions". 
*) -(***************************************************************************) -THEOREM RuleTLA1 == ASSUME STATE P, STATE f, - P /\ (f' = f) => P' - PROVE []P <=> P /\ [][P => P']_f - -THEOREM RuleTLA2 == ASSUME STATE P, STATE Q, STATE f, STATE g, - ACTION A, ACTION B, - P /\ [A]_f => Q /\ [B]_g - PROVE []P /\ [][A]_f => []Q /\ [][B]_g - -THEOREM RuleINV1 == ASSUME STATE I, STATE F, ACTION N, - I /\ [N]_F => I' - PROVE I /\ [][N]_F => []I - -THEOREM RuleINV2 == ASSUME STATE I, STATE f, ACTION N - PROVE []I => ([][N]_f <=> [][N /\ I /\ I']_f) - -THEOREM RuleWF1 == ASSUME STATE P, STATE Q, STATE f, ACTION N, ACTION A, - P /\ [N]_f => (P' \/ Q'), - P /\ <>_f => Q', - P => ENABLED <>_f - PROVE [][N]_f /\ WF_f(A) => (P ~> Q) - -THEOREM RuleSF1 == ASSUME STATE P, STATE Q, STATE f, - ACTION N, ACTION A, TEMPORAL F, - P /\ [N]_f => (P' \/ Q'), - P /\ <>_f => Q', - []P /\ [][N]_f /\ []F => <> ENABLED <>_f - PROVE [][N]_f /\ SF_f(A) /\ []F => (P ~> Q) - -(***************************************************************************) -(* The rules WF2 and SF2 in "The Temporal Logic of Actions" are obtained *) -(* from the following two rules by the following substitutions: `. *) -(* *) -(* ___ ___ _______________ *) -(* M <- M , g <- g , EM <- ENABLED <>_g .' *) -(***************************************************************************) -THEOREM RuleWF2 == ASSUME STATE P, STATE f, STATE g, STATE EM, - ACTION A, ACTION B, ACTION N, ACTION M, - TEMPORAL F, - <>_f => <>_g, - P /\ P' /\ <>_f /\ EM => B, - P /\ EM => ENABLED A, - [][N /\ ~B]_f /\ WF_f(A) /\ []F /\ <>[]EM => <>[]P - PROVE [][N]_f /\ WF_f(A) /\ []F => []<><>_g \/ []<>(~EM) - -THEOREM RuleSF2 == ASSUME STATE P, STATE f, STATE g, STATE EM, - ACTION A, ACTION B, ACTION N, ACTION M, - TEMPORAL F, - <>_f => <>_g, - P /\ P' /\ <>_f /\ EM => B, - P /\ EM => ENABLED A, - [][N /\ ~B]_f /\ SF_f(A) /\ []F /\ []<>EM => <>[]P - PROVE [][N]_f /\ SF_f(A) /\ []F => []<><>_g \/ <>[](~EM) - - -(***************************************************************************) -(* The following rule is a special case of the general temporal logic *) -(* proof rule STL4 from the paper "The Temporal Logic of Actions". The *) -(* general rule is for arbitrary temporal formulas F and G, but it cannot *) -(* yet be handled by TLAPS. *) -(***************************************************************************) -THEOREM RuleInvImplication == - ASSUME STATE F, STATE G, - F => G - PROVE []F => []G -PROOF OMITTED - -(***************************************************************************) -(* The following rule is a special case of rule TLA2 from the paper "The *) -(* Temporal Logic of Actions". *) -(***************************************************************************) -THEOREM RuleStepSimulation == - ASSUME STATE I, STATE f, STATE g, - ACTION M, ACTION N, - I /\ I' /\ [M]_f => [N]_g - PROVE []I /\ [][M]_f => [][N]_g -PROOF OMITTED - -(***************************************************************************) -(* The following may be used to invoke a decision procedure for *) -(* propositional temporal logic. 
*) -(***************************************************************************) -PropositionalTemporalLogic == TRUE -============================================================================= diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/WellFoundedInduction.tla b/x/ccv/provider/keyguard/prototyping/tla/library/WellFoundedInduction.tla deleted file mode 100755 index 43e4107f30..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/WellFoundedInduction.tla +++ /dev/null @@ -1,328 +0,0 @@ ------------------------- MODULE WellFoundedInduction ------------------------ -(***************************************************************************) -(* This module contains useful theorems for inductive proofs and recursive *) -(* definitions based on a well-founded ordering. *) -(* *) -(* Most of the statements of the theorems are decomposed in terms of *) -(* definitions. This is done for two reasons: *) -(* *) -(* - It makes it easier for the backends to instantiate the theorems *) -(* when those definitions are not expanded. In fact, at the moment *) -(* the provers can't prove any of those theorems from the theorem *) -(* itself if the definitions are made usable. *) -(* *) -(* - It can be convenient when writing proofs to use those definitions *) -(* rather than having to write out their expansions. *) -(* *) -(* A relation is represented as a set of ordered pairs, where we write *) -(* <> \in R instead of x R y. It is more convenient to represent *) -(* relations this way rather than as operators such as < . *) -(* *) -(* Proofs of these theorems appear in module WellFoundedInduction_proofs. *) -(***************************************************************************) -EXTENDS NaturalsInduction - -(***************************************************************************) -(* The following defines what it means for a relation R to be transitively *) -(* closed on a set S. In this and other definitions, we think of R as a *) -(* relation on S, meaning that it is a subset of S \X S. However, this is *) -(* not necessary. Our results do not require this as a hypothesis, and it *) -(* is often convenient to apply them when R is a relation on a set *) -(* containing S as a subset. They're even true (though uninteresting) if *) -(* R and S \X S are disjoint sets. *) -(***************************************************************************) -IsTransitivelyClosedOn(R, S) == - \A i, j, k \in S : (<> \in R) /\ (<> \in R) - => (<> \in R) -(***************************************************************************) -(* If we think of R as a less-than relation, then R is well founded on S *) -(* iff there is no "infinitely descending" sequence of elements of S. The *) -(* canonical example of a well founded relation is the ordinary less-than *) -(* relation on the natural numbers. *) -(* *) -(* A S with a well-founded ordering is often called well-ordered. 
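   As a concrete illustration (an editorial aside, not part of the
   original module): written as a set of pairs, the strict order on the
   naturals

       NatLess == { mn \in Nat \X Nat : mn[1] < mn[2] }

   is well-founded on Nat (the module later writes this same relation as
   OpToRel(<, Nat)), whereas the non-strict order
   { mn \in Nat \X Nat : mn[1] <= mn[2] } is not: any constant function
   f \in [Nat -> Nat] is an "infinitely descending" sequence for it,
   since <<f[n+1], f[n]>> is in the relation for every n. NatLess is a
   placeholder name used only in this example.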
*) -(***************************************************************************) -IsWellFoundedOn(R, S) == - ~ \E f \in [Nat -> S] : \A n \in Nat : <> \in R - -LEMMA EmptyIsWellFounded == \A S : IsWellFoundedOn({}, S) - - -LEMMA IsWellFoundedOnSubset == - ASSUME NEW R, NEW S, NEW T \in SUBSET S, - IsWellFoundedOn(R,S) - PROVE IsWellFoundedOn(R,T) - - -LEMMA IsWellFoundedOnSubrelation == - ASSUME NEW S, NEW R, NEW RR, RR \cap (S \X S) \subseteq R, - IsWellFoundedOn(R,S) - PROVE IsWellFoundedOn(RR,S) - - -(***************************************************************************) -(* If we think of R as a less-than relation on S, then the following is *) -(* the set of elements of S that are less than x. *) -(***************************************************************************) -SetLessThan(x, R, S) == {y \in S : <> \in R} - -(***************************************************************************) -(* If we think of R as a less-than relation on S, then R is well-founded *) -(* iff every non-empty subset of S has a minimal element. *) -(***************************************************************************) - -THEOREM WFMin == - ASSUME NEW R, NEW S, - IsWellFoundedOn(R, S), - NEW T, T \subseteq S, T # {} - PROVE \E x \in T : \A y \in T : ~ (<> \in R) - - -THEOREM MinWF == - ASSUME NEW R, NEW S, - \A T \in SUBSET S : T # {} => \E x \in T : \A y \in T : ~ (<> \in R) - PROVE IsWellFoundedOn(R,S) - - -(***************************************************************************) -(* The two following lemmas are simple consequences of theorem WFMin. *) -(***************************************************************************) -LEMMA WellFoundedIsIrreflexive == - ASSUME NEW R, NEW S, NEW x \in S, - IsWellFoundedOn(R, S) - PROVE <> \notin R - - -LEMMA WellFoundedIsAsymmetric == - ASSUME NEW R, NEW S, NEW x \in S, NEW y \in S, - IsWellFoundedOn(R,S), - <> \in R, <> \in R - PROVE FALSE - - -(***************************************************************************) -(* The following lemmas are simple facts about operator SetLessThan. *) -(***************************************************************************) -LEMMA WFSetLessThanIrreflexive == - ASSUME NEW R, NEW S, NEW x \in S, - IsWellFoundedOn(R,S) - PROVE x \notin SetLessThan(x,R,S) - - -LEMMA SetLessTransitive == - ASSUME NEW R, NEW S, NEW x \in S, NEW y \in SetLessThan(x,R,S), - IsTransitivelyClosedOn(R, S) - PROVE SetLessThan(y, R, S) \subseteq SetLessThan(x, R, S) - - ----------------------------------------------------------------------------- -(***************************************************************************) -(* The following theorem is the basis for proof by induction over a *) -(* well-founded set. It generalizes theorem GeneralNatInduction of module *) -(* NaturalsInduction. *) -(***************************************************************************) -THEOREM WFInduction == - ASSUME NEW P(_), NEW R, NEW S, - IsWellFoundedOn(R, S), - \A x \in S : (\A y \in SetLessThan(x, R, S) : P(y)) - => P(x) - PROVE \A x \in S : P(x) - - -(***************************************************************************) -(* Theorem WFInductiveDef below justifies recursive definitions based on a *) -(* well-founded ordering. We first prove it with the hypothesis that the *) -(* ordering is transitively closed. We prove the theorem for an arbitrary *) -(* well-founded relation by applying the special case to its transitive *) -(* closure. 
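   For instance (an editorial aside, not part of the original module),
   taking S == Nat, R == { mn \in Nat \X Nat : mn[1] < mn[2] } and the
   placeholder operator

       FactDef(g, n) == IF n = 0 THEN 1 ELSE n * g[n-1]

   the hypothesis WFDefOn(R, Nat, FactDef) defined below holds, because
   FactDef(g, n) only looks at g on values smaller than n. The conclusion
   WFInductiveDefines(fact, Nat, FactDef) then says exactly that
   fact = [n \in Nat |-> FactDef(fact, n)], i.e. that fact satisfies the
   usual recursion equation of the factorial function.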
*) -(***************************************************************************) -WFDefOn(R, S, Def(_,_)) == - \A g, h : - \A x \in S : - (\A y \in SetLessThan(x, R, S) : g[y] = h[y]) - => (Def(g,x) = Def(h,x)) - -OpDefinesFcn(f, S, Def(_,_)) == - f = CHOOSE g : g = [x \in S |-> Def(g, x)] - -WFInductiveDefines(f, S, Def(_,_)) == - f = [x \in S |-> Def(f, x)] - -WFInductiveUnique(S, Def(_,_)) == - \A g, h : /\ WFInductiveDefines(g, S, Def) - /\ WFInductiveDefines(h, S, Def) - => (g = h) - -THEOREM WFDefOnUnique == - ASSUME NEW Def(_,_), NEW R, NEW S, - IsWellFoundedOn(R, S), WFDefOn(R, S, Def) - PROVE WFInductiveUnique(S, Def) - - -LEMMA WFInductiveDefLemma == - ASSUME NEW Def(_,_), NEW R, NEW S, NEW f, - IsWellFoundedOn(R, S), - IsTransitivelyClosedOn(R, S), - WFDefOn(R, S, Def), - OpDefinesFcn(f, S, Def) - PROVE WFInductiveDefines(f, S, Def) - - -(***************************************************************************) -(* The following defines the transitive closure of the relation R on S. *) -(* More precisely, it is the transitive closure of the restriction of R *) -(* to S. We give an abstract definition of transitive closure as the *) -(* smallest relation that contains R (restricted to S \X S) and that is *) -(* transitively closed, then prove some relevant properties. *) -(***************************************************************************) -TransitiveClosureOn(R,S) == - { ss \in S \X S : - \A U \in SUBSET (S \X S) : - /\ R \cap S \X S \subseteq U - /\ IsTransitivelyClosedOn(U, S) - => ss \in U } - -LEMMA TransitiveClosureThm == - \A R, S : - /\ R \cap S \X S \subseteq TransitiveClosureOn(R, S) - /\ IsTransitivelyClosedOn(TransitiveClosureOn(R, S), S) - - -LEMMA TransitiveClosureMinimal == - ASSUME NEW R, NEW S, NEW U \in SUBSET (S \X S), - R \cap S \X S \subseteq U, - IsTransitivelyClosedOn(U,S) - PROVE TransitiveClosureOn(R,S) \subseteq U - - -(***************************************************************************) -(* The following lemmas are consequences of the two previous ones. The *) -(* first three state closure properties of transitive closure, the fourth *) -(* lemma allows one to chop off a step in the underlying relation for any *) -(* pair in the transitive closure. 
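   To make this concrete (an editorial aside, not part of the original
   module): for the placeholder successor relation

       Succ == { mn \in Nat \X Nat : mn[2] = mn[1] + 1 }

   TransitiveClosureOn(Succ, Nat) is the usual strict order
   { mn \in Nat \X Nat : mn[1] < mn[2] }, and TransitiveClosureChopLast
   then states that whenever i < k there is some j with j + 1 = k such
   that either i = j or i < j.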
*) -(***************************************************************************) - -LEMMA TCTCTC == - ASSUME NEW R, NEW S, NEW i \in S, NEW j \in S, NEW k \in S, - <> \in TransitiveClosureOn(R,S), - <> \in TransitiveClosureOn(R,S) - PROVE <> \in TransitiveClosureOn(R,S) - - -LEMMA TCRTC == - ASSUME NEW R, NEW S, NEW i \in S, NEW j \in S, NEW k \in S, - <> \in TransitiveClosureOn(R,S), <> \in R - PROVE <> \in TransitiveClosureOn(R,S) - - -LEMMA RTCTC == - ASSUME NEW R, NEW S, NEW i \in S, NEW j \in S, NEW k \in S, - <> \in R, <> \in TransitiveClosureOn(R,S) - PROVE <> \in TransitiveClosureOn(R,S) - - -LEMMA TransitiveClosureChopLast == - ASSUME NEW R, NEW S, NEW i \in S, NEW k \in S, <> \in TransitiveClosureOn(R,S) - PROVE \E j \in S : /\ <> \in R - /\ i = j \/ <> \in TransitiveClosureOn(R,S) - - -THEOREM TransitiveClosureWF == - ASSUME NEW R, NEW S, IsWellFoundedOn(R,S) - PROVE IsWellFoundedOn(TransitiveClosureOn(R, S), S) - - -THEOREM WFInductiveDef == - ASSUME NEW Def(_,_), NEW R, NEW S, NEW f, - IsWellFoundedOn(R, S), - WFDefOn(R, S, Def), - OpDefinesFcn(f, S, Def) - PROVE WFInductiveDefines(f, S, Def) - - -(***************************************************************************) -(* Theorem WFInductiveDef allows us to conclude that a recursively defined *) -(* function satisfies its recursion equation. The following result allows *) -(* us to deduce the range of this function. *) -(***************************************************************************) -THEOREM WFInductiveDefType == - ASSUME NEW Def(_,_), NEW f, NEW R, NEW S, NEW T, - T # {}, - IsWellFoundedOn(R, S), - WFDefOn(R, S, Def), - WFInductiveDefines(f, S, Def), - \A g \in [S -> T], s \in S : Def(g, s) \in T - PROVE f \in [S -> T] - - - ---------------------------------------------------------------------------- -(***************************************************************************) -(* Below are some theorems that allow us to derive some useful *) -(* well-founded relations from a given well-founded relation. First, we *) -(* define the operator OpToRel that constructs a relation (a set of *) -(* ordered pairs) from a relation expressed as an operator. *) -(***************************************************************************) -OpToRel(_\prec_, S) == {ss \in S \X S : ss[1] \prec ss[2]} - -(***************************************************************************) -(* To construct well-founded relations from the less-than relation on the *) -(* natural numbers, we first prove that it is well-founded. *) -(***************************************************************************) -THEOREM NatLessThanWellFounded == IsWellFoundedOn(OpToRel(<,Nat), Nat) - - -(***************************************************************************) -(* The next definition would be easier to read if we used the TLA+ *) -(* construct {<> \in T : ... }. However, TLAPS does not suport *) -(* that notation. (It's meaning is rather complicated in the general case *) -(* when T is not a Cartesian product of sets.) *) -(***************************************************************************) -PreImage(f(_), S, R) == {ss \in S \X S : <> \in R} - -THEOREM PreImageWellFounded == - ASSUME NEW S, NEW T, NEW R, NEW f(_), - \A s \in S : f(s) \in T, - IsWellFoundedOn(R, T) - PROVE IsWellFoundedOn(PreImage(f, S, R), S) - - -(***************************************************************************) -(* We now prove that the lexicographical ordering on the Cartesian product *) -(* of two well-ordered sets is well-ordered. 
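   For example (an editorial aside, not part of the original module), by
   theorem WFLexPairOrdering below the relation

       LexPairOrdering(OpToRel(<, Nat), OpToRel(<, Nat), Nat, Nat)

   is well-founded on Nat \X Nat, even though the second component of a
   pair may jump arbitrarily high whenever the first component decreases.
   This is the usual justification for termination measures of the form
   <<outer counter, inner counter>>.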
*) -(***************************************************************************) -LexPairOrdering(R1, R2, S1, S2) == - {ss \in (S1 \X S2) \X (S1 \X S2) : - \/ <> \in R1 - \/ /\ ss[1][1] = ss[2][1] - /\ <> \in R2} - -THEOREM WFLexPairOrdering == - ASSUME NEW R1, NEW R2, NEW S1, NEW S2, - IsWellFoundedOn(R1, S1), - IsWellFoundedOn(R2, S2) - PROVE IsWellFoundedOn(LexPairOrdering(R1, R2, S1, S2), S1 \X S2) - - -(***************************************************************************) -(* The preceding theorem generalizes in the obvious way to the Cartesian *) -(* product of a finite number of well-ordered sets. However, the *) -(* statement of the general theorem is rather complicated, so we state it *) -(* for the most useful case: the Cartesian product of n copies of the same *) -(* set. *) -(***************************************************************************) -LexProductOrdering(R, S, n) == - { ff \in [1..n -> S] \X [1..n -> S] : - \E j \in 1..n : - /\ \A i \in 1..(j-1) : ff[1][i] = ff[2][i] - /\ <> \in R } - -THEOREM WFLexProductOrdering == - ASSUME NEW R, NEW S, NEW n \in Nat, - IsWellFoundedOn(R, S) - PROVE IsWellFoundedOn(LexProductOrdering(R, S, n), [1..n -> S]) - -============================================================================= -\* Modification History -\* Last modified Thu Feb 13 18:14:56 GMT-03:00 2014 by merz -\* Last modified Sun Jan 01 18:39:23 CET 2012 by merz -\* Last modified Wed Nov 23 10:13:18 PST 2011 by lamport diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/WellFoundedInduction_proofs.tla b/x/ccv/provider/keyguard/prototyping/tla/library/WellFoundedInduction_proofs.tla deleted file mode 100644 index 7dce4d04bc..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/WellFoundedInduction_proofs.tla +++ /dev/null @@ -1,738 +0,0 @@ ---------------------- MODULE WellFoundedInduction_proofs -------------------- -(***************************************************************************) -(* This module contains useful theorems for inductive proofs and recursive *) -(* definitions based on a well-founded ordering. *) -(* *) -(* Most of the statements of the theorems are decomposed in terms of *) -(* definitions. This is done for two reasons: *) -(* *) -(* - It makes it easier for the backends to instantiate the theorems *) -(* when those definitions are not expanded. In fact, at the moment *) -(* the provers can't prove any of those theorems from the theorem *) -(* itself if the definitions are made usable. *) -(* *) -(* - It can be convenient when writing proofs to use those definitions *) -(* rather than having to write out their expansions. *) -(* *) -(* A relation is represented as a set of ordered pairs, where we write *) -(* <> \in R instead of x R y. It is more convenient to represent *) -(* relations this way rather than as operators such as < . *) -(***************************************************************************) -EXTENDS NaturalsInduction, TLAPS - -(***************************************************************************) -(* The following defines what it means for a relation R to be transitively *) -(* closed on a set S. In this and other definitions, we think of R as a *) -(* relation on S, meaning that it is a subset of S \X S. However, this is *) -(* not necessary. Our results do not require this as a hypothesis, and it *) -(* is often convenient to apply them when R is a relation on a set *) -(* containing S as a subset. 
They're even true (though uninteresting) if *) -(* R and S \X S are disjoint sets. *) -(***************************************************************************) -IsTransitivelyClosedOn(R, S) == - \A i, j, k \in S : (<> \in R) /\ (<> \in R) - => (<> \in R) -(***************************************************************************) -(* If we think of R as a less-than relation, then R is well founded on S *) -(* iff there is no "infinitely descending" sequence of elements of S. The *) -(* canonical example of a well founded relation is the ordinary less-than *) -(* relation on the natural numbers. *) -(* *) -(* A S with a well-founded ordering is often called well-ordered. *) -(***************************************************************************) -IsWellFoundedOn(R, S) == - ~ \E f \in [Nat -> S] : \A n \in Nat : <> \in R - -LEMMA EmptyIsWellFounded == \A S : IsWellFoundedOn({}, S) -BY DEF IsWellFoundedOn - - -LEMMA IsWellFoundedOnSubset == - ASSUME NEW R, NEW S, NEW T \in SUBSET S, - IsWellFoundedOn(R,S) - PROVE IsWellFoundedOn(R,T) -BY DEF IsWellFoundedOn - - -LEMMA IsWellFoundedOnSubrelation == - ASSUME NEW S, NEW R, NEW RR, RR \cap (S \X S) \subseteq R, - IsWellFoundedOn(R,S) - PROVE IsWellFoundedOn(RR,S) -<1>1. SUFFICES ASSUME NEW f \in [Nat -> S], - \A n \in Nat : <> \in RR - PROVE FALSE - BY DEF IsWellFoundedOn -<1>2. \A n \in Nat : <> \in RR \cap (S \X S) - BY Isa, <1>1 -<1>. QED - BY <1>2 DEF IsWellFoundedOn - -(***************************************************************************) -(* If we think of R as a less-than relation on S, then the following is *) -(* the set of elements of S that are less than x. *) -(***************************************************************************) -SetLessThan(x, R, S) == {y \in S : <> \in R} - -(***************************************************************************) -(* If we think of R as a less-than relation on S, then R is well-founded *) -(* iff every non-empty subset of S has a minimal element. *) -(***************************************************************************) - -THEOREM WFMin == - ASSUME NEW R, NEW S, - IsWellFoundedOn(R, S), - NEW T, T \subseteq S, T # {} - PROVE \E x \in T : \A y \in T : ~ (<> \in R) -<1> SUFFICES ASSUME \A x \in T : \E y \in T : <> \in R - PROVE FALSE - OBVIOUS -<1> DEFINE f0 == CHOOSE x \in T : TRUE - Def(v, n) == CHOOSE x \in T : <> \in R - f[n \in Nat] == IF n = 0 THEN f0 ELSE Def(f[n-1], n) -<1>1. NatInductiveDefConclusion(f, f0, Def) - <2>1. NatInductiveDefHypothesis(f, f0, Def) - BY DEF NatInductiveDefHypothesis - <2>2. QED - BY <2>1, NatInductiveDef -<1>2. f \in [Nat -> T] - <2>1. f0 \in T - OBVIOUS - <2>2. \A v \in T, n \in Nat \ {0} : Def(v, n) \in T - OBVIOUS - <2>3. QED - BY <1>1, <2>1, <2>2, NatInductiveDefType, Isa -<1>3. ASSUME NEW n \in Nat - PROVE <> \in R - <2>1. /\ n+1 \in Nat - /\ n+1 # 0 - /\ (n+1)-1 = n - BY Isa - <2>2. f[n+1] = Def(f[(n+1)-1], n+1) - BY <2>1, <1>1 DEF NatInductiveDefConclusion - <2>3. QED - BY <2>1, <2>2, <1>2 -<1>4. QED - BY <1>2, <1>3 DEF IsWellFoundedOn - - -THEOREM MinWF == - ASSUME NEW R, NEW S, - \A T \in SUBSET S : T # {} => \E x \in T : \A y \in T : ~ (<> \in R) - PROVE IsWellFoundedOn(R,S) -<1> SUFFICES ASSUME NEW f \in [Nat -> S], - \A n \in Nat : <> \in R - PROVE FALSE - BY DEF IsWellFoundedOn -<1> DEFINE T == { f[n] : n \in Nat } -<1>1. T \subseteq S - OBVIOUS -<1>2. 
\A x \in T : \E y \in T : <> \in R - BY Isa -<1> QED - BY <1>1, <1>2 - -(***************************************************************************) -(* The two following lemmas are simple consequences of theorem WFMin. *) -(***************************************************************************) -LEMMA WellFoundedIsIrreflexive == - ASSUME NEW R, NEW S, NEW x \in S, - IsWellFoundedOn(R, S) - PROVE <> \notin R -<1>1. \E z \in {x} : \A y \in {x} : <> \notin R - BY WFMin, IsaM("blast") -<1>2. QED - BY <1>1 - - -LEMMA WellFoundedIsAsymmetric == - ASSUME NEW R, NEW S, NEW x \in S, NEW y \in S, - IsWellFoundedOn(R,S), - <> \in R, <> \in R - PROVE FALSE -<1>1. \E u \in {x,y} : \A v \in {x,y} : <> \notin R - BY WFMin, IsaM("blast") -<1>2. QED - BY <1>1 - -(***************************************************************************) -(* The following lemmas are simple facts about operator SetLessThan. *) -(***************************************************************************) -LEMMA WFSetLessThanIrreflexive == - ASSUME NEW R, NEW S, NEW x \in S, - IsWellFoundedOn(R,S) - PROVE x \notin SetLessThan(x,R,S) -BY WellFoundedIsIrreflexive DEF SetLessThan - - -LEMMA SetLessTransitive == - ASSUME NEW R, NEW S, NEW x \in S, NEW y \in SetLessThan(x,R,S), - IsTransitivelyClosedOn(R, S) - PROVE SetLessThan(y, R, S) \subseteq SetLessThan(x, R, S) -BY DEF SetLessThan, IsTransitivelyClosedOn - ----------------------------------------------------------------------------- -(***************************************************************************) -(* The following theorem is the basis for proof by induction over a *) -(* well-founded set. It generalizes theorem GeneralNatInduction of module *) -(* NaturalsInduction. *) -(***************************************************************************) -THEOREM WFInduction == - ASSUME NEW P(_), NEW R, NEW S, - IsWellFoundedOn(R, S), - \A x \in S : (\A y \in SetLessThan(x, R, S) : P(y)) - => P(x) - PROVE \A x \in S : P(x) -<1> DEFINE T == {x \in S : ~P(x)} -<1>1. SUFFICES ASSUME T # {} - PROVE FALSE - OBVIOUS -<1>2. PICK x \in T : \A y \in T : ~ (<> \in R) - BY <1>1, WFMin -<1>3. QED - BY <1>2 DEF SetLessThan - -(***************************************************************************) -(* Theorem WFInductiveDef below justifies recursive definitions based on a *) -(* well-founded ordering. We first prove it with the hypothesis that the *) -(* ordering is transitively closed. We prove the theorem for an arbitrary *) -(* well-founded relation by applying the special case to its transitive *) -(* closure. *) -(***************************************************************************) -WFDefOn(R, S, Def(_,_)) == - \A g, h : - \A x \in S : - (\A y \in SetLessThan(x, R, S) : g[y] = h[y]) - => (Def(g,x) = Def(h,x)) - -OpDefinesFcn(f, S, Def(_,_)) == - f = CHOOSE g : g = [x \in S |-> Def(g, x)] - -WFInductiveDefines(f, S, Def(_,_)) == - f = [x \in S |-> Def(f, x)] - -WFInductiveUnique(S, Def(_,_)) == - \A g, h : /\ WFInductiveDefines(g, S, Def) - /\ WFInductiveDefines(h, S, Def) - => (g = h) - -THEOREM WFDefOnUnique == - ASSUME NEW Def(_,_), NEW R, NEW S, - IsWellFoundedOn(R, S), WFDefOn(R, S, Def) - PROVE WFInductiveUnique(S, Def) -<1>0. SUFFICES ASSUME NEW g, NEW h, - WFInductiveDefines(g, S, Def), - WFInductiveDefines(h, S, Def) - PROVE g = h - BY DEF WFInductiveUnique -<1> SUFFICES \A x \in S : g[x] = h[x] - BY <1>0 DEF WFInductiveDefines -<1>1. ASSUME NEW x \in S, - \A y \in SetLessThan(x, R, S) : g[y] = h[y] - PROVE g[x] = h[x] - <2>1. 
Def(g,x) = Def(h,x) - BY <1>1 DEF WFDefOn - <2>2. QED - BY <1>0, <2>1 DEF WFInductiveDefines -<1>2. QED - BY <1>1, WFInduction, Isa - -LEMMA WFInductiveDefLemma == - ASSUME NEW Def(_,_), NEW R, NEW S, NEW f, - IsWellFoundedOn(R, S), - IsTransitivelyClosedOn(R, S), - WFDefOn(R, S, Def), - OpDefinesFcn(f, S, Def) - PROVE WFInductiveDefines(f, S, Def) -<1> DEFINE LT(x) == {x} \cup SetLessThan(x, R, S) -<1>1. ASSUME NEW x \in S - PROVE /\ LT(x) = {x} \cup UNION {LT(y) : y \in SetLessThan(x, R, S)} - /\ (x \in LT(x)) /\ (SetLessThan(x, R, S) \subseteq LT(x)) - /\ \A y \in LT(x) : SetLessThan(y, R, S) \subseteq LT(x) - /\ \A y \in LT(x) : LT(y) \subseteq LT(x) - /\ LT(x) \subseteq S - BY Isa DEF SetLessThan, IsTransitivelyClosedOn -<1> HIDE DEF LT \** from now on, (mostly) use properties in step <1>1 rather than the definition - -<1> DEFINE F(x) == CHOOSE g : g = [y \in LT(x) |-> Def(g, y)] - ff == [x \in S |-> F(x)[x]] -<1> HIDE DEF ff - -<1>2. \A x \in S : ff[x] = Def(ff,x) - <2>1. SUFFICES ASSUME NEW x \in S, - \A y \in SetLessThan(x, R, S) : ff[y] = Def(ff,y) - PROVE ff[x] = Def(ff,x) - BY WFInduction, Isa - <2>2. WFInductiveUnique(LT(x), Def) - <3>1. LT(x) \subseteq S - BY <1>1 - <3>2. IsWellFoundedOn(R, LT(x)) - BY <3>1, IsWellFoundedOnSubset - <3>3. \A z \in LT(x) : SetLessThan(z, R, LT(x)) = SetLessThan(z, R, S) - BY DEF LT, SetLessThan, IsTransitivelyClosedOn - <3>4. WFDefOn(R, LT(x), Def) - BY <3>1, <3>3, IsaM("blast") DEF WFDefOn - <3>. QED - BY <3>2, <3>4, WFDefOnUnique - <2> DEFINE g == [y \in LT(x) |-> Def(ff, y)] - <2>3. Def(ff,x) = Def(g,x) - BY <1>1 (* x \in LT(x) *), <2>1 DEF WFDefOn - <2>4. ASSUME NEW y \in SetLessThan(x, R, S) - PROVE Def(ff,y) = Def(g,y) - <3>1. y \in S - BY DEF SetLessThan - <3>2. \A z \in SetLessThan(y, R, S) : ff[z] = g[z] - BY <2>1, SetLessTransitive DEF LT - <3>3. QED - BY <3>1, <3>2 DEF WFDefOn - <2>5. WFInductiveDefines(g, LT(x), Def) - BY <2>3, <2>4 DEF WFInductiveDefines, LT - <2>6. WFInductiveDefines(F(x), LT(x), Def) - BY <2>5 DEF WFInductiveDefines - <2>7. g = F(x) - BY <2>5, <2>6, <2>2 DEF WFInductiveUnique - <2>. QED - BY <1>1, <2>7 DEF ff - -<1>3. QED - <2>1. WFInductiveDefines(ff, S, Def) - BY <1>2 DEF WFInductiveDefines, ff - <2>2. QED - BY <2>1 DEF WFInductiveDefines, OpDefinesFcn - -(***************************************************************************) -(* The following defines the transitive closure of the relation R on S. *) -(* More precisely, it is the transitive closure of the restriction of R *) -(* to S. We give an abstract definition of transitive closure as the *) -(* smallest relation that contains R (restricted to S \X S) and that is *) -(* transitively closed, then prove some relevant properties. *) -(***************************************************************************) -TransitiveClosureOn(R,S) == - { ss \in S \X S : - \A U \in SUBSET (S \X S) : - /\ R \cap S \X S \subseteq U - /\ IsTransitivelyClosedOn(U, S) - => ss \in U } - -LEMMA TransitiveClosureThm == - \A R, S : - /\ R \cap S \X S \subseteq TransitiveClosureOn(R, S) - /\ IsTransitivelyClosedOn(TransitiveClosureOn(R, S), S) -<1> TAKE R, S -<1>1. R \cap S \X S \subseteq TransitiveClosureOn(R, S) - BY DEF TransitiveClosureOn -<1>2. IsTransitivelyClosedOn(TransitiveClosureOn(R, S), S) - BY DEF TransitiveClosureOn, IsTransitivelyClosedOn -<1>3. 
QED - BY <1>1, <1>2 - -LEMMA TransitiveClosureMinimal == - ASSUME NEW R, NEW S, NEW U \in SUBSET (S \X S), - R \cap S \X S \subseteq U, - IsTransitivelyClosedOn(U,S) - PROVE TransitiveClosureOn(R,S) \subseteq U -BY DEF TransitiveClosureOn - -(***************************************************************************) -(* The following lemmas are consequences of the two previous ones. The *) -(* first three state closure properties of transitive closure, the fourth *) -(* lemma allows one to chop off a step in the underlying relation for any *) -(* pair in the transitive closure. *) -(***************************************************************************) - -LEMMA TCTCTC == - ASSUME NEW R, NEW S, NEW i \in S, NEW j \in S, NEW k \in S, - <> \in TransitiveClosureOn(R,S), - <> \in TransitiveClosureOn(R,S) - PROVE <> \in TransitiveClosureOn(R,S) -BY TransitiveClosureThm DEF IsTransitivelyClosedOn - -LEMMA TCRTC == - ASSUME NEW R, NEW S, NEW i \in S, NEW j \in S, NEW k \in S, - <> \in TransitiveClosureOn(R,S), <> \in R - PROVE <> \in TransitiveClosureOn(R,S) -BY TransitiveClosureThm, TCTCTC - -LEMMA RTCTC == - ASSUME NEW R, NEW S, NEW i \in S, NEW j \in S, NEW k \in S, - <> \in R, <> \in TransitiveClosureOn(R,S) - PROVE <> \in TransitiveClosureOn(R,S) -BY TransitiveClosureThm, TCTCTC - -LEMMA TransitiveClosureChopLast == - ASSUME NEW R, NEW S, NEW i \in S, NEW k \in S, <> \in TransitiveClosureOn(R,S) - PROVE \E j \in S : /\ <> \in R - /\ i = j \/ <> \in TransitiveClosureOn(R,S) -<1> DEFINE U == { ss \in S \X S : \E s \in S : /\ <> \in R - /\ ss[1] = s \/ <> \in TransitiveClosureOn(R,S) } -<1>1. R \cap S \X S \subseteq U - <2> SUFFICES ASSUME NEW x \in S, NEW y \in S, <> \in R - PROVE <> \in U - BY IsaM("blast") - <2> QED - OBVIOUS -<1>2. U \subseteq TransitiveClosureOn(R,S) - <2> SUFFICES ASSUME NEW x \in S, NEW y \in S, <> \in U - PROVE <> \in TransitiveClosureOn(R,S) - BY IsaM("blast") - <2> QED - BY TransitiveClosureThm DEF IsTransitivelyClosedOn -<1>3. IsTransitivelyClosedOn(U,S) - <2>1. SUFFICES ASSUME NEW x \in S, NEW y \in S, NEW z \in S, - <> \in U, <> \in U - PROVE <> \in U - BY DEF IsTransitivelyClosedOn - <2>2. <> \in TransitiveClosureOn(R,S) - BY <2>1, <1>2 - <2>3. PICK s \in S : /\ <> \in R - /\ y=s \/ <> \in TransitiveClosureOn(R,S) - BY <2>1 - <2>4. <> \in TransitiveClosureOn(R,S) - BY <2>2, <2>3, TransitiveClosureThm DEF IsTransitivelyClosedOn - <2> QED - BY <2>3, <2>4 -<1>4. QED - <2>1. TransitiveClosureOn(R,S) \subseteq U - BY <1>1, <1>3, TransitiveClosureMinimal - <2>2. QED - BY <2>1 - -(***************************************************************************) -(* NB: In a similar way to the preceding lemma, one could prove *) -(* ASSUME NEW R, NEW S, NEW x \in S, NEW y \in S, *) -(* <> \in TransitiveClosureOn(R,S) *) -(* PROVE \E n \in Nat : \E f \in [0..(n+1) -> S] : *) -(* /\ \A i \in 0..n : <> \in R *) -(* /\ x = f[0] /\ y = f[n+1] *) -(* which provides a more constructive characterization of transitive *) -(* closure. The converse theorem would be proved by induction on n, *) -(* using the above closure properties. *) -(***************************************************************************) - -THEOREM TransitiveClosureWF == - ASSUME NEW R, NEW S, IsWellFoundedOn(R,S) - PROVE IsWellFoundedOn(TransitiveClosureOn(R, S), S) -<1> SUFFICES ASSUME NEW T \in SUBSET S, T # {} - PROVE \E x \in T : \A y \in T : ~(<> \in TransitiveClosureOn(R, S)) - BY MinWF -(* It is tempting to simply pick a minimal element x in T w.r.t. 
relation R as the witness, - but that wouldn't work in general because there may be elements below x in the transitive - closure of R. So we complete T w.r.t. the transitive closure in an appropriate way and - pick a minimal element in that larger set. *) -<1> DEFINE TT == T \cup { j \in S : \E i,k \in T : /\ <> \in TransitiveClosureOn(R,S) - /\ <> \in TransitiveClosureOn(R,S) } -<1>1. PICK x \in TT : \A y \in TT : ~(<> \in R) - BY WFMin -<1>2. x \in T - <2>1. ASSUME NEW i \in T, NEW k \in T, - <> \in TransitiveClosureOn(R,S), - <> \in TransitiveClosureOn(R,S) - PROVE FALSE - <3>1. PICK j \in S : /\ <> \in R - /\ i=j \/ <> \in TransitiveClosureOn(R,S) - BY <2>1, TransitiveClosureChopLast - <3>2. j \in TT - <4>1. CASE <> \in TransitiveClosureOn(R,S) - BY <3>1, <4>1, <2>1, RTCTC - <4>2. QED - BY <3>1, <4>1 - <3>3. QED - BY <3>1, <3>2, <1>1 - <2>2. QED - BY <2>1 -<1>3. ASSUME NEW y \in T, <> \in TransitiveClosureOn(R, S) - PROVE FALSE - <2>1. PICK j \in S : /\ <> \in R - /\ y=j \/ <> \in TransitiveClosureOn(R,S) - BY <1>3, TransitiveClosureChopLast - <2>2. j \in TT - <3>1. CASE <> \in TransitiveClosureOn(R,S) - BY <1>2, <3>1, <2>1, TransitiveClosureThm - <3>2. QED - BY <2>1, <3>1 - <2>3. QED - BY <2>1, <2>2, <1>1 -<1> QED - BY <1>2, <1>3 - -THEOREM WFInductiveDef == - ASSUME NEW Def(_,_), NEW R, NEW S, NEW f, - IsWellFoundedOn(R, S), - WFDefOn(R, S, Def), - OpDefinesFcn(f, S, Def) - PROVE WFInductiveDefines(f, S, Def) -<1> DEFINE TC == TransitiveClosureOn(R, S) -<1>1. IsTransitivelyClosedOn(TC, S) - BY TransitiveClosureThm -<1>2. IsWellFoundedOn(TC, S) - BY TransitiveClosureWF -<1>3. WFDefOn(TC, S, Def) - <2>1. \A x \in S : SetLessThan(x, R, S) \subseteq SetLessThan(x, TC, S) - BY TransitiveClosureThm DEF SetLessThan - <2>2. QED - BY <2>1 DEF WFDefOn -<1>4. QED - BY <1>1, <1>2, <1>3, WFInductiveDefLemma - -(***************************************************************************) -(* Theorem WFInductiveDef allows us to conclude that a recursively defined *) -(* function satisfies its recursion equation. The following result allows *) -(* us to deduce the range of this function. *) -(***************************************************************************) -THEOREM WFInductiveDefType == - ASSUME NEW Def(_,_), NEW f, NEW R, NEW S, NEW T, - T # {}, - IsWellFoundedOn(R, S), - WFDefOn(R, S, Def), - WFInductiveDefines(f, S, Def), - \A g \in [S -> T], s \in S : Def(g, s) \in T - PROVE f \in [S -> T] -<1>1. \A s \in S : f[s] \in T - <2>1. SUFFICES ASSUME NEW s \in S, - \A x \in SetLessThan(s, R, S) : f[x] \in T - PROVE f[s] \in T - BY ONLY <2>1, IsWellFoundedOn(R, S), WFInduction, IsaM("auto") - <2>2. PICK t0 : t0 \in T - OBVIOUS - <2> DEFINE g == [x \in S |-> IF x \in SetLessThan(s, R, S) THEN f[x] ELSE t0] - <2>3. /\ g \in [S -> T] - /\ \A x \in SetLessThan(s, R, S) : g[x] = f[x] - <3> SetLessThan(s, R, S) \subseteq S - BY DEF SetLessThan - <3> QED - BY <2>1, <2>2 - <2>4. Def(f,s) = Def(g,s) - BY <2>3 DEF WFDefOn - <2>5. QED - BY <2>3, <2>4 DEF WFInductiveDefines, WFDefOn -<1>2. QED - BY <1>1 DEF WFInductiveDefines - - ---------------------------------------------------------------------------- -(***************************************************************************) -(* Below are some theorems that allow us to derive some useful *) -(* well-founded relations from a given well-founded relation. First, we *) -(* define the operator OpToRel that constructs a relation (a set of *) -(* ordered pairs) from a relation expressed as an operator. 
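   Concretely (an editorial aside, not part of the original module),

       OpToRel(<, Nat) = { mn \in Nat \X Nat : mn[1] < mn[2] }

   so <<3, 5>> is in OpToRel(<, Nat) while <<5, 3>> is not; theorem
   NatLessThanWellFounded below establishes that this relation is
   well-founded on Nat, and PreImageWellFounded together with the
   lexicographic constructions that follow build further well-founded
   relations from it.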
*) -(***************************************************************************) -OpToRel(_\prec_, S) == {ss \in S \X S : ss[1] \prec ss[2]} - -(***************************************************************************) -(* To construct well-founded relations from the less-than relation on the *) -(* natural numbers, we first prove that it is well-founded. *) -(***************************************************************************) -THEOREM NatLessThanWellFounded == IsWellFoundedOn(OpToRel(<,Nat), Nat) -<1> DEFINE R == OpToRel(<,Nat) -<1>1. SUFFICES ASSUME NEW ff \in [Nat -> Nat], - \A n \in Nat : ff[n+1] < ff[n] - PROVE FALSE - BY DEF IsWellFoundedOn, OpToRel - -<1> DEFINE P(n) == \E f \in [Nat -> Nat] : - /\ \A m \in Nat : <> \in R - /\ f[0] = n -<1>1a. P(ff[0]) - BY <1>1, IsaM("auto") DEF OpToRel -<1>2. ASSUME NEW n \in Nat, - \A m \in 0..(n-1) : ~ P(m) - PROVE ~ P(n) - <2> SUFFICES ASSUME NEW f \in [Nat -> Nat], - \A m \in Nat : <> \in R , - f[0] = n - PROVE FALSE - OBVIOUS - <2> DEFINE g[i \in Nat] == f[i+1] - <2>1. g \in [Nat -> Nat] - BY ONLY f \in [Nat -> Nat], IsaM("auto") - <2>2. \A i \in Nat : <> \in R - BY IsaM("auto") - <2>3. g[0] \in 0..(n-1) - BY <2>2, Z3 DEF OpToRel - <2>4 QED - BY <2>1, <2>2, <2>3, <1>2 -<1>3. ~ P(ff[0]) - <2> HIDE DEF P - <2> \A n \in Nat : ~ P(n) - BY ONLY <1>2, GeneralNatInduction, IsaM("auto") - <2> QED - BY DEF P -<1>4. QED - BY <1>1a, <1>3 - -(***************************************************************************) -(* The next definition would be easier to read if we used the TLA+ *) -(* construct {<> \in T : ... }. However, TLAPS does not suport *) -(* that notation. (It's meaning is rather complicated in the general case *) -(* when T is not a Cartesian product of sets.) *) -(***************************************************************************) -PreImage(f(_), S, R) == {ss \in S \X S : <> \in R} - -THEOREM PreImageWellFounded == - ASSUME NEW S, NEW T, NEW R, NEW f(_), - \A s \in S : f(s) \in T, - IsWellFoundedOn(R, T) - PROVE IsWellFoundedOn(PreImage(f, S, R), S) -<1> SUFFICES ASSUME NEW g \in [Nat -> S], - \A n \in Nat : <> \in PreImage(f, S, R) - PROVE FALSE - BY DEF IsWellFoundedOn -<1> DEFINE gg[n \in Nat] == f(g[n]) -<1>1. ASSUME NEW n \in Nat - PROVE <> \in R - BY IsaM("auto") DEF PreImage -<1> QED - BY <1>1 DEF IsWellFoundedOn - -(***************************************************************************) -(* We now prove that the lexicographical ordering on the Cartesian product *) -(* of two well-ordered sets is well-ordered. *) -(***************************************************************************) -LexPairOrdering(R1, R2, S1, S2) == - {ss \in (S1 \X S2) \X (S1 \X S2) : - \/ <> \in R1 - \/ /\ ss[1][1] = ss[2][1] - /\ <> \in R2} - -THEOREM WFLexPairOrdering == - ASSUME NEW R1, NEW R2, NEW S1, NEW S2, - IsWellFoundedOn(R1, S1), - IsWellFoundedOn(R2, S2) - PROVE IsWellFoundedOn(LexPairOrdering(R1, R2, S1, S2), S1 \X S2) -<1> SUFFICES ASSUME NEW T \in SUBSET (S1 \X S2), T # {} - PROVE \E x \in T : \A y \in T : <> \notin LexPairOrdering(R1, R2, S1, S2) - BY MinWF -<1> DEFINE T1 == { tt[1] : tt \in T } -<1>1. PICK x1 \in T1 : \A y1 \in T1 : <> \notin R1 - <2>1. T1 \subseteq S1 /\ T1 # {} - OBVIOUS - <2>2. QED - BY <2>1, WFMin -<1> DEFINE T2 == { tt[2] : tt \in { uu \in T : uu[1] = x1 } } -<1>2. PICK x2 \in T2 : \A y2 \in T2 : <> \notin R2 - <2>1. T2 \subseteq S2 /\ T2 # {} - OBVIOUS - <2>2. QED - BY <2>1, WFMin -<1>3. <> \in T - BY IsaM("force") -<1>4. 
ASSUME NEW t \in T, - << t, <> >> \in LexPairOrdering(R1, R2, S1, S2) - PROVE FALSE - <2>1. CASE << t[1], x1 >> \in R1 - BY <1>1, <2>1 - <2>2. CASE t[1] = x1 /\ << t[2], x2 >> \in R2 - BY <1>2, <2>2 - <2>3. QED - BY <2>1, <2>2, <1>4 DEF LexPairOrdering -<1> QED - BY <1>3, <1>4 - -(***************************************************************************) -(* The preceding theorem generalizes in the obvious way to the Cartesian *) -(* product of a finite number of well-ordered sets. However, the *) -(* statement of the general theorem is rather complicated, so we state it *) -(* for the most useful case: the Cartesian product of n copies of the same *) -(* set. *) -(***************************************************************************) -LexProductOrdering(R, S, n) == - { ff \in [1..n -> S] \X [1..n -> S] : - \E j \in 1..n : - /\ \A i \in 1..(j-1) : ff[1][i] = ff[2][i] - /\ <> \in R } - -THEOREM WFLexProductOrdering == - ASSUME NEW R, NEW S, NEW n \in Nat, - IsWellFoundedOn(R, S) - PROVE IsWellFoundedOn(LexProductOrdering(R, S, n), [1..n -> S]) -<1> DEFINE LPO(m) == LexProductOrdering(R, S, m) -<1> DEFINE P(m) == IsWellFoundedOn(LPO(m), [1..m -> S]) -<1>1. P(0) - BY 1..0 = {}, EmptyIsWellFounded DEF LexProductOrdering -<1>2. ASSUME NEW m \in Nat, P(m) - PROVE P(m+1) - <2>1. IsWellFoundedOn(LexPairOrdering(LPO(m), R, [1..m -> S], S), [1..m -> S] \X S) - BY <1>2, WFLexPairOrdering - (*************************************************************************) - (* Pairs of m-tuples over S in [1..m ->S] and an element of S are *) - (* isomorphic to (m+1)-tuples over S, and the following function *) - (* establishes this isomorphism. We will then apply the theorem about *) - (* preimages to prove the desired result. *) - (*************************************************************************) - <2> DEFINE g(ss) == << [i \in 1..m |-> ss[i]], ss[m+1] >> - <2>2. 1 .. m+1 = 1..m \union {m+1} - OBVIOUS - <2>3. IsWellFoundedOn(PreImage(g, [1..m+1 -> S], LexPairOrdering(LPO(m), R, [1..m -> S], S)), - [1..m+1 -> S]) - <3>1. \A ss \in [1..m+1 -> S] : g(ss) \in [1..m -> S] \X S - BY <2>2 - <3> HIDE DEF g - <3>2. QED - BY <2>1, <3>1, PreImageWellFounded - <2>4. LPO(m+1) = PreImage(g, [1..m+1 -> S], LexPairOrdering(LPO(m), R, [1..m -> S], S)) - <3>1. LPO(m+1) \subseteq PreImage(g, [1..m+1 -> S], LexPairOrdering(LPO(m), R, [1..m -> S], S)) - <4> SUFFICES ASSUME NEW x \in [1..m+1 -> S], NEW y \in [1..m+1 -> S], - NEW j \in 1 .. m+1, - \A i \in 1..j-1 : x[i] = y[i], - <> \in R - PROVE <> \in PreImage(g, [1..m+1 -> S], LexPairOrdering(LPO(m), R, [1..m -> S], S)) - BY Isa DEF LexProductOrdering - <4>1. \A i \in 1 .. j-1 : i \in 1 .. m - OBVIOUS - <4>2. << g(x), g(y) >> \in LexPairOrdering(LPO(m), R, [1..m -> S], S) - <5>1. CASE j \in 1..m - <6>1. << g(x)[1], g(y)[1] >> \in LPO(m) - BY <2>2, <4>1, <5>1 DEF LexProductOrdering - <6>2. QED - BY <6>1, <2>2 DEF LexPairOrdering - <5>2. CASE j = m+1 - <6>1. /\ g(x)[1] = g(y)[1] - /\ << g(x)[2], g(y)[2] >> \in R - BY <2>2, <5>2, IsaM("force") - <6>2. QED - BY <6>1, <2>2 DEF LexPairOrdering - <5>3. QED - BY <2>2, <5>1, <5>2 - <4>3. QED - BY <4>2 DEF PreImage - <3>2. PreImage(g, [1..m+1 -> S], LexPairOrdering(LPO(m), R, [1..m -> S], S)) \subseteq LPO(m+1) - <4> SUFFICES ASSUME NEW x \in [1..m+1 -> S], NEW y \in [1..m+1 -> S], - << g(x), g(y) >> \in LexPairOrdering(LPO(m), R, [1..m -> S], S) - PROVE <> \in LPO(m+1) - BY IsaM("auto") DEF PreImage - <4>1. CASE << g(x)[1], g(y)[1] >> \in LPO(m) - <5> HIDE DEF g - <5>1. 
PICK j \in 1..m : /\ \A i \in 1..j-1 : g(x)[1][i] = g(y)[1][i] - /\ << g(x)[1][j], g(y)[1][j] >> \in R - BY <4>1 DEF LexProductOrdering - <5>3. /\ g(x)[1][j] = x[j] - /\ \A i \in 1..j-1 : g(x)[1][i] = x[i] - /\ g(y)[1][j] = y[j] - /\ \A i \in 1..j-1 : g(y)[1][i] = y[i] - BY <2>2, SMT DEF g - <5> QED - BY <5>1, <5>3, <2>2 DEF LexProductOrdering - <4>2. CASE g(x)[1] = g(y)[1] /\ << g(x)[2], g(y)[2] >> \in R - <5>1. <> \in R - BY <4>2 - <5>2. \A i \in 1..m : /\ g(x)[1][i] = x[i] - /\ g(y)[1][i] = y[i] - OBVIOUS - <5>3. \A i \in 1..(m+1)-1 : x[i] = y[i] - BY <4>2, <5>2, IsaM("auto") - <5> QED - BY <5>1, <5>3 DEF LexProductOrdering - <4> QED - BY <4>1, <4>2 DEF LexPairOrdering - <3>3. QED - BY <3>1, <3>2 - <2> QED - BY <2>3, <2>4 -<1>3. \A m \in Nat : P(m) - BY <1>1, <1>2, NatInduction, IsaM("auto") -<1>4. QED - BY <1>3 - -============================================================================= -\* Modification History -\* Last modified Thu Feb 13 18:26:54 GMT-03:00 2014 by merz -\* Last modified Sun Jan 01 18:39:23 CET 2012 by merz -\* Last modified Wed Nov 23 10:13:18 PST 2011 by lamport diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/ref/CCV.tla b/x/ccv/provider/keyguard/prototyping/tla/library/ref/CCV.tla deleted file mode 100644 index 23ad1079d5..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/ref/CCV.tla +++ /dev/null @@ -1,550 +0,0 @@ ---------------------------- MODULE CCV --------------------------- -(* - * Modeling voting power relay between provider- and consumer chains in ICS. - * - * Simplifications: - * - We only track voting power, not bonded tokens - * - CCV channel creation is atomic and never fails/times out. - * - No new consumers join midway. - * - Block height is not modeled. - * - * Jure Kukovec, 2022 - *) - -EXTENDS Integers, Sequences, Apalache, typedefs - -CONSTANT - \* The set of all nodes, which may take on a validator role. - \* node \in Nodes is a validator <=> node \in DOMAIN votingPowerRunning - \* @type: Set($node); - Nodes, - \* The set of all consumer chains. Consumers may be removed - \* during execution, but not added. - \* @type: Set($chain); - ConsumerChains, - \* Time that needs to elapse, before a received VPC is considered - \* mature on a chain. - \* @type: $time; - UnbondingPeriod, - \* Time that needs to elapse, before a message is considered to have - \* timed out (resulting in the removal of the related consumer chain). - \* @type: $time; - Timeout, - \* Maximal time by which clocks are assumed to differ from the provider chain. - \* Since consumer chains don't communicate, we don't care about - \* drift between tow consumers (though it's implicitly less than MaxDrift, if - \* each differs from the provider chain by at most MaxDrift). - \* The specification doesn't force clocks to maintain bounded drift, - \* but the invariants are only verified in cases where clocks never drift too far. - \* @type: $time; - MaxDrift - -\* Provider chain only -VARIABLES - \* Snapshots of the voting power on the provider chain, at the times - \* when a VPC packet was sent. - \* t \in DOMAIN votingPowerHist <=> VPC packet sent at time t - \* @type: $packet -> $votingPowerOnChain; - votingPowerHist, - \* Current voting power on the provider chain. - \* @type: $votingPowerOnChain; - votingPowerRunning, - \* Current status for each consumer. 
May be one of: - \* Unused - Initializing - Active - Dropped - \* @type: $chain -> STATUS; - consumerStatus, - \* Set of chains live at the time a packet was sent (who are expected to reply) - \* @type: $packet -> Set($chain); - expectedResponders, - \* The set of MatureVSCPacket sent by consumer chains to the - \* provider chain. - \* @type: Set($matureVSCPacket); - maturePackets - -\* Consumer chains or both -VARIABLES - \* Representation of the current voting power, as understood by consumer chains. - \* Because consumer chains may not arbitrarily modify their own voting power, - \* but must instead update in accordance to VPC packets received from the - \* provider, it is sufficient to only track the last received packet. - \* The voting power on chain c is then equal to votingPowerHist[votingPowerReferences[c]]. - \* @type: $chain -> $time; - votingPowerReferences, - \* The queues of VPC packets, waiting to be received by consumer chains. - \* Note that a packet being placed in the channel is not considered - \* received by the consumer, until the receive-action is taken. - \* @type: $chain -> Seq($packet); - ccvChannelsPending, - \* The queues of VPC packets, that have been received by consumer chains in the past. - \* @type: $chain -> Seq($packet); - ccvChannelsResolved, - \* The current times of all chains (including the provider). - \* @type: $chain -> $time; - currentTimes, - \* Bookkeeping of maturity times for received packets. - \* A consumer may only send a MatureVSCPacket (i.e. notify the provider) after - \* its local time exceeds the time designated in maturityTimes. - \* For each consumer chain c, and VSC packet t sent by the provider, - \* a) t \in DOMAIN maturityTimes[c] <=> c has received packet t - \* b) if t \in DOMAIN maturityTimes[c], then maturity for t on c is - \* guarded by currentTimes[c] >= maturityTimes[c][t] - \* @type: $chain -> $packet -> $time; - maturityTimes - -\* Bookkeeping -VARIABLES - \* Name of last action, for debugging - \* @type: Str; - lastAction, - \* VPC flag; Voting power may be considered to have changed, even if - \* the (TLA) value of votingPowerRunning does not (for example, due to a sequence - \* of delegations and un-delegations, with a net 0 change in voting power). - \* We use this flag to determine whether it is necessary to send a VPC packet. - \* @type: Bool; - votingPowerHasChanged, - \* Invariant flag, TRUE iff clocks never drifted too much - \* @type: Bool; - boundedDrift - -\* Helper tuples for UNCHANGED syntax -\* We don't track consumerStatus and lastAction in var tuples, because -\* they change each round. 
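\* Illustrative example (an editorial aside, not part of the original
\* spec), written with TLC's :> and @@ function constructors and the
\* hypothetical values Nodes = {"n1", "n2"} and "c1" \in ConsumerChains.
\* A packet is identified with the provider time at which it was sent, so
\*     votingPowerHist = (3 :> ("n1" :> 10 @@ "n2" :> 5)) @@ (7 :> ("n1" :> 10))
\* together with votingPowerReferences["c1"] = 3 means that consumer "c1"
\* is still running with the validator-set snapshot taken at provider
\* time 3 (n1 with power 10, n2 with power 5) and will switch to the
\* time-7 snapshot only once it receives packet 7.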
- -providerVars == - << votingPowerHist, votingPowerRunning, expectedResponders, maturePackets >> - -consumerVars == - << votingPowerReferences, ccvChannelsPending, ccvChannelsResolved, currentTimes, maturityTimes >> - -\* @type: <>; -bookkeepingVars == - << votingPowerHasChanged, boundedDrift >> - - -(*** NON-ACTION DEFINITIONS ***) - -Unused == "Unused_OF_STATUS" -Initializing == "Initializing_OF_STATUS" -Active == "Active_OF_STATUS" -Dropped == "Dropped_OF_STATUS" - -Status == { Unused, Initializing, Active, Dropped } - -ActiveConsumers == {c \in ConsumerChains: consumerStatus[c] = Active } -InitializingConsumers == {c \in ConsumerChains: consumerStatus[c] = Initializing } -LiveConsumers == ActiveConsumers \union InitializingConsumers -LiveNext == { c \in ConsumerChains: consumerStatus'[c] \in {Initializing, Active} } -InactiveConsumers == {c \in ConsumerChains: consumerStatus[c] \in {Unused, Dropped}} - -\* Some value not in Nat, for initialization -UndefinedTime == -1 - -\* Provider chain ID, assumed to be distinct from all consumer chain IDs -ProviderChain == "provider_OF_C" - -\* Some value not in [Nodes -> Nat], for initialization -UndefinedPower == [node \in Nodes |-> -1] - -\* All chains, including the provider. Used for the domain of shared -\* variables, e.g. currentTimes -Chains == ConsumerChains \union {ProviderChain} - -\* According to https://github.com/cosmos/ibc/blob/main/spec/core/ics-004-channel-and-packet-semantics/README.md#receiving-packets -\* we need to use >=. -TimeoutGuard(a,b) == a >= b - -\* @type: (Seq($packet), $time) => Bool; -TimeoutOnReception(channel, consumerT) == - /\ Len(channel) /= 0 - \* Head is always the oldest packet, so if there is a timeout for some packet, - \* there must be one for Head too - /\ TimeoutGuard(consumerT, Head(channel) + Timeout) - - -\* @type: ($chain, $time, $packet -> $time) => Bool; -TimeoutOnMaturity(c, providerT, maturity) == - \E packet \in DOMAIN maturity: - \* Note: Reception time = maturity[packet] - UnbondingPeriod - /\ TimeoutGuard(providerT + UnbondingPeriod, maturity[packet] + Timeout) - \* Not yet matured - /\ \A matureVSCPacket \in maturePackets: - \/ matureVSCPacket.chain /= c - \/ matureVSCPacket.packetTime /= packet - -\* Takes parameters, so primed and non-primed values can be passed -\* @type: ($chain, Seq($packet), $time, $time, $packet -> $time) => Bool; -PacketTimeoutForConsumer(c, channel, consumerT, providerT, maturity) == - \* Option 1: Timeout on reception - \/ TimeoutOnReception(channel, consumerT) - \* Option 2: Timeout on maturity - \/ TimeoutOnMaturity(c, providerT, maturity) - -\* Because we're not using functions with fixed domains, we can't use EXCEPT. -\* Thus, we need a helper method for domain-extension. -\* @type: (a -> b, a, b) => a -> b; -ExtendFnBy(f, k, v) == - [ - x \in DOMAIN f \union {k} |-> - IF x = k - THEN v - ELSE f[x] - ] - -\* Packets are set at unique times, monotonically increasing, the last -\* one is just the max in the votingPowerHist domain. 
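\* Illustrative example (an editorial aside, not part of the original
\* spec): ExtendFnBy above grows a function's domain by one key, e.g., in
\* TLC's :> and @@ notation,
\*     ExtendFnBy((3 :> "a"), 7, "b") = (3 :> "a" @@ 7 :> "b")
\* Similarly, if DOMAIN votingPowerHist = {0, 4, 9}, then the fold below
\* yields LastPacketTime = 9.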
-LastPacketTime == - LET Max2(a,b) == IF a >= b THEN a ELSE b IN - ApaFoldSet(Max2, -1, DOMAIN votingPowerHist) - -\* @type: ($chain, $packet, $time) => $matureVSCPacket; -MatureVSCPacket(c, packet, matT) == - [chain |-> c, packet |-> packet, maturityTime |-> matT] - -\* @type: (Int, Int) => Int; -Delta(a,b) == IF a > b THEN a - b ELSE b - a - -\* @type: (a -> Int, Set(a), Int) => Bool; -BoundedDeltas(fn, dom, bound) == - /\ dom \subseteq DOMAIN fn - /\ \A v1, v2 \in dom: - Delta(fn[v1], fn[v2]) <= bound - -\* All the packets ever sent to c in the order they were sent in -\* @type: ($chain) => Seq($packet); -PacketOrder(c) == ccvChannelsResolved[c] \o ccvChannelsPending[c] - -\* @type: $packet => Set($chain); -RemainingResponders(t) == - { c \in expectedResponders[t]: consumerStatus[c] /= Dropped } - -(*** ACTIONS ***) - -Init == - /\ votingPowerHist = [t \in {} |-> UndefinedPower] - /\ \E initValidators \in SUBSET Nodes: - /\ initValidators /= {} - /\ votingPowerRunning \in [initValidators -> Nat] - /\ \A v \in initValidators: votingPowerRunning[v] > 0 - /\ consumerStatus \in [ConsumerChains -> Status] - /\ expectedResponders = [t \in {} |-> {}] - /\ maturePackets = {} - /\ votingPowerReferences = [chain \in ConsumerChains |-> UndefinedTime] - /\ ccvChannelsPending = [chain \in ConsumerChains |-> <<>>] - /\ ccvChannelsResolved = [chain \in ConsumerChains |-> <<>>] - /\ currentTimes = [c \in Chains |-> 0] - /\ maturityTimes = [c \in ConsumerChains |-> [t \in {} |-> UndefinedTime]] - /\ votingPowerHasChanged = FALSE - /\ boundedDrift = TRUE - /\ lastAction = "Init" - -\* We combine all (un)delegate actions, as well as (un)bonding actions into an -\* abstract VotingPowerChange. -\* Since VPC packets are sent at most once at the end of each block, -\* the granularity wouldn't have added value to the model. -VotingPowerChange == - \E newValidators \in SUBSET Nodes: - /\ newValidators /= {} - /\ votingPowerRunning' \in [newValidators -> Nat] - /\ \A v \in newValidators: votingPowerRunning'[v] > 0 - \* Note: votingPowerHasChanged' is set to true - \* even if votingPowerRunning' = votingPowerRunning - /\ votingPowerHasChanged' = TRUE - /\ UNCHANGED consumerVars - /\ UNCHANGED << votingPowerHist, expectedResponders, maturePackets >> - /\ lastAction' = "VotingPowerChange" - -RcvPacket == - \E c \in ActiveConsumers: - \* There must be a packet to be received - /\ Len(ccvChannelsPending[c]) /= 0 - /\ LET packet == Head(ccvChannelsPending[c]) IN - \* The voting power adjusts immediately, but the matureVSCPacket - \* is sent later, on maturity - /\ votingPowerReferences' = [votingPowerReferences EXCEPT ![c] = packet] - \* Maturity happens after UnbondingPeriod time has elapsed on c - /\ maturityTimes' = [ - maturityTimes EXCEPT ![c] = - ExtendFnBy(maturityTimes[c], packet, currentTimes[c] + UnbondingPeriod) - ] - /\ ccvChannelsResolved' = [ccvChannelsResolved EXCEPT ![c] = Append(@, packet)] - \* Drop from channel, to unblock reception of other packets. 
- /\ ccvChannelsPending' = [ccvChannelsPending EXCEPT ![c] = Tail(@)] - /\ UNCHANGED providerVars - /\ UNCHANGED currentTimes - /\ UNCHANGED votingPowerHasChanged - /\ lastAction' = "RcvPacket" - -SendMatureVSCPacket == - \E c \in ActiveConsumers: - \* Has been received - \E packet \in DOMAIN maturityTimes[c]: - \* Has matured - /\ currentTimes[c] >= maturityTimes[c][packet] - \* Hasn't been sent before - /\ \A matureVSCPacket \in maturePackets: - \/ matureVSCPacket.chain /= c - \/ matureVSCPacket.packetTime /= packet - /\ maturePackets' = maturePackets \union { MatureVSCPacket(c, packet, currentTimes[c]) } - /\ UNCHANGED consumerVars - /\ UNCHANGED << votingPowerHist, votingPowerRunning, expectedResponders >> - /\ UNCHANGED votingPowerHasChanged - /\ lastAction' = "SendMatureVSCPacket" - -\* Partial action, always happens on Next -\* Each consumer status advances (or is unchanged) in the -\* Unused -> Initializing -> Active -> Dropped order -PromoteConsumers == - \E newStatus \in [ConsumerChains -> Status]: - /\ \A c \in ConsumerChains: - \* No regressions - /\ consumerStatus[c] = Initializing => newStatus[c] /= Unused - /\ consumerStatus[c] = Active => newStatus[c] \in {Active, Dropped} - /\ consumerStatus[c] = Dropped => newStatus[c] = Dropped - \* All timed out chains are dropped - \* Only ACTIVE chains may time out (not Initializing) - /\ ( /\ consumerStatus[c] = Active - /\ PacketTimeoutForConsumer(c, ccvChannelsPending'[c], currentTimes'[c], currentTimes'[ProviderChain], maturityTimes'[c]) - ) => consumerStatus[c] = Dropped - /\ consumerStatus' = newStatus - - -\* Partial action, always happens on EndBlock, may also happen independently -AdvanceTimeCore == - \E newTimes \in [Chains -> Nat]: - \* None regress - \* Does not guarantee strict time progression in AdvanceTime. - \* In EndProviderBlockAndSendPacket, provider time is forced - \* to strictly progress with an additional constraint. - /\ \A c \in Chains: newTimes[c] >= currentTimes[c] - /\ currentTimes' = newTimes - -\* Time may also elapse without EndProviderBlockAndSendPacket. 
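\* Illustrative example (an editorial aside, not part of the original
\* spec), writing the provider simply as "provider" and using a
\* hypothetical consumer "c1": starting from
\*     currentTimes = ("provider" :> 5 @@ "c1" :> 3)
\* AdvanceTimeCore admits any successor in which no clock goes backwards,
\* e.g. ("provider" :> 5 @@ "c1" :> 12); only EndProviderBlockAndSendPacket
\* additionally forces the provider clock to strictly increase. Whether
\* such jumps stay within MaxDrift between live chains is recorded
\* separately by the boundedDrift flag in Next.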
-\* Time may also elapse without EndProviderBlockAndSendPacket. -AdvanceTime == - /\ AdvanceTimeCore - /\ UNCHANGED providerVars - /\ UNCHANGED << votingPowerReferences, ccvChannelsPending, ccvChannelsResolved, maturityTimes >> - /\ UNCHANGED votingPowerHasChanged - /\ lastAction' = "AdvanceTime" - -EndProviderBlockAndSendPacket == - \* Packets are only sent if there is a VPC - /\ votingPowerHasChanged - /\ LET packet == currentTimes[ProviderChain] IN - /\ ccvChannelsPending' = - [ - chain \in ConsumerChains |-> Append( - ccvChannelsPending[chain], - \* a packet is just the current time, the VP can be read from votingPowerHist - currentTimes[ProviderChain] - ) - ] - /\ votingPowerHist' = ExtendFnBy(votingPowerHist, packet, votingPowerRunning) - \* All currently live (= Active or Init) consumers are expected to respond to this packet - /\ expectedResponders' = ExtendFnBy(expectedResponders, packet, LiveConsumers) - \* Reset flag for next block - /\ votingPowerHasChanged' = FALSE - \* packet sending forces time progression on provider - /\ AdvanceTimeCore - /\ currentTimes'[ProviderChain] > currentTimes[ProviderChain] - /\ UNCHANGED << votingPowerRunning, maturePackets >> - /\ UNCHANGED << votingPowerReferences, ccvChannelsResolved, maturityTimes >> - /\ lastAction' = "EndProviderBlockAndSendPacket" - -Next == - /\\/ EndProviderBlockAndSendPacket - \/ VotingPowerChange - \/ RcvPacket - \/ SendMatureVSCPacket - \/ AdvanceTime - \* Drop timed out, possibly promote rest - /\ PromoteConsumers - /\ boundedDrift' = boundedDrift /\ - BoundedDeltas(currentTimes', LiveNext \union {ProviderChain}, MaxDrift) - -(*** PROPERTIES/INVARIANTS ***) - -\* VSC must also mature on provider -LastVCSMatureOnProvider == - LastPacketTime + UnbondingPeriod <= currentTimes[ProviderChain] - -VPCUpdateInProgress == - \* some chain has pending packets - \/ \E c \in LiveConsumers: - \/ Len(ccvChannelsPending[c]) /= 0 - \/ \E packet \in DOMAIN maturityTimes[c]: maturityTimes[c][packet] < currentTimes[c] - \* not enough time has elapsed on provider itself since last update - \/ ~LastVCSMatureOnProvider - -LiveConsumersNotTimedOut == - \A c \in LiveConsumers: - ~PacketTimeoutForConsumer(c, ccvChannelsPending[c], currentTimes[c], currentTimes[ProviderChain], maturityTimes[c]) - -\* Sanity predicates check that the data structures don't take on unexpected values -SanityVP == - /\ \A t \in DOMAIN votingPowerHist: - LET VP == votingPowerHist[t] IN - VP /= UndefinedPower <=> - \A node \in DOMAIN VP: VP[node] >= 0 - /\ \A node \in DOMAIN votingPowerRunning: votingPowerRunning[node] >= 0 - -SanityRefs == - \A c \in ConsumerChains: - votingPowerReferences[c] < 0 <=> votingPowerReferences[c] = UndefinedTime - -SanityMaturity == - \A c \in ConsumerChains: - \A t \in DOMAIN maturityTimes[c]: - LET mt == maturityTimes[c][t] IN - mt < 0 <=> mt = UndefinedTime - -Sanity == - /\ SanityVP - /\ SanityRefs - /\ SanityMaturity - - -\* Since the clocks may drift, any delay that exceeds -\* Timeout + MaxDrift is perceived as timeout on all chains -AdjustedTimeout == Timeout + MaxDrift - -\* Any packet sent by the provider is either received within Timeout, or -\* the consumer chain is no longer considered active. -ReceptionBeforeTimeout == - \A t \in DOMAIN votingPowerHist: - \A c \in RemainingResponders(t): - \* If c is still live after Timeout has elapsed from packet t broadcast ... - TimeoutGuard(currentTimes[c], t + AdjustedTimeout) => - \* ... then c must have received packet t - t \in DOMAIN maturityTimes[c] -
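The drift adjustment in AdjustedTimeout above is plain arithmetic: because clocks on different chains may disagree by up to MaxDrift, a delay only counts as a timeout once Timeout + MaxDrift has elapsed on the observing chain's clock. With the commented-out example constants from MC_CCV.tla (Timeout = 4 * 7 * 24, MaxDrift = 24, in hours) that is 672 + 24 = 696 hours. A minimal Python sketch of the check behind ReceptionBeforeTimeout follows; the function name and the strict comparison are assumptions made for illustration, since TimeoutGuard itself is defined elsewhere in the spec.

TIMEOUT = 4 * 7 * 24  # hours; example value from the commented-out constants in MC_CCV.tla
MAX_DRIFT = 24        # hours; example value, required to be smaller than TIMEOUT

ADJUSTED_TIMEOUT = TIMEOUT + MAX_DRIFT  # 696 hours with the example values

def reception_overdue(consumer_now: int, packet_send_time: int) -> bool:
    # Mirrors the guard of ReceptionBeforeTimeout: once a live consumer's clock
    # is past send time + AdjustedTimeout, it must already have received the
    # packet (i.e. the packet is in DOMAIN maturityTimes[c]).
    return consumer_now > packet_send_time + ADJUSTED_TIMEOUT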
-\* Any packet received by the consumer matures within Timeout of reception, -\* or the consumer is no longer considered active. -MaturedBeforeTimeout == - \A t \in DOMAIN votingPowerHist: - \A c \in RemainingResponders(t): - t \in DOMAIN maturityTimes[c] => - \* If c is still active after Timeout has elapsed from packet t reception ... - \* Note: Reception time = maturityTimes[c][t] - UnbondingPeriod - ( - TimeoutGuard(currentTimes[ProviderChain] + UnbondingPeriod, maturityTimes[c][t] + AdjustedTimeout) => - \* ... then packet t must have matured on c - \E matureVSCPacket \in maturePackets: - /\ matureVSCPacket.chain = c - /\ matureVSCPacket.packet = t - ) - - -\* All packets mature at the latest by Timeout, from all -\* active consumers (or those consumers are removed from the active set) -\* It should be the case that ReceptionBeforeTimeout /\ MaturedBeforeTimeout => EventuallyMatureOnProvider -EventuallyMatureOnProvider == - \A t \in DOMAIN votingPowerHist: - \* If a packet was sent at time t and enough time has elapsed, - \* s.t. all consumers should have responded ... - TimeoutGuard(currentTimes[ProviderChain], t + 2 * AdjustedTimeout) => - \* then, all consumers have matured - \A c \in RemainingResponders(t): - \E matureVSCPacket \in maturePackets: - /\ matureVSCPacket.chain = c - /\ matureVSCPacket.packet = t - - -\* Invariants from https://github.com/cosmos/interchain-security/blob/main/docs/quality_assurance.md - -(* -4.10 - The provider chain's correctness is not affected by a consumer chain -shutting down - -What is "provider chain correctness"? -*) - -(* -4.11 - The provider chain can graciously handle a CCV packet timing out -(without shutting down) - expected outcome: -consumer chain shuts down and its state in provider CCV module is removed -*) -Inv411 == - boundedDrift => - \A c \in ConsumerChains: - TimeoutOnReception(ccvChannelsPending[c], currentTimes[c]) => - c \notin LiveConsumers - -(* -4.12 - The provider chain can graciously handle a StopConsumerChainProposal - -expected outcome: consumer chain shuts down and its state -in provider CCV module is removed. - -What is "graciously handle"? -*) - -(* -6.01 - Every validator set on any consumer chain MUST either be or have been -a validator set on the provider chain. - -In the current model, implicit through construction (votingPowerReferences) -*) -Inv601 == - \A c \in LiveConsumers: - LET ref == votingPowerReferences[c] IN - ref /= UndefinedTime => ref \in DOMAIN votingPowerHist - -(* -6.02 - Any update in the power of a validator val on the provider, as a result of -- (increase) Delegate() / Redelegate() to val -- (increase) val joining the provider validator set -- (decrease) Undelegate() / Redelegate() from val -- (decrease) Slash(val) -- (decrease) val leaving the provider validator set -MUST be present in a ValidatorSetChangePacket that is sent to all registered consumer chains -*) -Inv602 == - \A packet \in DOMAIN votingPowerHist: - \A c \in LiveConsumers: - LET packetsToC == PacketOrder(c) IN - \E i \in DOMAIN packetsToC: - packetsToC[i] = packet - -(* -6.03 - Every consumer chain receives the same sequence of -ValidatorSetChangePackets in the same order.
- -Note: consider only prefixes on received packets (ccvChannelsResolved) -*) -Inv603 == - \A c1,c2 \in LiveConsumers: - \A i \in (DOMAIN ccvChannelsResolved[c1] \intersect DOMAIN ccvChannelsResolved[c2]): - ccvChannelsResolved[c1][i] = ccvChannelsResolved[c2][i] - -(* -7.01 - For every ValidatorSetChangePacket received by a consumer chain at -time t, a MaturedVSCPacket is sent back to the provider in the first block -with a timestamp >= t + UnbondingPeriod - -Modification: not necessarily _first_ block with that timestamp, -since we don't model height _and_ time. -*) -Inv701 == - boundedDrift => MaturedBeforeTimeout - -(* -7.02 - If an unbonding operation resulted in a ValidatorSetChangePacket sent -to all registered consumer chains, then it cannot complete before receiving -matching MaturedVSCPackets from these consumer chains -(unless some of these consumer chains are removed) - -We can define change completion, but we don't model it. Best approximation: -*) -Inv702 == - boundedDrift => EventuallyMatureOnProvider - -Inv == - \* /\ Sanity - \* /\ LiveConsumersNotTimedOut - /\ (boundedDrift => - /\ ReceptionBeforeTimeout - /\ MaturedBeforeTimeout - ) - - - -============================================================================= \ No newline at end of file diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/ref/MC_CCV.tla b/x/ccv/provider/keyguard/prototyping/tla/library/ref/MC_CCV.tla deleted file mode 100644 index 40f9bb2177..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/ref/MC_CCV.tla +++ /dev/null @@ -1,62 +0,0 @@ ---------------------------- MODULE MC_CCV --------------------------- - -EXTENDS Integers - -Nodes == {"1_OF_N", "2_OF_N", "3_OF_N", "4_OF_N"} -ConsumerChains == {"1_OF_C", "2_OF_C", "3_OF_C", "4_OF_C"} -\* UnbondingPeriod == 3 * 7 * 24 \* h -\* Timeout == 4 * 7 * 24 \* h -\* MaxDrift == 24 \* h - -CONSTANT - \* @type: $time; - UnbondingPeriod, - \* @type: $time; - Timeout, - \* @type: $time; - MaxDrift - -CInit == - /\ UnbondingPeriod \in Nat - /\ Timeout \in Nat - /\ MaxDrift \in Nat - /\ MaxDrift < Timeout - -\* Provider chain only -VARIABLES - \* @type: $time -> $votingPowerOnChain; - votingPowerHist, - \* @type: $votingPowerOnChain; - votingPowerRunning, - \* @type: $chain -> STATUS; - consumerStatus, - \* @type: $packet -> Set($chain); - expectedResponders, - \* @type: Set($matureVSCPacket); - maturePackets - -\* Consumer chains or both -VARIABLES - \* @type: $chain -> $time; - votingPowerReferences, - \* @type: $chain -> Seq($packet); - ccvChannelsPending, - \* @type: $chain -> Seq($packet); - ccvChannelsResolved, - \* @type: $chain -> $time; - currentTimes, - \* @type: $chain -> $time -> $time; - maturityTimes - -\* Bookkeeping -VARIABLES - \* @type: Str; - lastAction, - \* @type: Bool; - votingPowerHasChanged, - \* @type: Bool; - boundedDrift - -INSTANCE CCV - -============================================================================= \ No newline at end of file diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/ref/typedefs.tla b/x/ccv/provider/keyguard/prototyping/tla/library/ref/typedefs.tla deleted file mode 100644 index 62e9ea7501..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/ref/typedefs.tla +++ /dev/null @@ -1,12 +0,0 @@ ---------------------------- MODULE typedefs --------------------------- -(* - @typeAlias: chain = C; chain type - @typeAlias: node = N; node type - @typeAlias: power = Int; voting power - @typeAlias: time = Int; - @typeAlias: votingPowerOnChain = $node -> $power; - 
@typeAlias: packet = $time; - @typeAlias: matureVSCPacket = [chain: $chain, packet: $packet, maturityTime: $time]; -*) -AliasesCVV == TRUE -============================================================================= \ No newline at end of file diff --git a/x/ccv/provider/keyguard/prototyping/tla/library/tlcFolds.tla b/x/ccv/provider/keyguard/prototyping/tla/library/tlcFolds.tla deleted file mode 100644 index d7dc1107ec..0000000000 --- a/x/ccv/provider/keyguard/prototyping/tla/library/tlcFolds.tla +++ /dev/null @@ -1,27 +0,0 @@ - ----- MODULE tlcFolds ---- - -EXTENDS Integers, FiniteSets, Sequences - -(*****************************************************************************) -(* The folding operator, used to implement computation over a set. *) -(* Apalache implements a more efficient encoding than the one below. *) -(* (from the community modules). *) -(*****************************************************************************) -RECURSIVE FoldSet(_,_,_) -FoldSet( Op(_,_), v, S ) == IF S = {} - THEN v - ELSE LET w == CHOOSE x \in S: TRUE - IN LET T == S \ {w} - IN FoldSet( Op, Op(v,w), T ) - -(*****************************************************************************) -(* The folding operator, used to implement computation over a sequence. *) -(* Apalache implements a more efficient encoding than the one below. *) -(* (from the community modules). *) -(*****************************************************************************) -RECURSIVE FoldSeq(_,_,_) -FoldSeq( Op(_,_), v, seq ) == IF seq = <<>> - THEN v - ELSE FoldSeq( Op, Op(v,Head(seq)), Tail(seq) ) -==== diff --git a/x/ccv/provider/keyguard/prototyping/ts/.eslintignore b/x/ccv/provider/keyguard/prototyping/ts/.eslintignore deleted file mode 100644 index fc40c5a94d..0000000000 --- a/x/ccv/provider/keyguard/prototyping/ts/.eslintignore +++ /dev/null @@ -1 +0,0 @@ -/**/*.js diff --git a/x/ccv/provider/keyguard/prototyping/ts/.eslintrc.json b/x/ccv/provider/keyguard/prototyping/ts/.eslintrc.json deleted file mode 100644 index 239cad64e9..0000000000 --- a/x/ccv/provider/keyguard/prototyping/ts/.eslintrc.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "env": { - "browser": false, - "es6": true, - "node": true - }, - "parser": "@typescript-eslint/parser", - "parserOptions": { - "project": "tsconfig.json", - "sourceType": "module", - "ecmaVersion": 2020 - }, - "plugins": [ - "@typescript-eslint", - "jest" - ], - "extends": [ - "eslint:recommended", - "plugin:@typescript-eslint/eslint-recommended", - "plugin:@typescript-eslint/recommended", - "plugin:jest/recommended", - "prettier" - ], - "rules": { - "@typescript-eslint/no-var-requires": 0 - } -} \ No newline at end of file diff --git a/x/ccv/provider/keyguard/prototyping/ts/.gitignore b/x/ccv/provider/keyguard/prototyping/ts/.gitignore deleted file mode 100644 index 48c9c9e654..0000000000 --- a/x/ccv/provider/keyguard/prototyping/ts/.gitignore +++ /dev/null @@ -1,31 +0,0 @@ -# Logs -logs -*.log -npm-debug.log* - -# Dependencies -node_modules/ - -# Coverage -coverage - -# Transpiled files -build/ - -# VS Code -.vscode -!.vscode/tasks.js - -# JetBrains IDEs -.idea/ - -# Optional npm cache directory -.npm - -# Optional eslint cache -.eslintcache - -# Misc -.DS_Store - -traces/ \ No newline at end of file diff --git a/x/ccv/provider/keyguard/prototyping/ts/.prettierrc b/x/ccv/provider/keyguard/prototyping/ts/.prettierrc deleted file mode 100644 index f17b0543ee..0000000000 --- a/x/ccv/provider/keyguard/prototyping/ts/.prettierrc +++ /dev/null @@ -1,17 +0,0 @@ -{ - "singleQuote": true, - 
"trailingComma": "all", - "overrides": [ - { - "files": [ - "*.ts", - "*.mts" - ], - "options": { - "parser": "typescript" - } - } - ], - "tabWidth": 2, - "printWidth": 74 -} \ No newline at end of file diff --git a/x/ccv/provider/keyguard/prototyping/ts/__tests__/gen.test.ts b/x/ccv/provider/keyguard/prototyping/ts/__tests__/gen.test.ts deleted file mode 100644 index 5bc75b7fdf..0000000000 --- a/x/ccv/provider/keyguard/prototyping/ts/__tests__/gen.test.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { gen } from '../src/main.js'; - -/** - * This test is useful to check how much coverage - * trace generation actually gets over the model. - * - * yarn jest --collect-coverage - */ -describe('check properties', () => { - it('_', () => { - gen(120, true); - expect(true).toBeTruthy(); // satisfies linter - }); -}); diff --git a/x/ccv/provider/keyguard/prototyping/ts/__tests__/tsconfig.json b/x/ccv/provider/keyguard/prototyping/ts/__tests__/tsconfig.json deleted file mode 100644 index 734ac11619..0000000000 --- a/x/ccv/provider/keyguard/prototyping/ts/__tests__/tsconfig.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "compilerOptions": { - "target": "ES2022", - "module": "node16", - "lib": [ - "ES2022" - ], - "moduleResolution": "Node16", - "rootDir": "..", - "outDir": "build", - "allowSyntheticDefaultImports": true, - "importHelpers": true, - "alwaysStrict": true, - "sourceMap": true, - "forceConsistentCasingInFileNames": true, - "noFallthroughCasesInSwitch": true, - "noImplicitReturns": true, - "noUnusedLocals": true, - "noUnusedParameters": true, - "noImplicitAny": true, - "noImplicitThis": true, - "strictNullChecks": true, - "allowJs": true, - "esModuleInterop": true - }, - "include": [ - "src/**/*", - "__tests__/**/*" - ] -} \ No newline at end of file diff --git a/x/ccv/provider/keyguard/prototyping/ts/jest.config.js b/x/ccv/provider/keyguard/prototyping/ts/jest.config.js deleted file mode 100644 index 2d2c1ef1d3..0000000000 --- a/x/ccv/provider/keyguard/prototyping/ts/jest.config.js +++ /dev/null @@ -1,24 +0,0 @@ -export default { - testEnvironment: 'node', - preset: 'ts-jest/presets/js-with-ts-esm', - globals: { - 'ts-jest': { - useESM: true, - tsconfig: '/__tests__/tsconfig.json', - }, - }, - transformIgnorePatterns: [ - "node_modules/(?!(time-span|convert-hrtime))", - ], - moduleNameMapper: { - '^(\\.{1,2}/.*)\\.(m)?js$': '$1', - }, - testRegex: '(/__tests__/.*|(\\.|/)(test|spec))\\.(m)?ts$', - coverageDirectory: 'coverage', - collectCoverageFrom: [ - 'src/**/*.ts', - 'src/**/*.mts', - '!src/**/*.d.ts', - '!src/**/*.d.mts', - ], -}; diff --git a/x/ccv/provider/keyguard/prototyping/ts/package.json b/x/ccv/provider/keyguard/prototyping/ts/package.json deleted file mode 100644 index cb129e4ce2..0000000000 --- a/x/ccv/provider/keyguard/prototyping/ts/package.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "name": "diff-tests-core", - "version": "3.0.1", - "description": "A model for generating difference tests for Interchain Security core protocol components.", - "type": "module", - "engines": { - "node": ">= 16.13 <17" - }, - "devDependencies": { - "@types/jest": "^28.1.4", - "@types/node": "~16", - "@typescript-eslint/eslint-plugin": "~5.26", - "@typescript-eslint/parser": "~5.26", - "eslint": "~8.16", - "eslint-config-prettier": "~8.5", - "eslint-plugin-jest": "~26.2", - "jest": "^28.1.1", - "prettier": "~2.6", - "rimraf": "~3.0", - "source-map-support": "^0.5.21", - "ts-jest": "^28.0.5", - "tsutils": "~3.21", - "typescript": "~4.7" - }, - "scripts": { - "start": "node build/src/main.js", - "clean": 
"rimraf coverage build tmp", - "prebuild": "npm run lint", - "build": "tsc -p tsconfig.json", - "build:watch": "tsc -w -p tsconfig.json", - "build:release": "npm run clean && tsc -p tsconfig.release.json", - "lint": "eslint . --ext .ts --ext .mts", - "test": "jest --coverage", - "prettier": "prettier --config .prettierrc --write .", - "test:watch": "jest --watch" - }, - "author": "", - "license": "Apache-2.0", - "dependencies": { - "@types/clone-deep": "^4.0.1", - "@types/underscore": "^1.11.4", - "clone-deep": "^4.0.1", - "time-span": "^5.1.0", - "tslib": "~2.4", - "underscore": "^1.13.4" - }, - "volta": { - "node": "16.13.0" - } -} \ No newline at end of file diff --git a/x/ccv/provider/keyguard/prototyping/ts/src/main.ts b/x/ccv/provider/keyguard/prototyping/ts/src/main.ts deleted file mode 100644 index 4fb0c2d54e..0000000000 --- a/x/ccv/provider/keyguard/prototyping/ts/src/main.ts +++ /dev/null @@ -1,23 +0,0 @@ -import * as fs from 'fs'; -import _ from 'underscore'; -import timeSpan from 'time-span'; -import cloneDeep from 'clone-deep'; - -class Model { - constructor() {} - - endBlock = () => { - /** - * EndBlock can cause a complete change in the validator set - * and the powers of the validators - * - */ - }; -} - -function main() { - const m = new Model(); -} - -console.log(`Running main`); -main(); diff --git a/x/ccv/provider/keyguard/prototyping/ts/tsconfig.json b/x/ccv/provider/keyguard/prototyping/ts/tsconfig.json deleted file mode 100644 index 2a6e221b27..0000000000 --- a/x/ccv/provider/keyguard/prototyping/ts/tsconfig.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "compilerOptions": { - "target": "ES2022", - "module": "node16", - "lib": [ - "ES2022" - ], - "moduleResolution": "Node16", - "rootDir": ".", - "outDir": "build", - "allowSyntheticDefaultImports": true, - "importHelpers": true, - "alwaysStrict": true, - "sourceMap": true, - "forceConsistentCasingInFileNames": true, - "noFallthroughCasesInSwitch": true, - "noImplicitReturns": true, - "noUnusedLocals": true, - "noUnusedParameters": true, - "noImplicitAny": true, - "noImplicitThis": true, - "strictNullChecks": true, - }, - "include": [ - "src/**/*", - "__tests__/**/*" - ] -} \ No newline at end of file diff --git a/x/ccv/provider/keyguard/prototyping/ts/tsconfig.release.json b/x/ccv/provider/keyguard/prototyping/ts/tsconfig.release.json deleted file mode 100644 index f08638c215..0000000000 --- a/x/ccv/provider/keyguard/prototyping/ts/tsconfig.release.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "sourceMap": false, - "removeComments": true - }, - "include": ["src/**/*"] -} diff --git a/x/ccv/provider/keyguard/prototyping/ts/yarn.lock b/x/ccv/provider/keyguard/prototyping/ts/yarn.lock deleted file mode 100644 index 77abc940b0..0000000000 --- a/x/ccv/provider/keyguard/prototyping/ts/yarn.lock +++ /dev/null @@ -1,2862 +0,0 @@ -# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
-# yarn lockfile v1 - - -"@ampproject/remapping@^2.1.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d" - integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== - dependencies: - "@jridgewell/gen-mapping" "^0.1.0" - "@jridgewell/trace-mapping" "^0.3.9" - -"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a" - integrity sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q== - dependencies: - "@babel/highlight" "^7.18.6" - -"@babel/compat-data@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.18.6.tgz#8b37d24e88e8e21c499d4328db80577d8882fa53" - integrity sha512-tzulrgDT0QD6U7BJ4TKVk2SDDg7wlP39P9yAx1RfLy7vP/7rsDRlWVfbWxElslu56+r7QOhB2NSDsabYYruoZQ== - -"@babel/core@^7.11.6", "@babel/core@^7.12.3": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.18.6.tgz#54a107a3c298aee3fe5e1947a6464b9b6faca03d" - integrity sha512-cQbWBpxcbbs/IUredIPkHiAGULLV8iwgNRMFzvbhEXISp4f3rUUXE5+TIw6KwUWUR3DwyI6gmBRnmAtYaWehwQ== - dependencies: - "@ampproject/remapping" "^2.1.0" - "@babel/code-frame" "^7.18.6" - "@babel/generator" "^7.18.6" - "@babel/helper-compilation-targets" "^7.18.6" - "@babel/helper-module-transforms" "^7.18.6" - "@babel/helpers" "^7.18.6" - "@babel/parser" "^7.18.6" - "@babel/template" "^7.18.6" - "@babel/traverse" "^7.18.6" - "@babel/types" "^7.18.6" - convert-source-map "^1.7.0" - debug "^4.1.0" - gensync "^1.0.0-beta.2" - json5 "^2.2.1" - semver "^6.3.0" - -"@babel/generator@^7.18.6", "@babel/generator@^7.7.2": - version "7.18.7" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.18.7.tgz#2aa78da3c05aadfc82dbac16c99552fc802284bd" - integrity sha512-shck+7VLlY72a2w9c3zYWuE1pwOKEiQHV7GTUbSnhyl5eu3i04t30tBY82ZRWrDfo3gkakCFtevExnxbkf2a3A== - dependencies: - "@babel/types" "^7.18.7" - "@jridgewell/gen-mapping" "^0.3.2" - jsesc "^2.5.1" - -"@babel/helper-compilation-targets@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.18.6.tgz#18d35bfb9f83b1293c22c55b3d576c1315b6ed96" - integrity sha512-vFjbfhNCzqdeAtZflUFrG5YIFqGTqsctrtkZ1D/NB0mDW9TwW3GmmUepYY4G9wCET5rY5ugz4OGTcLd614IzQg== - dependencies: - "@babel/compat-data" "^7.18.6" - "@babel/helper-validator-option" "^7.18.6" - browserslist "^4.20.2" - semver "^6.3.0" - -"@babel/helper-environment-visitor@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.6.tgz#b7eee2b5b9d70602e59d1a6cad7dd24de7ca6cd7" - integrity sha512-8n6gSfn2baOY+qlp+VSzsosjCVGFqWKmDF0cCWOybh52Dw3SEyoWR1KrhMJASjLwIEkkAufZ0xvr+SxLHSpy2Q== - -"@babel/helper-function-name@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.18.6.tgz#8334fecb0afba66e6d87a7e8c6bb7fed79926b83" - integrity sha512-0mWMxV1aC97dhjCah5U5Ua7668r5ZmSC2DLfH2EZnf9c3/dHZKiFa5pRLMH5tjSl471tY6496ZWk/kjNONBxhw== - dependencies: - "@babel/template" "^7.18.6" - "@babel/types" "^7.18.6" - -"@babel/helper-hoist-variables@^7.18.6": - version "7.18.6" - resolved 
"https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678" - integrity sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-module-imports@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz#1e3ebdbbd08aad1437b428c50204db13c5a3ca6e" - integrity sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-module-transforms@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.18.6.tgz#57e3ca669e273d55c3cda55e6ebf552f37f483c8" - integrity sha512-L//phhB4al5uucwzlimruukHB3jRd5JGClwRMD/ROrVjXfLqovYnvQrK/JK36WYyVwGGO7OD3kMyVTjx+WVPhw== - dependencies: - "@babel/helper-environment-visitor" "^7.18.6" - "@babel/helper-module-imports" "^7.18.6" - "@babel/helper-simple-access" "^7.18.6" - "@babel/helper-split-export-declaration" "^7.18.6" - "@babel/helper-validator-identifier" "^7.18.6" - "@babel/template" "^7.18.6" - "@babel/traverse" "^7.18.6" - "@babel/types" "^7.18.6" - -"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.8.0": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.18.6.tgz#9448974dd4fb1d80fefe72e8a0af37809cd30d6d" - integrity sha512-gvZnm1YAAxh13eJdkb9EWHBnF3eAub3XTLCZEehHT2kWxiKVRL64+ae5Y6Ivne0mVHmMYKT+xWgZO+gQhuLUBg== - -"@babel/helper-simple-access@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.18.6.tgz#d6d8f51f4ac2978068df934b569f08f29788c7ea" - integrity sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-split-export-declaration@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz#7367949bc75b20c6d5a5d4a97bba2824ae8ef075" - integrity sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-validator-identifier@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.18.6.tgz#9c97e30d31b2b8c72a1d08984f2ca9b574d7a076" - integrity sha512-MmetCkz9ej86nJQV+sFCxoGGrUbU3q02kgLciwkrt9QqEB7cP39oKEY0PakknEO0Gu20SskMRi+AYZ3b1TpN9g== - -"@babel/helper-validator-option@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz#bf0d2b5a509b1f336099e4ff36e1a63aa5db4db8" - integrity sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw== - -"@babel/helpers@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.18.6.tgz#4c966140eaa1fcaa3d5a8c09d7db61077d4debfd" - integrity sha512-vzSiiqbQOghPngUYt/zWGvK3LAsPhz55vc9XNN0xAl2gV4ieShI2OQli5duxWHD+72PZPTKAcfcZDE1Cwc5zsQ== - dependencies: - "@babel/template" "^7.18.6" - 
"@babel/traverse" "^7.18.6" - "@babel/types" "^7.18.6" - -"@babel/highlight@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf" - integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== - dependencies: - "@babel/helper-validator-identifier" "^7.18.6" - chalk "^2.0.0" - js-tokens "^4.0.0" - -"@babel/parser@^7.1.0", "@babel/parser@^7.14.7", "@babel/parser@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.18.6.tgz#845338edecad65ebffef058d3be851f1d28a63bc" - integrity sha512-uQVSa9jJUe/G/304lXspfWVpKpK4euFLgGiMQFOCpM/bgcAdeoHwi/OQz23O9GK2osz26ZiXRRV9aV+Yl1O8tw== - -"@babel/plugin-syntax-async-generators@^7.8.4": - version "7.8.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" - integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-bigint@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" - integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-class-properties@^7.8.3": - version "7.12.13" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz#b5c987274c4a3a82b89714796931a6b53544ae10" - integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== - dependencies: - "@babel/helper-plugin-utils" "^7.12.13" - -"@babel/plugin-syntax-import-meta@^7.8.3": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" - integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-json-strings@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" - integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-logical-assignment-operators@^7.8.3": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" - integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" - integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== - dependencies: - "@babel/helper-plugin-utils" 
"^7.8.0" - -"@babel/plugin-syntax-numeric-separator@^7.8.3": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" - integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-object-rest-spread@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" - integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-optional-catch-binding@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" - integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-optional-chaining@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" - integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-top-level-await@^7.8.3": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz#c1cfdadc35a646240001f06138247b741c34d94c" - integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== - dependencies: - "@babel/helper-plugin-utils" "^7.14.5" - -"@babel/plugin-syntax-typescript@^7.7.2": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.18.6.tgz#1c09cd25795c7c2b8a4ba9ae49394576d4133285" - integrity sha512-mAWAuq4rvOepWCBid55JuRNvpTNf2UGVgoz4JV0fXEKolsVZDzsa4NqCef758WZJj/GDu0gVGItjKFiClTAmZA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/template@^7.18.6", "@babel/template@^7.3.3": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.18.6.tgz#1283f4993e00b929d6e2d3c72fdc9168a2977a31" - integrity sha512-JoDWzPe+wgBsTTgdnIma3iHNFC7YVJoPssVBDjiHfNlyt4YcunDtcDOUmfVDfCK5MfdsaIoX9PkijPhjH3nYUw== - dependencies: - "@babel/code-frame" "^7.18.6" - "@babel/parser" "^7.18.6" - "@babel/types" "^7.18.6" - -"@babel/traverse@^7.18.6", "@babel/traverse@^7.7.2": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.18.6.tgz#a228562d2f46e89258efa4ddd0416942e2fd671d" - integrity sha512-zS/OKyqmD7lslOtFqbscH6gMLFYOfG1YPqCKfAW5KrTeolKqvB8UelR49Fpr6y93kYkW2Ik00mT1LOGiAGvizw== - dependencies: - "@babel/code-frame" "^7.18.6" - "@babel/generator" "^7.18.6" - "@babel/helper-environment-visitor" "^7.18.6" - "@babel/helper-function-name" "^7.18.6" - "@babel/helper-hoist-variables" "^7.18.6" - "@babel/helper-split-export-declaration" "^7.18.6" - "@babel/parser" "^7.18.6" - "@babel/types" "^7.18.6" - debug "^4.1.0" - globals "^11.1.0" - -"@babel/types@^7.0.0", "@babel/types@^7.18.6", "@babel/types@^7.18.7", 
"@babel/types@^7.3.0", "@babel/types@^7.3.3": - version "7.18.7" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.18.7.tgz#a4a2c910c15040ea52cdd1ddb1614a65c8041726" - integrity sha512-QG3yxTcTIBoAcQmkCs+wAPYZhu7Dk9rXKacINfNbdJDNERTbLQbHGyVG8q/YGMPeCJRIhSY0+fTc5+xuh6WPSQ== - dependencies: - "@babel/helper-validator-identifier" "^7.18.6" - to-fast-properties "^2.0.0" - -"@bcoe/v8-coverage@^0.2.3": - version "0.2.3" - resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" - integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== - -"@eslint/eslintrc@^1.3.0": - version "1.3.0" - resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-1.3.0.tgz#29f92c30bb3e771e4a2048c95fa6855392dfac4f" - integrity sha512-UWW0TMTmk2d7hLcWD1/e2g5HDM/HQ3csaLSqXCfqwh4uNDuNqlaKWXmEsL4Cs41Z0KnILNvwbHAah3C2yt06kw== - dependencies: - ajv "^6.12.4" - debug "^4.3.2" - espree "^9.3.2" - globals "^13.15.0" - ignore "^5.2.0" - import-fresh "^3.2.1" - js-yaml "^4.1.0" - minimatch "^3.1.2" - strip-json-comments "^3.1.1" - -"@humanwhocodes/config-array@^0.9.2": - version "0.9.5" - resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.9.5.tgz#2cbaf9a89460da24b5ca6531b8bbfc23e1df50c7" - integrity sha512-ObyMyWxZiCu/yTisA7uzx81s40xR2fD5Cg/2Kq7G02ajkNubJf6BopgDTmDyc3U7sXpNKM8cYOw7s7Tyr+DnCw== - dependencies: - "@humanwhocodes/object-schema" "^1.2.1" - debug "^4.1.1" - minimatch "^3.0.4" - -"@humanwhocodes/object-schema@^1.2.1": - version "1.2.1" - resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" - integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== - -"@istanbuljs/load-nyc-config@^1.0.0": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" - integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== - dependencies: - camelcase "^5.3.1" - find-up "^4.1.0" - get-package-type "^0.1.0" - js-yaml "^3.13.1" - resolve-from "^5.0.0" - -"@istanbuljs/schema@^0.1.2": - version "0.1.3" - resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" - integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== - -"@jest/console@^28.1.1": - version "28.1.1" - resolved "https://registry.yarnpkg.com/@jest/console/-/console-28.1.1.tgz#305f8ca50b6e70413839f54c0e002b60a0f2fd7d" - integrity sha512-0RiUocPVFEm3WRMOStIHbRWllG6iW6E3/gUPnf4lkrVFyXIIDeCe+vlKeYyFOMhB2EPE6FLFCNADSOOQMaqvyA== - dependencies: - "@jest/types" "^28.1.1" - "@types/node" "*" - chalk "^4.0.0" - jest-message-util "^28.1.1" - jest-util "^28.1.1" - slash "^3.0.0" - -"@jest/core@^28.1.2": - version "28.1.2" - resolved "https://registry.yarnpkg.com/@jest/core/-/core-28.1.2.tgz#eac519b9acbd154313854b8823a47b5c645f785a" - integrity sha512-Xo4E+Sb/nZODMGOPt2G3cMmCBqL4/W2Ijwr7/mrXlq4jdJwcFQ/9KrrJZT2adQRk2otVBXXOz1GRQ4Z5iOgvRQ== - dependencies: - "@jest/console" "^28.1.1" - "@jest/reporters" "^28.1.2" - "@jest/test-result" "^28.1.1" - "@jest/transform" "^28.1.2" - "@jest/types" "^28.1.1" - "@types/node" "*" - ansi-escapes "^4.2.1" - chalk "^4.0.0" - ci-info "^3.2.0" - exit "^0.1.2" - graceful-fs "^4.2.9" - jest-changed-files 
"^28.0.2" - jest-config "^28.1.2" - jest-haste-map "^28.1.1" - jest-message-util "^28.1.1" - jest-regex-util "^28.0.2" - jest-resolve "^28.1.1" - jest-resolve-dependencies "^28.1.2" - jest-runner "^28.1.2" - jest-runtime "^28.1.2" - jest-snapshot "^28.1.2" - jest-util "^28.1.1" - jest-validate "^28.1.1" - jest-watcher "^28.1.1" - micromatch "^4.0.4" - pretty-format "^28.1.1" - rimraf "^3.0.0" - slash "^3.0.0" - strip-ansi "^6.0.0" - -"@jest/environment@^28.1.2": - version "28.1.2" - resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-28.1.2.tgz#94a052c0c5f9f8c8e6d13ea6da78dbc5d7d9b85b" - integrity sha512-I0CR1RUMmOzd0tRpz10oUfaChBWs+/Hrvn5xYhMEF/ZqrDaaeHwS8yDBqEWCrEnkH2g+WE/6g90oBv3nKpcm8Q== - dependencies: - "@jest/fake-timers" "^28.1.2" - "@jest/types" "^28.1.1" - "@types/node" "*" - jest-mock "^28.1.1" - -"@jest/expect-utils@^28.1.1": - version "28.1.1" - resolved "https://registry.yarnpkg.com/@jest/expect-utils/-/expect-utils-28.1.1.tgz#d84c346025b9f6f3886d02c48a6177e2b0360587" - integrity sha512-n/ghlvdhCdMI/hTcnn4qV57kQuV9OTsZzH1TTCVARANKhl6hXJqLKUkwX69ftMGpsbpt96SsDD8n8LD2d9+FRw== - dependencies: - jest-get-type "^28.0.2" - -"@jest/expect@^28.1.2": - version "28.1.2" - resolved "https://registry.yarnpkg.com/@jest/expect/-/expect-28.1.2.tgz#0b25acedff46e1e1e5606285306c8a399c12534f" - integrity sha512-HBzyZBeFBiOelNbBKN0pilWbbrGvwDUwAqMC46NVJmWm8AVkuE58NbG1s7DR4cxFt4U5cVLxofAoHxgvC5MyOw== - dependencies: - expect "^28.1.1" - jest-snapshot "^28.1.2" - -"@jest/fake-timers@^28.1.2": - version "28.1.2" - resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-28.1.2.tgz#d49e8ee4e02ba85a6e844a52a5e7c59c23e3b76f" - integrity sha512-xSYEI7Y0D5FbZN2LsCUj/EKRR1zfQYmGuAUVh6xTqhx7V5JhjgMcK5Pa0iR6WIk0GXiHDe0Ke4A+yERKE9saqg== - dependencies: - "@jest/types" "^28.1.1" - "@sinonjs/fake-timers" "^9.1.2" - "@types/node" "*" - jest-message-util "^28.1.1" - jest-mock "^28.1.1" - jest-util "^28.1.1" - -"@jest/globals@^28.1.2": - version "28.1.2" - resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-28.1.2.tgz#92fab296e337c7309c25e4202fb724f62249d83f" - integrity sha512-cz0lkJVDOtDaYhvT3Fv2U1B6FtBnV+OpEyJCzTHM1fdoTsU4QNLAt/H4RkiwEUU+dL4g/MFsoTuHeT2pvbo4Hg== - dependencies: - "@jest/environment" "^28.1.2" - "@jest/expect" "^28.1.2" - "@jest/types" "^28.1.1" - -"@jest/reporters@^28.1.2": - version "28.1.2" - resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-28.1.2.tgz#0327be4ce4d0d9ae49e7908656f89669d0c2a260" - integrity sha512-/whGLhiwAqeCTmQEouSigUZJPVl7sW8V26EiboImL+UyXznnr1a03/YZ2BX8OlFw0n+Zlwu+EZAITZtaeRTxyA== - dependencies: - "@bcoe/v8-coverage" "^0.2.3" - "@jest/console" "^28.1.1" - "@jest/test-result" "^28.1.1" - "@jest/transform" "^28.1.2" - "@jest/types" "^28.1.1" - "@jridgewell/trace-mapping" "^0.3.13" - "@types/node" "*" - chalk "^4.0.0" - collect-v8-coverage "^1.0.0" - exit "^0.1.2" - glob "^7.1.3" - graceful-fs "^4.2.9" - istanbul-lib-coverage "^3.0.0" - istanbul-lib-instrument "^5.1.0" - istanbul-lib-report "^3.0.0" - istanbul-lib-source-maps "^4.0.0" - istanbul-reports "^3.1.3" - jest-message-util "^28.1.1" - jest-util "^28.1.1" - jest-worker "^28.1.1" - slash "^3.0.0" - string-length "^4.0.1" - strip-ansi "^6.0.0" - terminal-link "^2.0.0" - v8-to-istanbul "^9.0.1" - -"@jest/schemas@^28.0.2": - version "28.0.2" - resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-28.0.2.tgz#08c30df6a8d07eafea0aef9fb222c5e26d72e613" - integrity 
sha512-YVDJZjd4izeTDkij00vHHAymNXQ6WWsdChFRK86qck6Jpr3DCL5W3Is3vslviRlP+bLuMYRLbdp98amMvqudhA== - dependencies: - "@sinclair/typebox" "^0.23.3" - -"@jest/source-map@^28.1.2": - version "28.1.2" - resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-28.1.2.tgz#7fe832b172b497d6663cdff6c13b0a920e139e24" - integrity sha512-cV8Lx3BeStJb8ipPHnqVw/IM2VCMWO3crWZzYodSIkxXnRcXJipCdx1JCK0K5MsJJouZQTH73mzf4vgxRaH9ww== - dependencies: - "@jridgewell/trace-mapping" "^0.3.13" - callsites "^3.0.0" - graceful-fs "^4.2.9" - -"@jest/test-result@^28.1.1": - version "28.1.1" - resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-28.1.1.tgz#c6f18d1bbb01aa88925dd687872a75f8414b317a" - integrity sha512-hPmkugBktqL6rRzwWAtp1JtYT4VHwv8OQ+9lE5Gymj6dHzubI/oJHMUpPOt8NrdVWSrz9S7bHjJUmv2ggFoUNQ== - dependencies: - "@jest/console" "^28.1.1" - "@jest/types" "^28.1.1" - "@types/istanbul-lib-coverage" "^2.0.0" - collect-v8-coverage "^1.0.0" - -"@jest/test-sequencer@^28.1.1": - version "28.1.1" - resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-28.1.1.tgz#f594ee2331df75000afe0d1ae3237630ecec732e" - integrity sha512-nuL+dNSVMcWB7OOtgb0EGH5AjO4UBCt68SLP08rwmC+iRhyuJWS9MtZ/MpipxFwKAlHFftbMsydXqWre8B0+XA== - dependencies: - "@jest/test-result" "^28.1.1" - graceful-fs "^4.2.9" - jest-haste-map "^28.1.1" - slash "^3.0.0" - -"@jest/transform@^28.1.2": - version "28.1.2" - resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-28.1.2.tgz#b367962c53fd53821269bde050ce373e111327c1" - integrity sha512-3o+lKF6iweLeJFHBlMJysdaPbpoMmtbHEFsjzSv37HIq/wWt5ijTeO2Yf7MO5yyczCopD507cNwNLeX8Y/CuIg== - dependencies: - "@babel/core" "^7.11.6" - "@jest/types" "^28.1.1" - "@jridgewell/trace-mapping" "^0.3.13" - babel-plugin-istanbul "^6.1.1" - chalk "^4.0.0" - convert-source-map "^1.4.0" - fast-json-stable-stringify "^2.0.0" - graceful-fs "^4.2.9" - jest-haste-map "^28.1.1" - jest-regex-util "^28.0.2" - jest-util "^28.1.1" - micromatch "^4.0.4" - pirates "^4.0.4" - slash "^3.0.0" - write-file-atomic "^4.0.1" - -"@jest/types@^28.1.1": - version "28.1.1" - resolved "https://registry.yarnpkg.com/@jest/types/-/types-28.1.1.tgz#d059bbc80e6da6eda9f081f293299348bd78ee0b" - integrity sha512-vRXVqSg1VhDnB8bWcmvLzmg0Bt9CRKVgHPXqYwvWMX3TvAjeO+nRuK6+VdTKCtWOvYlmkF/HqNAL/z+N3B53Kw== - dependencies: - "@jest/schemas" "^28.0.2" - "@types/istanbul-lib-coverage" "^2.0.0" - "@types/istanbul-reports" "^3.0.0" - "@types/node" "*" - "@types/yargs" "^17.0.8" - chalk "^4.0.0" - -"@jridgewell/gen-mapping@^0.1.0": - version "0.1.1" - resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" - integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== - dependencies: - "@jridgewell/set-array" "^1.0.0" - "@jridgewell/sourcemap-codec" "^1.4.10" - -"@jridgewell/gen-mapping@^0.3.2": - version "0.3.2" - resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" - integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== - dependencies: - "@jridgewell/set-array" "^1.0.1" - "@jridgewell/sourcemap-codec" "^1.4.10" - "@jridgewell/trace-mapping" "^0.3.9" - -"@jridgewell/resolve-uri@^3.0.3": - version "3.0.8" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.0.8.tgz#687cc2bbf243f4e9a868ecf2262318e2658873a1" - integrity 
sha512-YK5G9LaddzGbcucK4c8h5tWFmMPBvRZ/uyWmN1/SbBdIvqGUdWGkJ5BAaccgs6XbzVLsqbPJrBSFwKv3kT9i7w== - -"@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.0.1": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" - integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== - -"@jridgewell/sourcemap-codec@^1.4.10": - version "1.4.14" - resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" - integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== - -"@jridgewell/trace-mapping@^0.3.12", "@jridgewell/trace-mapping@^0.3.13", "@jridgewell/trace-mapping@^0.3.9": - version "0.3.14" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.14.tgz#b231a081d8f66796e475ad588a1ef473112701ed" - integrity sha512-bJWEfQ9lPTvm3SneWwRFVLzrh6nhjwqw7TUFFBEMzwvg7t7PCDenf2lDwqo4NQXzdpgBXyFgDWnQA+2vkruksQ== - dependencies: - "@jridgewell/resolve-uri" "^3.0.3" - "@jridgewell/sourcemap-codec" "^1.4.10" - -"@nodelib/fs.scandir@2.1.5": - version "2.1.5" - resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" - integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== - dependencies: - "@nodelib/fs.stat" "2.0.5" - run-parallel "^1.1.9" - -"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": - version "2.0.5" - resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" - integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== - -"@nodelib/fs.walk@^1.2.3": - version "1.2.8" - resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" - integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== - dependencies: - "@nodelib/fs.scandir" "2.1.5" - fastq "^1.6.0" - -"@sinclair/typebox@^0.23.3": - version "0.23.5" - resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.23.5.tgz#93f7b9f4e3285a7a9ade7557d9a8d36809cbc47d" - integrity sha512-AFBVi/iT4g20DHoujvMH1aEDn8fGJh4xsRGCP6d8RpLPMqsNPvW01Jcn0QysXTsg++/xj25NmJsGyH9xug/wKg== - -"@sinonjs/commons@^1.7.0": - version "1.8.3" - resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" - integrity sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ== - dependencies: - type-detect "4.0.8" - -"@sinonjs/fake-timers@^9.1.2": - version "9.1.2" - resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-9.1.2.tgz#4eaab737fab77332ab132d396a3c0d364bd0ea8c" - integrity sha512-BPS4ynJW/o92PUR4wgriz2Ud5gpST5vz6GQfMixEDK0Z8ZCUv2M7SkBLykH56T++Xs+8ln9zTGbOvNGIe02/jw== - dependencies: - "@sinonjs/commons" "^1.7.0" - -"@types/babel__core@^7.1.14": - version "7.1.19" - resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.19.tgz#7b497495b7d1b4812bdb9d02804d0576f43ee460" - integrity sha512-WEOTgRsbYkvA/KCsDwVEGkd7WAr1e3g31VHQ8zy5gul/V1qKullU/BU5I68X5v7V3GnB9eotmom4v5a5gjxorw== - dependencies: - "@babel/parser" "^7.1.0" - "@babel/types" "^7.0.0" - "@types/babel__generator" "*" - "@types/babel__template" "*" - 
"@types/babel__traverse" "*" - -"@types/babel__generator@*": - version "7.6.4" - resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.4.tgz#1f20ce4c5b1990b37900b63f050182d28c2439b7" - integrity sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg== - dependencies: - "@babel/types" "^7.0.0" - -"@types/babel__template@*": - version "7.4.1" - resolved "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.4.1.tgz#3d1a48fd9d6c0edfd56f2ff578daed48f36c8969" - integrity sha512-azBFKemX6kMg5Io+/rdGT0dkGreboUVR0Cdm3fz9QJWpaQGJRQXl7C+6hOTCZcMll7KFyEQpgbYI2lHdsS4U7g== - dependencies: - "@babel/parser" "^7.1.0" - "@babel/types" "^7.0.0" - -"@types/babel__traverse@*", "@types/babel__traverse@^7.0.6": - version "7.17.1" - resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.17.1.tgz#1a0e73e8c28c7e832656db372b779bfd2ef37314" - integrity sha512-kVzjari1s2YVi77D3w1yuvohV2idweYXMCDzqBiVNN63TcDWrIlTVOYpqVrvbbyOE/IyzBoTKF0fdnLPEORFxA== - dependencies: - "@babel/types" "^7.3.0" - -"@types/clone-deep@^4.0.1": - version "4.0.1" - resolved "https://registry.yarnpkg.com/@types/clone-deep/-/clone-deep-4.0.1.tgz#7c488443ab9f571cd343d774551b78e9264ea990" - integrity sha512-bdkCSkyVHsgl3Goe1y16T9k6JuQx7SiDREkq728QjKmTZkGJZuS8R3gGcnGzVuGBP0mssKrzM/GlMOQxtip9cg== - -"@types/graceful-fs@^4.1.3": - version "4.1.5" - resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15" - integrity sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw== - dependencies: - "@types/node" "*" - -"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": - version "2.0.4" - resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz#8467d4b3c087805d63580480890791277ce35c44" - integrity sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g== - -"@types/istanbul-lib-report@*": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" - integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== - dependencies: - "@types/istanbul-lib-coverage" "*" - -"@types/istanbul-reports@^3.0.0": - version "3.0.1" - resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.1.tgz#9153fe98bba2bd565a63add9436d6f0d7f8468ff" - integrity sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw== - dependencies: - "@types/istanbul-lib-report" "*" - -"@types/jest@^28.1.4": - version "28.1.4" - resolved "https://registry.yarnpkg.com/@types/jest/-/jest-28.1.4.tgz#a11ee6c8fd0b52c19c9c18138b78bbcc201dad5a" - integrity sha512-telv6G5N7zRJiLcI3Rs3o+ipZ28EnE+7EvF0pSrt2pZOMnAVI/f+6/LucDxOvcBcTeTL3JMF744BbVQAVBUQRA== - dependencies: - jest-matcher-utils "^28.0.0" - pretty-format "^28.0.0" - -"@types/json-schema@^7.0.9": - version "7.0.11" - resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.11.tgz#d421b6c527a3037f7c84433fd2c4229e016863d3" - integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== - -"@types/node@*": - version "18.0.0" - resolved 
"https://registry.yarnpkg.com/@types/node/-/node-18.0.0.tgz#67c7b724e1bcdd7a8821ce0d5ee184d3b4dd525a" - integrity sha512-cHlGmko4gWLVI27cGJntjs/Sj8th9aYwplmZFwmmgYQQvL5NUsgVJG7OddLvNfLqYS31KFN0s3qlaD9qCaxACA== - -"@types/node@~16": - version "16.11.42" - resolved "https://registry.yarnpkg.com/@types/node/-/node-16.11.42.tgz#d2a75c58e9b0902b82dc54bd4c13f8ef12bd1020" - integrity sha512-iwLrPOopPy6V3E+1yHTpJea3bdsNso0b0utLOJJwaa/PLzqBt3GZl3stMcakc/gr89SfcNk2ki3z7Gvue9hYGQ== - -"@types/prettier@^2.1.5": - version "2.6.3" - resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.6.3.tgz#68ada76827b0010d0db071f739314fa429943d0a" - integrity sha512-ymZk3LEC/fsut+/Q5qejp6R9O1rMxz3XaRHDV6kX8MrGAhOSPqVARbDi+EZvInBpw+BnCX3TD240byVkOfQsHg== - -"@types/stack-utils@^2.0.0": - version "2.0.1" - resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.1.tgz#20f18294f797f2209b5f65c8e3b5c8e8261d127c" - integrity sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw== - -"@types/underscore@^1.11.4": - version "1.11.4" - resolved "https://registry.yarnpkg.com/@types/underscore/-/underscore-1.11.4.tgz#62e393f8bc4bd8a06154d110c7d042a93751def3" - integrity sha512-uO4CD2ELOjw8tasUrAhvnn2W4A0ZECOvMjCivJr4gA9pGgjv+qxKWY9GLTMVEK8ej85BxQOocUyE7hImmSQYcg== - -"@types/yargs-parser@*": - version "21.0.0" - resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.0.tgz#0c60e537fa790f5f9472ed2776c2b71ec117351b" - integrity sha512-iO9ZQHkZxHn4mSakYV0vFHAVDyEOIJQrV2uZ06HxEPcx+mt8swXoZHIbaaJ2crJYFfErySgktuTZ3BeLz+XmFA== - -"@types/yargs@^17.0.8": - version "17.0.10" - resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.10.tgz#591522fce85d8739bca7b8bb90d048e4478d186a" - integrity sha512-gmEaFwpj/7f/ROdtIlci1R1VYU1J4j95m8T+Tj3iBgiBFKg1foE/PSl93bBd5T9LDXNPo8UlNN6W0qwD8O5OaA== - dependencies: - "@types/yargs-parser" "*" - -"@typescript-eslint/eslint-plugin@~5.26": - version "5.26.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.26.0.tgz#c1f98ccba9d345e38992975d3ca56ed6260643c2" - integrity sha512-oGCmo0PqnRZZndr+KwvvAUvD3kNE4AfyoGCwOZpoCncSh4MVD06JTE8XQa2u9u+NX5CsyZMBTEc2C72zx38eYA== - dependencies: - "@typescript-eslint/scope-manager" "5.26.0" - "@typescript-eslint/type-utils" "5.26.0" - "@typescript-eslint/utils" "5.26.0" - debug "^4.3.4" - functional-red-black-tree "^1.0.1" - ignore "^5.2.0" - regexpp "^3.2.0" - semver "^7.3.7" - tsutils "^3.21.0" - -"@typescript-eslint/parser@~5.26": - version "5.26.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-5.26.0.tgz#a61b14205fe2ab7533deb4d35e604add9a4ceee2" - integrity sha512-n/IzU87ttzIdnAH5vQ4BBDnLPly7rC5VnjN3m0xBG82HK6rhRxnCb3w/GyWbNDghPd+NktJqB/wl6+YkzZ5T5Q== - dependencies: - "@typescript-eslint/scope-manager" "5.26.0" - "@typescript-eslint/types" "5.26.0" - "@typescript-eslint/typescript-estree" "5.26.0" - debug "^4.3.4" - -"@typescript-eslint/scope-manager@5.26.0": - version "5.26.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.26.0.tgz#44209c7f649d1a120f0717e0e82da856e9871339" - integrity sha512-gVzTJUESuTwiju/7NiTb4c5oqod8xt5GhMbExKsCTp6adU3mya6AGJ4Pl9xC7x2DX9UYFsjImC0mA62BCY22Iw== - dependencies: - "@typescript-eslint/types" "5.26.0" - "@typescript-eslint/visitor-keys" "5.26.0" - -"@typescript-eslint/scope-manager@5.30.0": - version "5.30.0" - resolved 
"https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.30.0.tgz#bf585ee801ab4ad84db2f840174e171a6bb002c7" - integrity sha512-3TZxvlQcK5fhTBw5solQucWSJvonXf5yua5nx8OqK94hxdrT7/6W3/CS42MLd/f1BmlmmbGEgQcTHHCktUX5bQ== - dependencies: - "@typescript-eslint/types" "5.30.0" - "@typescript-eslint/visitor-keys" "5.30.0" - -"@typescript-eslint/type-utils@5.26.0": - version "5.26.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-5.26.0.tgz#937dee97702361744a3815c58991acf078230013" - integrity sha512-7ccbUVWGLmcRDSA1+ADkDBl5fP87EJt0fnijsMFTVHXKGduYMgienC/i3QwoVhDADUAPoytgjbZbCOMj4TY55A== - dependencies: - "@typescript-eslint/utils" "5.26.0" - debug "^4.3.4" - tsutils "^3.21.0" - -"@typescript-eslint/types@5.26.0": - version "5.26.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.26.0.tgz#cb204bb154d3c103d9cc4d225f311b08219469f3" - integrity sha512-8794JZFE1RN4XaExLWLI2oSXsVImNkl79PzTOOWt9h0UHROwJedNOD2IJyfL0NbddFllcktGIO2aOu10avQQyA== - -"@typescript-eslint/types@5.30.0": - version "5.30.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.30.0.tgz#db7d81d585a3da3801432a9c1d2fafbff125e110" - integrity sha512-vfqcBrsRNWw/LBXyncMF/KrUTYYzzygCSsVqlZ1qGu1QtGs6vMkt3US0VNSQ05grXi5Yadp3qv5XZdYLjpp8ag== - -"@typescript-eslint/typescript-estree@5.26.0": - version "5.26.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.26.0.tgz#16cbceedb0011c2ed4f607255f3ee1e6e43b88c3" - integrity sha512-EyGpw6eQDsfD6jIqmXP3rU5oHScZ51tL/cZgFbFBvWuCwrIptl+oueUZzSmLtxFuSOQ9vDcJIs+279gnJkfd1w== - dependencies: - "@typescript-eslint/types" "5.26.0" - "@typescript-eslint/visitor-keys" "5.26.0" - debug "^4.3.4" - globby "^11.1.0" - is-glob "^4.0.3" - semver "^7.3.7" - tsutils "^3.21.0" - -"@typescript-eslint/typescript-estree@5.30.0": - version "5.30.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.30.0.tgz#4565ee8a6d2ac368996e20b2344ea0eab1a8f0bb" - integrity sha512-hDEawogreZB4n1zoqcrrtg/wPyyiCxmhPLpZ6kmWfKF5M5G0clRLaEexpuWr31fZ42F96SlD/5xCt1bT5Qm4Nw== - dependencies: - "@typescript-eslint/types" "5.30.0" - "@typescript-eslint/visitor-keys" "5.30.0" - debug "^4.3.4" - globby "^11.1.0" - is-glob "^4.0.3" - semver "^7.3.7" - tsutils "^3.21.0" - -"@typescript-eslint/utils@5.26.0": - version "5.26.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.26.0.tgz#896b8480eb124096e99c8b240460bb4298afcfb4" - integrity sha512-PJFwcTq2Pt4AMOKfe3zQOdez6InIDOjUJJD3v3LyEtxHGVVRK3Vo7Dd923t/4M9hSH2q2CLvcTdxlLPjcIk3eg== - dependencies: - "@types/json-schema" "^7.0.9" - "@typescript-eslint/scope-manager" "5.26.0" - "@typescript-eslint/types" "5.26.0" - "@typescript-eslint/typescript-estree" "5.26.0" - eslint-scope "^5.1.1" - eslint-utils "^3.0.0" - -"@typescript-eslint/utils@^5.10.0": - version "5.30.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.30.0.tgz#1dac771fead5eab40d31860716de219356f5f754" - integrity sha512-0bIgOgZflLKIcZsWvfklsaQTM3ZUbmtH0rJ1hKyV3raoUYyeZwcjQ8ZUJTzS7KnhNcsVT1Rxs7zeeMHEhGlltw== - dependencies: - "@types/json-schema" "^7.0.9" - "@typescript-eslint/scope-manager" "5.30.0" - "@typescript-eslint/types" "5.30.0" - "@typescript-eslint/typescript-estree" "5.30.0" - eslint-scope "^5.1.1" - eslint-utils "^3.0.0" - -"@typescript-eslint/visitor-keys@5.26.0": - version "5.26.0" - resolved 
"https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.26.0.tgz#7195f756e367f789c0e83035297c45b417b57f57" - integrity sha512-wei+ffqHanYDOQgg/fS6Hcar6wAWv0CUPQ3TZzOWd2BLfgP539rb49bwua8WRAs7R6kOSLn82rfEu2ro6Llt8Q== - dependencies: - "@typescript-eslint/types" "5.26.0" - eslint-visitor-keys "^3.3.0" - -"@typescript-eslint/visitor-keys@5.30.0": - version "5.30.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.30.0.tgz#07721d23daca2ec4c2da7f1e660d41cd78bacac3" - integrity sha512-6WcIeRk2DQ3pHKxU1Ni0qMXJkjO/zLjBymlYBy/53qxe7yjEFSvzKLDToJjURUhSl2Fzhkl4SMXQoETauF74cw== - dependencies: - "@typescript-eslint/types" "5.30.0" - eslint-visitor-keys "^3.3.0" - -acorn-jsx@^5.3.2: - version "5.3.2" - resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" - integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== - -acorn@^8.7.1: - version "8.7.1" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.1.tgz#0197122c843d1bf6d0a5e83220a788f278f63c30" - integrity sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A== - -ajv@^6.10.0, ajv@^6.12.4: - version "6.12.6" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" - integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== - dependencies: - fast-deep-equal "^3.1.1" - fast-json-stable-stringify "^2.0.0" - json-schema-traverse "^0.4.1" - uri-js "^4.2.2" - -ansi-escapes@^4.2.1: - version "4.3.2" - resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" - integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== - dependencies: - type-fest "^0.21.3" - -ansi-regex@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" - integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== - -ansi-styles@^3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" - integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== - dependencies: - color-convert "^1.9.0" - -ansi-styles@^4.0.0, ansi-styles@^4.1.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" - integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== - dependencies: - color-convert "^2.0.1" - -ansi-styles@^5.0.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" - integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== - -anymatch@^3.0.3: - version "3.1.2" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" - integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== - dependencies: - normalize-path "^3.0.0" - picomatch "^2.0.4" - -argparse@^1.0.7: - version "1.0.10" - resolved 
"https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" - integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== - dependencies: - sprintf-js "~1.0.2" - -argparse@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" - integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== - -array-union@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" - integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== - -babel-jest@^28.1.2: - version "28.1.2" - resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-28.1.2.tgz#2b37fb81439f14d34d8b2cc4a4bd7efabf9acbfe" - integrity sha512-pfmoo6sh4L/+5/G2OOfQrGJgvH7fTa1oChnuYH2G/6gA+JwDvO8PELwvwnofKBMNrQsam0Wy/Rw+QSrBNewq2Q== - dependencies: - "@jest/transform" "^28.1.2" - "@types/babel__core" "^7.1.14" - babel-plugin-istanbul "^6.1.1" - babel-preset-jest "^28.1.1" - chalk "^4.0.0" - graceful-fs "^4.2.9" - slash "^3.0.0" - -babel-plugin-istanbul@^6.1.1: - version "6.1.1" - resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz#fa88ec59232fd9b4e36dbbc540a8ec9a9b47da73" - integrity sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@istanbuljs/load-nyc-config" "^1.0.0" - "@istanbuljs/schema" "^0.1.2" - istanbul-lib-instrument "^5.0.4" - test-exclude "^6.0.0" - -babel-plugin-jest-hoist@^28.1.1: - version "28.1.1" - resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-28.1.1.tgz#5e055cdcc47894f28341f87f5e35aad2df680b11" - integrity sha512-NovGCy5Hn25uMJSAU8FaHqzs13cFoOI4lhIujiepssjCKRsAo3TA734RDWSGxuFTsUJXerYOqQQodlxgmtqbzw== - dependencies: - "@babel/template" "^7.3.3" - "@babel/types" "^7.3.3" - "@types/babel__core" "^7.1.14" - "@types/babel__traverse" "^7.0.6" - -babel-preset-current-node-syntax@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz#b4399239b89b2a011f9ddbe3e4f401fc40cff73b" - integrity sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ== - dependencies: - "@babel/plugin-syntax-async-generators" "^7.8.4" - "@babel/plugin-syntax-bigint" "^7.8.3" - "@babel/plugin-syntax-class-properties" "^7.8.3" - "@babel/plugin-syntax-import-meta" "^7.8.3" - "@babel/plugin-syntax-json-strings" "^7.8.3" - "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - "@babel/plugin-syntax-numeric-separator" "^7.8.3" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - "@babel/plugin-syntax-top-level-await" "^7.8.3" - -babel-preset-jest@^28.1.1: - version "28.1.1" - resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-28.1.1.tgz#5b6e5e69f963eb2d70f739c607b8f723c0ee75e4" - integrity sha512-FCq9Oud0ReTeWtcneYf/48981aTfXYuB9gbU4rBNNJVBSQ6ssv7E6v/qvbBxtOWwZFXjLZwpg+W3q7J6vhH25g== - dependencies: - babel-plugin-jest-hoist "^28.1.1" - babel-preset-current-node-syntax "^1.0.0" - 
-balanced-match@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" - integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== - -brace-expansion@^1.1.7: - version "1.1.11" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" - integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== - dependencies: - balanced-match "^1.0.0" - concat-map "0.0.1" - -braces@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" - integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== - dependencies: - fill-range "^7.0.1" - -browserslist@^4.20.2: - version "4.21.1" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.1.tgz#c9b9b0a54c7607e8dc3e01a0d311727188011a00" - integrity sha512-Nq8MFCSrnJXSc88yliwlzQe3qNe3VntIjhsArW9IJOEPSHNx23FalwApUVbzAWABLhYJJ7y8AynWI/XM8OdfjQ== - dependencies: - caniuse-lite "^1.0.30001359" - electron-to-chromium "^1.4.172" - node-releases "^2.0.5" - update-browserslist-db "^1.0.4" - -bs-logger@0.x: - version "0.2.6" - resolved "https://registry.yarnpkg.com/bs-logger/-/bs-logger-0.2.6.tgz#eb7d365307a72cf974cc6cda76b68354ad336bd8" - integrity sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog== - dependencies: - fast-json-stable-stringify "2.x" - -bser@2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" - integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== - dependencies: - node-int64 "^0.4.0" - -buffer-from@^1.0.0: - version "1.1.2" - resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" - integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== - -callsites@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" - integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== - -camelcase@^5.3.1: - version "5.3.1" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" - integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== - -camelcase@^6.2.0: - version "6.3.0" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" - integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== - -caniuse-lite@^1.0.30001359: - version "1.0.30001361" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001361.tgz#ba2adb2527566fb96f3ac7c67698ae7fc495a28d" - integrity sha512-ybhCrjNtkFji1/Wto6SSJKkWk6kZgVQsDq5QI83SafsF6FXv2JB4df9eEdH6g8sdGgqTXrFLjAxqBGgYoU3azQ== - -chalk@^2.0.0: - version "2.4.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" - integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== - dependencies: - ansi-styles "^3.2.1" 
- escape-string-regexp "^1.0.5" - supports-color "^5.3.0" - -chalk@^4.0.0: - version "4.1.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" - integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== - dependencies: - ansi-styles "^4.1.0" - supports-color "^7.1.0" - -char-regex@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" - integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== - -ci-info@^3.2.0: - version "3.3.2" - resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.3.2.tgz#6d2967ffa407466481c6c90b6e16b3098f080128" - integrity sha512-xmDt/QIAdeZ9+nfdPsaBCpMvHNLFiLdjj59qjqn+6iPe6YmHGQ35sBnQ8uslRBXFmXkiZQOJRjvQeoGppoTjjg== - -cjs-module-lexer@^1.0.0: - version "1.2.2" - resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz#9f84ba3244a512f3a54e5277e8eef4c489864e40" - integrity sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA== - -cliui@^7.0.2: - version "7.0.4" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" - integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== - dependencies: - string-width "^4.2.0" - strip-ansi "^6.0.0" - wrap-ansi "^7.0.0" - -clone-deep@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/clone-deep/-/clone-deep-4.0.1.tgz#c19fd9bdbbf85942b4fd979c84dcf7d5f07c2387" - integrity sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ== - dependencies: - is-plain-object "^2.0.4" - kind-of "^6.0.2" - shallow-clone "^3.0.0" - -co@^4.6.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" - integrity sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ== - -collect-v8-coverage@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" - integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== - -color-convert@^1.9.0: - version "1.9.3" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" - integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== - dependencies: - color-name "1.1.3" - -color-convert@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" - integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== - dependencies: - color-name "~1.1.4" - -color-name@1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" - integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== - -color-name@~1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" - integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== - 
-concat-map@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" - integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== - -convert-hrtime@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/convert-hrtime/-/convert-hrtime-5.0.0.tgz#f2131236d4598b95de856926a67100a0a97e9fa3" - integrity sha512-lOETlkIeYSJWcbbcvjRKGxVMXJR+8+OQb/mTPbA4ObPMytYIsUbuOE0Jzy60hjARYszq1id0j8KgVhC+WGZVTg== - -convert-source-map@^1.4.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: - version "1.8.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" - integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== - dependencies: - safe-buffer "~5.1.1" - -cross-spawn@^7.0.2, cross-spawn@^7.0.3: - version "7.0.3" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" - integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== - dependencies: - path-key "^3.1.0" - shebang-command "^2.0.0" - which "^2.0.1" - -debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.4: - version "4.3.4" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" - integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== - dependencies: - ms "2.1.2" - -dedent@^0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/dedent/-/dedent-0.7.0.tgz#2495ddbaf6eb874abb0e1be9df22d2e5a544326c" - integrity sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA== - -deep-is@^0.1.3: - version "0.1.4" - resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" - integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== - -deepmerge@^4.2.2: - version "4.2.2" - resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" - integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== - -detect-newline@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" - integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== - -diff-sequences@^28.1.1: - version "28.1.1" - resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-28.1.1.tgz#9989dc731266dc2903457a70e996f3a041913ac6" - integrity sha512-FU0iFaH/E23a+a718l8Qa/19bF9p06kgE0KipMOMadwa3SjnaElKzPaUC0vnibs6/B/9ni97s61mcejk8W1fQw== - -dir-glob@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" - integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== - dependencies: - path-type "^4.0.0" - -doctrine@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" - integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== - dependencies: - esutils "^2.0.2" - 
-electron-to-chromium@^1.4.172: - version "1.4.176" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.176.tgz#61ab2a1de3b5072ee31881a937c08ac6780d1cfa" - integrity sha512-92JdgyRlcNDwuy75MjuFSb3clt6DGJ2IXSpg0MCjKd3JV9eSmuUAIyWiGAp/EtT0z2D4rqbYqThQLV90maH3Zw== - -emittery@^0.10.2: - version "0.10.2" - resolved "https://registry.yarnpkg.com/emittery/-/emittery-0.10.2.tgz#902eec8aedb8c41938c46e9385e9db7e03182933" - integrity sha512-aITqOwnLanpHLNXZJENbOgjUBeHocD+xsSJmNrjovKBW5HbSpW3d1pEls7GFQPUWXiwG9+0P4GtHfEqC/4M0Iw== - -emoji-regex@^8.0.0: - version "8.0.0" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" - integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== - -error-ex@^1.3.1: - version "1.3.2" - resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" - integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== - dependencies: - is-arrayish "^0.2.1" - -escalade@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" - integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== - -escape-string-regexp@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" - integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== - -escape-string-regexp@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" - integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== - -escape-string-regexp@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" - integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== - -eslint-config-prettier@~8.5: - version "8.5.0" - resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-8.5.0.tgz#5a81680ec934beca02c7b1a61cf8ca34b66feab1" - integrity sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q== - -eslint-plugin-jest@~26.2: - version "26.2.2" - resolved "https://registry.yarnpkg.com/eslint-plugin-jest/-/eslint-plugin-jest-26.2.2.tgz#74e000544259f1ef0462a609a3fc9e5da3768f6c" - integrity sha512-etSFZ8VIFX470aA6kTqDPhIq7YWe0tjBcboFNV3WeiC18PJ/AVonGhuTwlmuz2fBkH8FJHA7JQ4k7GsQIj1Gew== - dependencies: - "@typescript-eslint/utils" "^5.10.0" - -eslint-scope@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" - integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== - dependencies: - esrecurse "^4.3.0" - estraverse "^4.1.1" - -eslint-scope@^7.1.1: - version "7.1.1" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.1.1.tgz#fff34894c2f65e5226d3041ac480b4513a163642" - integrity sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw== - dependencies: - esrecurse "^4.3.0" - 
estraverse "^5.2.0" - -eslint-utils@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-3.0.0.tgz#8aebaface7345bb33559db0a1f13a1d2d48c3672" - integrity sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA== - dependencies: - eslint-visitor-keys "^2.0.0" - -eslint-visitor-keys@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" - integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== - -eslint-visitor-keys@^3.3.0: - version "3.3.0" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826" - integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA== - -eslint@~8.16: - version "8.16.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.16.0.tgz#6d936e2d524599f2a86c708483b4c372c5d3bbae" - integrity sha512-MBndsoXY/PeVTDJeWsYj7kLZ5hQpJOfMYLsF6LicLHQWbRDG19lK5jOix4DPl8yY4SUFcE3txy86OzFLWT+yoA== - dependencies: - "@eslint/eslintrc" "^1.3.0" - "@humanwhocodes/config-array" "^0.9.2" - ajv "^6.10.0" - chalk "^4.0.0" - cross-spawn "^7.0.2" - debug "^4.3.2" - doctrine "^3.0.0" - escape-string-regexp "^4.0.0" - eslint-scope "^7.1.1" - eslint-utils "^3.0.0" - eslint-visitor-keys "^3.3.0" - espree "^9.3.2" - esquery "^1.4.0" - esutils "^2.0.2" - fast-deep-equal "^3.1.3" - file-entry-cache "^6.0.1" - functional-red-black-tree "^1.0.1" - glob-parent "^6.0.1" - globals "^13.15.0" - ignore "^5.2.0" - import-fresh "^3.0.0" - imurmurhash "^0.1.4" - is-glob "^4.0.0" - js-yaml "^4.1.0" - json-stable-stringify-without-jsonify "^1.0.1" - levn "^0.4.1" - lodash.merge "^4.6.2" - minimatch "^3.1.2" - natural-compare "^1.4.0" - optionator "^0.9.1" - regexpp "^3.2.0" - strip-ansi "^6.0.1" - strip-json-comments "^3.1.0" - text-table "^0.2.0" - v8-compile-cache "^2.0.3" - -espree@^9.3.2: - version "9.3.2" - resolved "https://registry.yarnpkg.com/espree/-/espree-9.3.2.tgz#f58f77bd334731182801ced3380a8cc859091596" - integrity sha512-D211tC7ZwouTIuY5x9XnS0E9sWNChB7IYKX/Xp5eQj3nFXhqmiUDB9q27y76oFl8jTg3pXcQx/bpxMfs3CIZbA== - dependencies: - acorn "^8.7.1" - acorn-jsx "^5.3.2" - eslint-visitor-keys "^3.3.0" - -esprima@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" - integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== - -esquery@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" - integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== - dependencies: - estraverse "^5.1.0" - -esrecurse@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" - integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== - dependencies: - estraverse "^5.2.0" - -estraverse@^4.1.1: - version "4.3.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" - integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== - -estraverse@^5.1.0, estraverse@^5.2.0: - version "5.3.0" 
- resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" - integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== - -esutils@^2.0.2: - version "2.0.3" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" - integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== - -execa@^5.0.0: - version "5.1.1" - resolved "https://registry.yarnpkg.com/execa/-/execa-5.1.1.tgz#f80ad9cbf4298f7bd1d4c9555c21e93741c411dd" - integrity sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg== - dependencies: - cross-spawn "^7.0.3" - get-stream "^6.0.0" - human-signals "^2.1.0" - is-stream "^2.0.0" - merge-stream "^2.0.0" - npm-run-path "^4.0.1" - onetime "^5.1.2" - signal-exit "^3.0.3" - strip-final-newline "^2.0.0" - -exit@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" - integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== - -expect@^28.1.1: - version "28.1.1" - resolved "https://registry.yarnpkg.com/expect/-/expect-28.1.1.tgz#ca6fff65f6517cf7220c2e805a49c19aea30b420" - integrity sha512-/AANEwGL0tWBwzLNOvO0yUdy2D52jVdNXppOqswC49sxMN2cPWsGCQdzuIf9tj6hHoBQzNvx75JUYuQAckPo3w== - dependencies: - "@jest/expect-utils" "^28.1.1" - jest-get-type "^28.0.2" - jest-matcher-utils "^28.1.1" - jest-message-util "^28.1.1" - jest-util "^28.1.1" - -fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: - version "3.1.3" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" - integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== - -fast-glob@^3.2.9: - version "3.2.11" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.11.tgz#a1172ad95ceb8a16e20caa5c5e56480e5129c1d9" - integrity sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew== - dependencies: - "@nodelib/fs.stat" "^2.0.2" - "@nodelib/fs.walk" "^1.2.3" - glob-parent "^5.1.2" - merge2 "^1.3.0" - micromatch "^4.0.4" - -fast-json-stable-stringify@2.x, fast-json-stable-stringify@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" - integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== - -fast-levenshtein@^2.0.6: - version "2.0.6" - resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" - integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== - -fastq@^1.6.0: - version "1.13.0" - resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" - integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== - dependencies: - reusify "^1.0.4" - -fb-watchman@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.1.tgz#fc84fb39d2709cf3ff6d743706157bb5708a8a85" - integrity sha512-DkPJKQeY6kKwmuMretBhr7G6Vodr7bFwDYTXIkfG1gjvNpaxBTQV3PbXg6bR1c1UP4jPOX0jHUbbHANL9vRjVg== - dependencies: - bser "2.1.1" - 
-file-entry-cache@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" - integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== - dependencies: - flat-cache "^3.0.4" - -fill-range@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" - integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== - dependencies: - to-regex-range "^5.0.1" - -find-up@^4.0.0, find-up@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" - integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== - dependencies: - locate-path "^5.0.0" - path-exists "^4.0.0" - -flat-cache@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" - integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== - dependencies: - flatted "^3.1.0" - rimraf "^3.0.2" - -flatted@^3.1.0: - version "3.2.6" - resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.6.tgz#022e9218c637f9f3fc9c35ab9c9193f05add60b2" - integrity sha512-0sQoMh9s0BYsm+12Huy/rkKxVu4R1+r96YX5cG44rHV0pQ6iC3Q+mkoMFaGWObMFYQxCVT+ssG1ksneA2MI9KQ== - -fs.realpath@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" - integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== - -fsevents@^2.3.2: - version "2.3.2" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" - integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== - -function-bind@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" - integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== - -functional-red-black-tree@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" - integrity sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g== - -gensync@^1.0.0-beta.2: - version "1.0.0-beta.2" - resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" - integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== - -get-caller-file@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" - integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== - -get-package-type@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" - integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== - -get-stream@^6.0.0: - version "6.0.1" - resolved 
"https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" - integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== - -glob-parent@^5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" - integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== - dependencies: - is-glob "^4.0.1" - -glob-parent@^6.0.1: - version "6.0.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" - integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== - dependencies: - is-glob "^4.0.3" - -glob@^7.1.3, glob@^7.1.4: - version "7.2.3" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" - integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.1.1" - once "^1.3.0" - path-is-absolute "^1.0.0" - -globals@^11.1.0: - version "11.12.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" - integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== - -globals@^13.15.0: - version "13.15.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-13.15.0.tgz#38113218c907d2f7e98658af246cef8b77e90bac" - integrity sha512-bpzcOlgDhMG070Av0Vy5Owklpv1I6+j96GhUI7Rh7IzDCKLzboflLrrfqMu8NquDbiR4EOQk7XzJwqVJxicxog== - dependencies: - type-fest "^0.20.2" - -globby@^11.1.0: - version "11.1.0" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" - integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== - dependencies: - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.2.9" - ignore "^5.2.0" - merge2 "^1.4.1" - slash "^3.0.0" - -graceful-fs@^4.2.9: - version "4.2.10" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" - integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== - -has-flag@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" - integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== - -has-flag@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" - integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== - -has@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" - integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== - dependencies: - function-bind "^1.1.1" - -html-escaper@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" - integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== - -human-signals@^2.1.0: - version 
"2.1.0" - resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" - integrity sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw== - -ignore@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" - integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== - -import-fresh@^3.0.0, import-fresh@^3.2.1: - version "3.3.0" - resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" - integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== - dependencies: - parent-module "^1.0.0" - resolve-from "^4.0.0" - -import-local@^3.0.2: - version "3.1.0" - resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.1.0.tgz#b4479df8a5fd44f6cdce24070675676063c95cb4" - integrity sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg== - dependencies: - pkg-dir "^4.2.0" - resolve-cwd "^3.0.0" - -imurmurhash@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" - integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== - -inflight@^1.0.4: - version "1.0.6" - resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" - integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== - dependencies: - once "^1.3.0" - wrappy "1" - -inherits@2: - version "2.0.4" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" - integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== - -is-arrayish@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" - integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== - -is-core-module@^2.9.0: - version "2.9.0" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.9.0.tgz#e1c34429cd51c6dd9e09e0799e396e27b19a9c69" - integrity sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A== - dependencies: - has "^1.0.3" - -is-extglob@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" - integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== - -is-fullwidth-code-point@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" - integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== - -is-generator-fn@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" - integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== - -is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3: - version "4.0.3" - resolved 
"https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" - integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== - dependencies: - is-extglob "^2.1.1" - -is-number@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" - integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== - -is-plain-object@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" - integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== - dependencies: - isobject "^3.0.1" - -is-stream@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" - integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== - -isexe@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" - integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== - -isobject@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" - integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== - -istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" - integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== - -istanbul-lib-instrument@^5.0.4, istanbul-lib-instrument@^5.1.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.0.tgz#31d18bdd127f825dd02ea7bfdfd906f8ab840e9f" - integrity sha512-6Lthe1hqXHBNsqvgDzGO6l03XNeu3CrG4RqQ1KM9+l5+jNGpEJfIELx1NS3SEHmJQA8np/u+E4EPRKRiu6m19A== - dependencies: - "@babel/core" "^7.12.3" - "@babel/parser" "^7.14.7" - "@istanbuljs/schema" "^0.1.2" - istanbul-lib-coverage "^3.2.0" - semver "^6.3.0" - -istanbul-lib-report@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" - integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== - dependencies: - istanbul-lib-coverage "^3.0.0" - make-dir "^3.0.0" - supports-color "^7.1.0" - -istanbul-lib-source-maps@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" - integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== - dependencies: - debug "^4.1.1" - istanbul-lib-coverage "^3.0.0" - source-map "^0.6.1" - -istanbul-reports@^3.1.3: - version "3.1.4" - resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.4.tgz#1b6f068ecbc6c331040aab5741991273e609e40c" - integrity sha512-r1/DshN4KSE7xWEknZLLLLDn5CJybV3nw01VTkp6D5jzLuELlcbudfj/eSQFvrKsJuTVCGnePO7ho82Nw9zzfw== - dependencies: - html-escaper "^2.0.0" - istanbul-lib-report "^3.0.0" - 
-jest-changed-files@^28.0.2: - version "28.0.2" - resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-28.0.2.tgz#7d7810660a5bd043af9e9cfbe4d58adb05e91531" - integrity sha512-QX9u+5I2s54ZnGoMEjiM2WeBvJR2J7w/8ZUmH2um/WLAuGAYFQcsVXY9+1YL6k0H/AGUdH8pXUAv6erDqEsvIA== - dependencies: - execa "^5.0.0" - throat "^6.0.1" - -jest-circus@^28.1.2: - version "28.1.2" - resolved "https://registry.yarnpkg.com/jest-circus/-/jest-circus-28.1.2.tgz#0d5a5623eccb244efe87d1edc365696e4fcf80ce" - integrity sha512-E2vdPIJG5/69EMpslFhaA46WkcrN74LI5V/cSJ59L7uS8UNoXbzTxmwhpi9XrIL3zqvMt5T0pl5k2l2u2GwBNQ== - dependencies: - "@jest/environment" "^28.1.2" - "@jest/expect" "^28.1.2" - "@jest/test-result" "^28.1.1" - "@jest/types" "^28.1.1" - "@types/node" "*" - chalk "^4.0.0" - co "^4.6.0" - dedent "^0.7.0" - is-generator-fn "^2.0.0" - jest-each "^28.1.1" - jest-matcher-utils "^28.1.1" - jest-message-util "^28.1.1" - jest-runtime "^28.1.2" - jest-snapshot "^28.1.2" - jest-util "^28.1.1" - pretty-format "^28.1.1" - slash "^3.0.0" - stack-utils "^2.0.3" - throat "^6.0.1" - -jest-cli@^28.1.2: - version "28.1.2" - resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-28.1.2.tgz#b89012e5bad14135e71b1628b85475d3773a1bbc" - integrity sha512-l6eoi5Do/IJUXAFL9qRmDiFpBeEJAnjJb1dcd9i/VWfVWbp3mJhuH50dNtX67Ali4Ecvt4eBkWb4hXhPHkAZTw== - dependencies: - "@jest/core" "^28.1.2" - "@jest/test-result" "^28.1.1" - "@jest/types" "^28.1.1" - chalk "^4.0.0" - exit "^0.1.2" - graceful-fs "^4.2.9" - import-local "^3.0.2" - jest-config "^28.1.2" - jest-util "^28.1.1" - jest-validate "^28.1.1" - prompts "^2.0.1" - yargs "^17.3.1" - -jest-config@^28.1.2: - version "28.1.2" - resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-28.1.2.tgz#ba00ad30caf62286c86e7c1099e915218a0ac8c6" - integrity sha512-g6EfeRqddVbjPVBVY4JWpUY4IvQoFRIZcv4V36QkqzE0IGhEC/VkugFeBMAeUE7PRgC8KJF0yvJNDeQRbamEVA== - dependencies: - "@babel/core" "^7.11.6" - "@jest/test-sequencer" "^28.1.1" - "@jest/types" "^28.1.1" - babel-jest "^28.1.2" - chalk "^4.0.0" - ci-info "^3.2.0" - deepmerge "^4.2.2" - glob "^7.1.3" - graceful-fs "^4.2.9" - jest-circus "^28.1.2" - jest-environment-node "^28.1.2" - jest-get-type "^28.0.2" - jest-regex-util "^28.0.2" - jest-resolve "^28.1.1" - jest-runner "^28.1.2" - jest-util "^28.1.1" - jest-validate "^28.1.1" - micromatch "^4.0.4" - parse-json "^5.2.0" - pretty-format "^28.1.1" - slash "^3.0.0" - strip-json-comments "^3.1.1" - -jest-diff@^28.1.1: - version "28.1.1" - resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-28.1.1.tgz#1a3eedfd81ae79810931c63a1d0f201b9120106c" - integrity sha512-/MUUxeR2fHbqHoMMiffe/Afm+U8U4olFRJ0hiVG2lZatPJcnGxx292ustVu7bULhjV65IYMxRdploAKLbcrsyg== - dependencies: - chalk "^4.0.0" - diff-sequences "^28.1.1" - jest-get-type "^28.0.2" - pretty-format "^28.1.1" - -jest-docblock@^28.1.1: - version "28.1.1" - resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-28.1.1.tgz#6f515c3bf841516d82ecd57a62eed9204c2f42a8" - integrity sha512-3wayBVNiOYx0cwAbl9rwm5kKFP8yHH3d/fkEaL02NPTkDojPtheGB7HZSFY4wzX+DxyrvhXz0KSCVksmCknCuA== - dependencies: - detect-newline "^3.0.0" - -jest-each@^28.1.1: - version "28.1.1" - resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-28.1.1.tgz#ba5238dacf4f31d9fe23ddc2c44c01e7c23885c4" - integrity sha512-A042rqh17ZvEhRceDMi784ppoXR7MWGDEKTXEZXb4svt0eShMZvijGxzKsx+yIjeE8QYmHPrnHiTSQVhN4nqaw== - dependencies: - "@jest/types" "^28.1.1" - chalk "^4.0.0" - jest-get-type "^28.0.2" - jest-util "^28.1.1" - pretty-format "^28.1.1" - 
-jest-environment-node@^28.1.2: - version "28.1.2" - resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-28.1.2.tgz#3e2eb47f6d173b0648d5f7c717cb1c26651d5c8a" - integrity sha512-oYsZz9Qw27XKmOgTtnl0jW7VplJkN2oeof+SwAwKFQacq3CLlG9u4kTGuuLWfvu3J7bVutWlrbEQMOCL/jughw== - dependencies: - "@jest/environment" "^28.1.2" - "@jest/fake-timers" "^28.1.2" - "@jest/types" "^28.1.1" - "@types/node" "*" - jest-mock "^28.1.1" - jest-util "^28.1.1" - -jest-get-type@^28.0.2: - version "28.0.2" - resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-28.0.2.tgz#34622e628e4fdcd793d46db8a242227901fcf203" - integrity sha512-ioj2w9/DxSYHfOm5lJKCdcAmPJzQXmbM/Url3rhlghrPvT3tt+7a/+oXc9azkKmLvoiXjtV83bEWqi+vs5nlPA== - -jest-haste-map@^28.1.1: - version "28.1.1" - resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-28.1.1.tgz#471685f1acd365a9394745bb97c8fc16289adca3" - integrity sha512-ZrRSE2o3Ezh7sb1KmeLEZRZ4mgufbrMwolcFHNRSjKZhpLa8TdooXOOFlSwoUzlbVs1t0l7upVRW2K7RWGHzbQ== - dependencies: - "@jest/types" "^28.1.1" - "@types/graceful-fs" "^4.1.3" - "@types/node" "*" - anymatch "^3.0.3" - fb-watchman "^2.0.0" - graceful-fs "^4.2.9" - jest-regex-util "^28.0.2" - jest-util "^28.1.1" - jest-worker "^28.1.1" - micromatch "^4.0.4" - walker "^1.0.8" - optionalDependencies: - fsevents "^2.3.2" - -jest-leak-detector@^28.1.1: - version "28.1.1" - resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-28.1.1.tgz#537f37afd610a4b3f4cab15e06baf60484548efb" - integrity sha512-4jvs8V8kLbAaotE+wFR7vfUGf603cwYtFf1/PYEsyX2BAjSzj8hQSVTP6OWzseTl0xL6dyHuKs2JAks7Pfubmw== - dependencies: - jest-get-type "^28.0.2" - pretty-format "^28.1.1" - -jest-matcher-utils@^28.0.0, jest-matcher-utils@^28.1.1: - version "28.1.1" - resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-28.1.1.tgz#a7c4653c2b782ec96796eb3088060720f1e29304" - integrity sha512-NPJPRWrbmR2nAJ+1nmnfcKKzSwgfaciCCrYZzVnNoxVoyusYWIjkBMNvu0RHJe7dNj4hH3uZOPZsQA+xAYWqsw== - dependencies: - chalk "^4.0.0" - jest-diff "^28.1.1" - jest-get-type "^28.0.2" - pretty-format "^28.1.1" - -jest-message-util@^28.1.1: - version "28.1.1" - resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-28.1.1.tgz#60aa0b475cfc08c8a9363ed2fb9108514dd9ab89" - integrity sha512-xoDOOT66fLfmTRiqkoLIU7v42mal/SqwDKvfmfiWAdJMSJiU+ozgluO7KbvoAgiwIrrGZsV7viETjc8GNrA/IQ== - dependencies: - "@babel/code-frame" "^7.12.13" - "@jest/types" "^28.1.1" - "@types/stack-utils" "^2.0.0" - chalk "^4.0.0" - graceful-fs "^4.2.9" - micromatch "^4.0.4" - pretty-format "^28.1.1" - slash "^3.0.0" - stack-utils "^2.0.3" - -jest-mock@^28.1.1: - version "28.1.1" - resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-28.1.1.tgz#37903d269427fa1ef5b2447be874e1c62a39a371" - integrity sha512-bDCb0FjfsmKweAvE09dZT59IMkzgN0fYBH6t5S45NoJfd2DHkS3ySG2K+hucortryhO3fVuXdlxWcbtIuV/Skw== - dependencies: - "@jest/types" "^28.1.1" - "@types/node" "*" - -jest-pnp-resolver@^1.2.2: - version "1.2.2" - resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c" - integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== - -jest-regex-util@^28.0.2: - version "28.0.2" - resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-28.0.2.tgz#afdc377a3b25fb6e80825adcf76c854e5bf47ead" - integrity sha512-4s0IgyNIy0y9FK+cjoVYoxamT7Zeo7MhzqRGx7YDYmaQn1wucY9rotiGkBzzcMXTtjrCAP/f7f+E0F7+fxPNdw== - 
-jest-resolve-dependencies@^28.1.2: - version "28.1.2" - resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-28.1.2.tgz#ca528858e0c6642d5a1dda8fc7cda10230c275bc" - integrity sha512-OXw4vbOZuyRTBi3tapWBqdyodU+T33ww5cPZORuTWkg+Y8lmsxQlVu3MWtJh6NMlKRTHQetF96yGPv01Ye7Mbg== - dependencies: - jest-regex-util "^28.0.2" - jest-snapshot "^28.1.2" - -jest-resolve@^28.1.1: - version "28.1.1" - resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-28.1.1.tgz#bc2eaf384abdcc1aaf3ba7c50d1adf01e59095e5" - integrity sha512-/d1UbyUkf9nvsgdBildLe6LAD4DalgkgZcKd0nZ8XUGPyA/7fsnaQIlKVnDiuUXv/IeZhPEDrRJubVSulxrShA== - dependencies: - chalk "^4.0.0" - graceful-fs "^4.2.9" - jest-haste-map "^28.1.1" - jest-pnp-resolver "^1.2.2" - jest-util "^28.1.1" - jest-validate "^28.1.1" - resolve "^1.20.0" - resolve.exports "^1.1.0" - slash "^3.0.0" - -jest-runner@^28.1.2: - version "28.1.2" - resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-28.1.2.tgz#f293409592a62234285a71237e38499a3554e350" - integrity sha512-6/k3DlAsAEr5VcptCMdhtRhOoYClZQmxnVMZvZ/quvPGRpN7OBQYPIC32tWSgOnbgqLXNs5RAniC+nkdFZpD4A== - dependencies: - "@jest/console" "^28.1.1" - "@jest/environment" "^28.1.2" - "@jest/test-result" "^28.1.1" - "@jest/transform" "^28.1.2" - "@jest/types" "^28.1.1" - "@types/node" "*" - chalk "^4.0.0" - emittery "^0.10.2" - graceful-fs "^4.2.9" - jest-docblock "^28.1.1" - jest-environment-node "^28.1.2" - jest-haste-map "^28.1.1" - jest-leak-detector "^28.1.1" - jest-message-util "^28.1.1" - jest-resolve "^28.1.1" - jest-runtime "^28.1.2" - jest-util "^28.1.1" - jest-watcher "^28.1.1" - jest-worker "^28.1.1" - source-map-support "0.5.13" - throat "^6.0.1" - -jest-runtime@^28.1.2: - version "28.1.2" - resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-28.1.2.tgz#d68f34f814a848555a345ceda23289f14d59a688" - integrity sha512-i4w93OsWzLOeMXSi9epmakb2+3z0AchZtUQVF1hesBmcQQy4vtaql5YdVe9KexdJaVRyPDw8DoBR0j3lYsZVYw== - dependencies: - "@jest/environment" "^28.1.2" - "@jest/fake-timers" "^28.1.2" - "@jest/globals" "^28.1.2" - "@jest/source-map" "^28.1.2" - "@jest/test-result" "^28.1.1" - "@jest/transform" "^28.1.2" - "@jest/types" "^28.1.1" - chalk "^4.0.0" - cjs-module-lexer "^1.0.0" - collect-v8-coverage "^1.0.0" - execa "^5.0.0" - glob "^7.1.3" - graceful-fs "^4.2.9" - jest-haste-map "^28.1.1" - jest-message-util "^28.1.1" - jest-mock "^28.1.1" - jest-regex-util "^28.0.2" - jest-resolve "^28.1.1" - jest-snapshot "^28.1.2" - jest-util "^28.1.1" - slash "^3.0.0" - strip-bom "^4.0.0" - -jest-snapshot@^28.1.2: - version "28.1.2" - resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-28.1.2.tgz#93d31b87b11b384f5946fe0767541496135f8d52" - integrity sha512-wzrieFttZYfLvrCVRJxX+jwML2YTArOUqFpCoSVy1QUapx+LlV9uLbV/mMEhYj4t7aMeE9aSQFHSvV/oNoDAMA== - dependencies: - "@babel/core" "^7.11.6" - "@babel/generator" "^7.7.2" - "@babel/plugin-syntax-typescript" "^7.7.2" - "@babel/traverse" "^7.7.2" - "@babel/types" "^7.3.3" - "@jest/expect-utils" "^28.1.1" - "@jest/transform" "^28.1.2" - "@jest/types" "^28.1.1" - "@types/babel__traverse" "^7.0.6" - "@types/prettier" "^2.1.5" - babel-preset-current-node-syntax "^1.0.0" - chalk "^4.0.0" - expect "^28.1.1" - graceful-fs "^4.2.9" - jest-diff "^28.1.1" - jest-get-type "^28.0.2" - jest-haste-map "^28.1.1" - jest-matcher-utils "^28.1.1" - jest-message-util "^28.1.1" - jest-util "^28.1.1" - natural-compare "^1.4.0" - pretty-format "^28.1.1" - semver "^7.3.5" - -jest-util@^28.0.0, jest-util@^28.1.1: - version 
"28.1.1" - resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-28.1.1.tgz#ff39e436a1aca397c0ab998db5a51ae2b7080d05" - integrity sha512-FktOu7ca1DZSyhPAxgxB6hfh2+9zMoJ7aEQA759Z6p45NuO8mWcqujH+UdHlCm/V6JTWwDztM2ITCzU1ijJAfw== - dependencies: - "@jest/types" "^28.1.1" - "@types/node" "*" - chalk "^4.0.0" - ci-info "^3.2.0" - graceful-fs "^4.2.9" - picomatch "^2.2.3" - -jest-validate@^28.1.1: - version "28.1.1" - resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-28.1.1.tgz#59b7b339b3c85b5144bd0c06ad3600f503a4acc8" - integrity sha512-Kpf6gcClqFCIZ4ti5++XemYJWUPCFUW+N2gknn+KgnDf549iLul3cBuKVe1YcWRlaF8tZV8eJCap0eECOEE3Ug== - dependencies: - "@jest/types" "^28.1.1" - camelcase "^6.2.0" - chalk "^4.0.0" - jest-get-type "^28.0.2" - leven "^3.1.0" - pretty-format "^28.1.1" - -jest-watcher@^28.1.1: - version "28.1.1" - resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-28.1.1.tgz#533597fb3bfefd52b5cd115cd916cffd237fb60c" - integrity sha512-RQIpeZ8EIJMxbQrXpJQYIIlubBnB9imEHsxxE41f54ZwcqWLysL/A0ZcdMirf+XsMn3xfphVQVV4EW0/p7i7Ug== - dependencies: - "@jest/test-result" "^28.1.1" - "@jest/types" "^28.1.1" - "@types/node" "*" - ansi-escapes "^4.2.1" - chalk "^4.0.0" - emittery "^0.10.2" - jest-util "^28.1.1" - string-length "^4.0.1" - -jest-worker@^28.1.1: - version "28.1.1" - resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-28.1.1.tgz#3480c73247171dfd01eda77200f0063ab6a3bf28" - integrity sha512-Au7slXB08C6h+xbJPp7VIb6U0XX5Kc9uel/WFc6/rcTzGiaVCBRngBExSYuXSLFPULPSYU3cJ3ybS988lNFQhQ== - dependencies: - "@types/node" "*" - merge-stream "^2.0.0" - supports-color "^8.0.0" - -jest@^28.1.1: - version "28.1.2" - resolved "https://registry.yarnpkg.com/jest/-/jest-28.1.2.tgz#451ff24081ce31ca00b07b60c61add13aa96f8eb" - integrity sha512-Tuf05DwLeCh2cfWCQbcz9UxldoDyiR1E9Igaei5khjonKncYdc6LDfynKCEWozK0oLE3GD+xKAo2u8x/0s6GOg== - dependencies: - "@jest/core" "^28.1.2" - "@jest/types" "^28.1.1" - import-local "^3.0.2" - jest-cli "^28.1.2" - -js-tokens@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" - integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== - -js-yaml@^3.13.1: - version "3.14.1" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" - integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== - dependencies: - argparse "^1.0.7" - esprima "^4.0.0" - -js-yaml@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" - integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== - dependencies: - argparse "^2.0.1" - -jsesc@^2.5.1: - version "2.5.2" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" - integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== - -json-parse-even-better-errors@^2.3.0: - version "2.3.1" - resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" - integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== - -json-schema-traverse@^0.4.1: - version "0.4.1" - resolved 
"https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" - integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== - -json-stable-stringify-without-jsonify@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" - integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== - -json5@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" - integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA== - -kind-of@^6.0.2: - version "6.0.3" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" - integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== - -kleur@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" - integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== - -leven@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" - integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== - -levn@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" - integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== - dependencies: - prelude-ls "^1.2.1" - type-check "~0.4.0" - -lines-and-columns@^1.1.6: - version "1.2.4" - resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" - integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== - -locate-path@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" - integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== - dependencies: - p-locate "^4.1.0" - -lodash.memoize@4.x: - version "4.1.2" - resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" - integrity sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag== - -lodash.merge@^4.6.2: - version "4.6.2" - resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" - integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== - -lru-cache@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" - integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== - dependencies: - yallist "^4.0.0" - -make-dir@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" - integrity 
sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== - dependencies: - semver "^6.0.0" - -make-error@1.x: - version "1.3.6" - resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" - integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw== - -makeerror@1.0.12: - version "1.0.12" - resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" - integrity sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg== - dependencies: - tmpl "1.0.5" - -merge-stream@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" - integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== - -merge2@^1.3.0, merge2@^1.4.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" - integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== - -micromatch@^4.0.4: - version "4.0.5" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" - integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== - dependencies: - braces "^3.0.2" - picomatch "^2.3.1" - -mimic-fn@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" - integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== - -minimatch@^3.0.4, minimatch@^3.1.1, minimatch@^3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" - integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== - dependencies: - brace-expansion "^1.1.7" - -ms@2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" - integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== - -natural-compare@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" - integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== - -node-int64@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" - integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== - -node-releases@^2.0.5: - version "2.0.5" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.5.tgz#280ed5bc3eba0d96ce44897d8aee478bfb3d9666" - integrity sha512-U9h1NLROZTq9uE1SNffn6WuPDg8icmi3ns4rEl/oTfIle4iLjTliCzgTsbaIFMq/Xn078/lfY/BL0GWZ+psK4Q== - -normalize-path@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" - integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== - -npm-run-path@^4.0.1: - version "4.0.1" - resolved 
"https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" - integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== - dependencies: - path-key "^3.0.0" - -once@^1.3.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== - dependencies: - wrappy "1" - -onetime@^5.1.2: - version "5.1.2" - resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" - integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== - dependencies: - mimic-fn "^2.1.0" - -optionator@^0.9.1: - version "0.9.1" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" - integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== - dependencies: - deep-is "^0.1.3" - fast-levenshtein "^2.0.6" - levn "^0.4.1" - prelude-ls "^1.2.1" - type-check "^0.4.0" - word-wrap "^1.2.3" - -p-limit@^2.2.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" - integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== - dependencies: - p-try "^2.0.0" - -p-locate@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" - integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== - dependencies: - p-limit "^2.2.0" - -p-try@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" - integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== - -parent-module@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" - integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== - dependencies: - callsites "^3.0.0" - -parse-json@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" - integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== - dependencies: - "@babel/code-frame" "^7.0.0" - error-ex "^1.3.1" - json-parse-even-better-errors "^2.3.0" - lines-and-columns "^1.1.6" - -path-exists@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" - integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== - -path-is-absolute@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" - integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== - -path-key@^3.0.0, path-key@^3.1.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" - integrity 
sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== - -path-parse@^1.0.7: - version "1.0.7" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" - integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== - -path-type@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" - integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== - -picocolors@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" - integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== - -picomatch@^2.0.4, picomatch@^2.2.3, picomatch@^2.3.1: - version "2.3.1" - resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" - integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== - -pirates@^4.0.4: - version "4.0.5" - resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" - integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== - -pkg-dir@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" - integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== - dependencies: - find-up "^4.0.0" - -prelude-ls@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" - integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== - -prettier@~2.6: - version "2.6.2" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.6.2.tgz#e26d71a18a74c3d0f0597f55f01fb6c06c206032" - integrity sha512-PkUpF+qoXTqhOeWL9fu7As8LXsIUZ1WYaJiY/a7McAQzxjk82OF0tibkFXVCDImZtWxbvojFjerkiLb0/q8mew== - -pretty-format@^28.0.0, pretty-format@^28.1.1: - version "28.1.1" - resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-28.1.1.tgz#f731530394e0f7fcd95aba6b43c50e02d86b95cb" - integrity sha512-wwJbVTGFHeucr5Jw2bQ9P+VYHyLdAqedFLEkdQUVaBF/eiidDwH5OpilINq4mEfhbCjLnirt6HTTDhv1HaTIQw== - dependencies: - "@jest/schemas" "^28.0.2" - ansi-regex "^5.0.1" - ansi-styles "^5.0.0" - react-is "^18.0.0" - -prompts@^2.0.1: - version "2.4.2" - resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.4.2.tgz#7b57e73b3a48029ad10ebd44f74b01722a4cb069" - integrity sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q== - dependencies: - kleur "^3.0.3" - sisteransi "^1.0.5" - -punycode@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" - integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== - -queue-microtask@^1.2.2: - version "1.2.3" - resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" - integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== - -react-is@^18.0.0: - version "18.2.0" 
- resolved "https://registry.yarnpkg.com/react-is/-/react-is-18.2.0.tgz#199431eeaaa2e09f86427efbb4f1473edb47609b" - integrity sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w== - -regexpp@^3.2.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" - integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== - -require-directory@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" - integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== - -resolve-cwd@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" - integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== - dependencies: - resolve-from "^5.0.0" - -resolve-from@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" - integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== - -resolve-from@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" - integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== - -resolve.exports@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/resolve.exports/-/resolve.exports-1.1.0.tgz#5ce842b94b05146c0e03076985d1d0e7e48c90c9" - integrity sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ== - -resolve@^1.20.0: - version "1.22.1" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" - integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== - dependencies: - is-core-module "^2.9.0" - path-parse "^1.0.7" - supports-preserve-symlinks-flag "^1.0.0" - -reusify@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" - integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== - -rimraf@^3.0.0, rimraf@^3.0.2, rimraf@~3.0: - version "3.0.2" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" - integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== - dependencies: - glob "^7.1.3" - -run-parallel@^1.1.9: - version "1.2.0" - resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" - integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== - dependencies: - queue-microtask "^1.2.2" - -safe-buffer@~5.1.1: - version "5.1.2" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" - integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== - -semver@7.x, semver@^7.3.5, semver@^7.3.7: - version "7.3.7" - resolved 
"https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" - integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== - dependencies: - lru-cache "^6.0.0" - -semver@^6.0.0, semver@^6.3.0: - version "6.3.0" - resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" - integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== - -shallow-clone@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/shallow-clone/-/shallow-clone-3.0.1.tgz#8f2981ad92531f55035b01fb230769a40e02efa3" - integrity sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA== - dependencies: - kind-of "^6.0.2" - -shebang-command@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" - integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== - dependencies: - shebang-regex "^3.0.0" - -shebang-regex@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" - integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== - -signal-exit@^3.0.3, signal-exit@^3.0.7: - version "3.0.7" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" - integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== - -sisteransi@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" - integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== - -slash@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" - integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== - -source-map-support@0.5.13: - version "0.5.13" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.13.tgz#31b24a9c2e73c2de85066c0feb7d44767ed52932" - integrity sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w== - dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - -source-map-support@^0.5.21: - version "0.5.21" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" - integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== - dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - -source-map@^0.6.0, source-map@^0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" - integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== - -sprintf-js@~1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" - integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== - -stack-utils@^2.0.3: - version "2.0.5" - resolved 
"https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.5.tgz#d25265fca995154659dbbfba3b49254778d2fdd5" - integrity sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA== - dependencies: - escape-string-regexp "^2.0.0" - -string-length@^4.0.1: - version "4.0.2" - resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" - integrity sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ== - dependencies: - char-regex "^1.0.2" - strip-ansi "^6.0.0" - -string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: - version "4.2.3" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - -strip-ansi@^6.0.0, strip-ansi@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" - integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== - dependencies: - ansi-regex "^5.0.1" - -strip-bom@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" - integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== - -strip-final-newline@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" - integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== - -strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" - integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== - -supports-color@^5.3.0: - version "5.5.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" - integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== - dependencies: - has-flag "^3.0.0" - -supports-color@^7.0.0, supports-color@^7.1.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" - integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== - dependencies: - has-flag "^4.0.0" - -supports-color@^8.0.0: - version "8.1.1" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" - integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== - dependencies: - has-flag "^4.0.0" - -supports-hyperlinks@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.2.0.tgz#4f77b42488765891774b70c79babd87f9bd594bb" - integrity sha512-6sXEzV5+I5j8Bmq9/vUphGRM/RJNT9SCURJLjwfOg51heRtguGWDzcaBlgAzKhQa0EVNpPEKzQuBwZ8S8WaCeQ== - dependencies: - has-flag "^4.0.0" - supports-color "^7.0.0" - 
-supports-preserve-symlinks-flag@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" - integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== - -terminal-link@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994" - integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== - dependencies: - ansi-escapes "^4.2.1" - supports-hyperlinks "^2.0.0" - -test-exclude@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" - integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== - dependencies: - "@istanbuljs/schema" "^0.1.2" - glob "^7.1.4" - minimatch "^3.0.4" - -text-table@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" - integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== - -throat@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/throat/-/throat-6.0.1.tgz#d514fedad95740c12c2d7fc70ea863eb51ade375" - integrity sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w== - -time-span@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/time-span/-/time-span-5.1.0.tgz#80c76cf5a0ca28e0842d3f10a4e99034ce94b90d" - integrity sha512-75voc/9G4rDIJleOo4jPvN4/YC4GRZrY8yy1uU4lwrB3XEQbWve8zXoO5No4eFrGcTAMYyoY67p8jRQdtA1HbA== - dependencies: - convert-hrtime "^5.0.0" - -tmpl@1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" - integrity sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw== - -to-fast-properties@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" - integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== - -to-regex-range@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" - integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== - dependencies: - is-number "^7.0.0" - -ts-jest@^28.0.5: - version "28.0.5" - resolved "https://registry.yarnpkg.com/ts-jest/-/ts-jest-28.0.5.tgz#31776f768fba6dfc8c061d488840ed0c8eeac8b9" - integrity sha512-Sx9FyP9pCY7pUzQpy4FgRZf2bhHY3za576HMKJFs+OnQ9jS96Du5vNsDKkyedQkik+sEabbKAnCliv9BEsHZgQ== - dependencies: - bs-logger "0.x" - fast-json-stable-stringify "2.x" - jest-util "^28.0.0" - json5 "^2.2.1" - lodash.memoize "4.x" - make-error "1.x" - semver "7.x" - yargs-parser "^21.0.1" - -tslib@^1.8.1: - version "1.14.1" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" - integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== - -tslib@~2.4: - version "2.4.0" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" - integrity 
sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== - -tsutils@^3.21.0, tsutils@~3.21: - version "3.21.0" - resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" - integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== - dependencies: - tslib "^1.8.1" - -type-check@^0.4.0, type-check@~0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" - integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== - dependencies: - prelude-ls "^1.2.1" - -type-detect@4.0.8: - version "4.0.8" - resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" - integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== - -type-fest@^0.20.2: - version "0.20.2" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" - integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== - -type-fest@^0.21.3: - version "0.21.3" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" - integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== - -typescript@~4.7: - version "4.7.4" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.7.4.tgz#1a88596d1cf47d59507a1bcdfb5b9dfe4d488235" - integrity sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ== - -underscore@^1.13.4: - version "1.13.4" - resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.13.4.tgz#7886b46bbdf07f768e0052f1828e1dcab40c0dee" - integrity sha512-BQFnUDuAQ4Yf/cYY5LNrK9NCJFKriaRbD9uR1fTeXnBeoa97W0i41qkZfGO9pSo8I5KzjAcSY2XYtdf0oKd7KQ== - -update-browserslist-db@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.4.tgz#dbfc5a789caa26b1db8990796c2c8ebbce304824" - integrity sha512-jnmO2BEGUjsMOe/Fg9u0oczOe/ppIDZPebzccl1yDWGLFP16Pa1/RM5wEoKYPG2zstNcDuAStejyxsOuKINdGA== - dependencies: - escalade "^3.1.1" - picocolors "^1.0.0" - -uri-js@^4.2.2: - version "4.4.1" - resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" - integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== - dependencies: - punycode "^2.1.0" - -v8-compile-cache@^2.0.3: - version "2.3.0" - resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" - integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== - -v8-to-istanbul@^9.0.1: - version "9.0.1" - resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-9.0.1.tgz#b6f994b0b5d4ef255e17a0d17dc444a9f5132fa4" - integrity sha512-74Y4LqY74kLE6IFyIjPtkSTWzUZmj8tdHT9Ii/26dvQ6K9Dl2NbEfj0XgU2sHCtKgt5VupqhlO/5aWuqS+IY1w== - dependencies: - "@jridgewell/trace-mapping" "^0.3.12" - "@types/istanbul-lib-coverage" "^2.0.1" - convert-source-map "^1.6.0" - -walker@^1.0.8: - version "1.0.8" - resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f" - integrity 
sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ== - dependencies: - makeerror "1.0.12" - -which@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" - integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== - dependencies: - isexe "^2.0.0" - -word-wrap@^1.2.3: - version "1.2.3" - resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" - integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== - -wrap-ansi@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" - integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== - dependencies: - ansi-styles "^4.0.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - -wrappy@1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" - integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== - -write-file-atomic@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-4.0.1.tgz#9faa33a964c1c85ff6f849b80b42a88c2c537c8f" - integrity sha512-nSKUxgAbyioruk6hU87QzVbY279oYT6uiwgDoujth2ju4mJ+TZau7SQBhtbTmUyuNYTuXnSyRn66FV0+eCgcrQ== - dependencies: - imurmurhash "^0.1.4" - signal-exit "^3.0.7" - -y18n@^5.0.5: - version "5.0.8" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" - integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== - -yallist@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" - integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== - -yargs-parser@^21.0.0, yargs-parser@^21.0.1: - version "21.0.1" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.0.1.tgz#0267f286c877a4f0f728fceb6f8a3e4cb95c6e35" - integrity sha512-9BK1jFpLzJROCI5TzwZL/TU4gqjK5xiHV/RfWLOahrjAko/e4DJkRDZQXfvqAsiZzzYhgAzbgz6lg48jcm4GLg== - -yargs@^17.3.1: - version "17.5.1" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.5.1.tgz#e109900cab6fcb7fd44b1d8249166feb0b36e58e" - integrity sha512-t6YAJcxDkNX7NFYiVtKvWUz8l+PaKTLiL63mJYWR2GnHq2gjEWISzsLp9wg3aY36dY1j+gfIEL3pIF+XlJJfbA== - dependencies: - cliui "^7.0.2" - escalade "^3.1.1" - get-caller-file "^2.0.5" - require-directory "^2.1.1" - string-width "^4.2.3" - y18n "^5.0.5" - yargs-parser "^21.0.0" From 06d2f7991cff5c6740f80bad5eec20ab0fa6a1ba Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 17:11:55 +0100 Subject: [PATCH 036/127] rename --- .../provider/{keyguard => keydel}/.gitignore | 0 .../keyguard.go => keydel/keydel.go} | 2 +- .../keydel_test.go} | 2 +- x/ccv/provider/{keyguard => keydel}/main.cfg | 0 x/ccv/provider/{keyguard => keydel}/main.tla | 0 x/ccv/provider/keyguard/example_test.go | 88 ------------------- 6 files changed, 2 insertions(+), 90 deletions(-) rename x/ccv/provider/{keyguard => keydel}/.gitignore (100%) rename x/ccv/provider/{keyguard/keyguard.go => keydel/keydel.go} (99%) rename x/ccv/provider/{keyguard/keyguard_test.go => keydel/keydel_test.go} (99%) 
rename x/ccv/provider/{keyguard => keydel}/main.cfg (100%) rename x/ccv/provider/{keyguard => keydel}/main.tla (100%) delete mode 100644 x/ccv/provider/keyguard/example_test.go diff --git a/x/ccv/provider/keyguard/.gitignore b/x/ccv/provider/keydel/.gitignore similarity index 100% rename from x/ccv/provider/keyguard/.gitignore rename to x/ccv/provider/keydel/.gitignore diff --git a/x/ccv/provider/keyguard/keyguard.go b/x/ccv/provider/keydel/keydel.go similarity index 99% rename from x/ccv/provider/keyguard/keyguard.go rename to x/ccv/provider/keydel/keydel.go index b400a8007b..992025a1d4 100644 --- a/x/ccv/provider/keyguard/keyguard.go +++ b/x/ccv/provider/keydel/keydel.go @@ -1,4 +1,4 @@ -package keyguard +package keydel import "errors" diff --git a/x/ccv/provider/keyguard/keyguard_test.go b/x/ccv/provider/keydel/keydel_test.go similarity index 99% rename from x/ccv/provider/keyguard/keyguard_test.go rename to x/ccv/provider/keydel/keydel_test.go index 24b416fca4..c4af18d264 100644 --- a/x/ccv/provider/keyguard/keyguard_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -1,4 +1,4 @@ -package keyguard +package keydel import ( "math/rand" diff --git a/x/ccv/provider/keyguard/main.cfg b/x/ccv/provider/keydel/main.cfg similarity index 100% rename from x/ccv/provider/keyguard/main.cfg rename to x/ccv/provider/keydel/main.cfg diff --git a/x/ccv/provider/keyguard/main.tla b/x/ccv/provider/keydel/main.tla similarity index 100% rename from x/ccv/provider/keyguard/main.tla rename to x/ccv/provider/keydel/main.tla diff --git a/x/ccv/provider/keyguard/example_test.go b/x/ccv/provider/keyguard/example_test.go deleted file mode 100644 index bd2df572b4..0000000000 --- a/x/ccv/provider/keyguard/example_test.go +++ /dev/null @@ -1,88 +0,0 @@ -package keyguard_test - -import ( - "testing" - - "pgregory.net/rapid" -) - -// Queue implements integer queue with a fixed maximum size. -type Queue struct { - buf []int - in int - out int -} - -func NewQueue(n int) *Queue { - return &Queue{ - buf: make([]int, n+1), - } -} - -// Precondition: Size() > 0. -func (q *Queue) Get() int { - i := q.buf[q.out] - q.out = (q.out + 1) % len(q.buf) - return i -} - -// Precondition: Size() < n. -func (q *Queue) Put(i int) { - q.buf[q.in] = i - q.in = (q.in + 1) % len(q.buf) -} - -func (q *Queue) Size() int { - return (q.in - q.out) % len(q.buf) -} - -// queueMachine is a description of a rapid state machine for testing Queue -type queueMachine struct { - q *Queue // queue being tested - n int // maximum queue size - state []int // model of the queue -} - -// Init is an action for initializing a queueMachine instance. -func (m *queueMachine) Init(t *rapid.T) { - n := rapid.IntRange(1, 1000).Draw(t, "n") - m.q = NewQueue(n) - m.n = n -} - -// Get is a conditional action which removes an item from the queue. -func (m *queueMachine) Get(t *rapid.T) { - if m.q.Size() == 0 { - t.Skip("queue empty") - } - - i := m.q.Get() - if i != m.state[0] { - t.Fatalf("got invalid value: %v vs expected %v", i, m.state[0]) - } - m.state = m.state[1:] -} - -// Put is a conditional action which adds an items to the queue. -func (m *queueMachine) Put(t *rapid.T) { - if m.q.Size() == m.n { - t.Skip("queue full") - } - - i := rapid.Int().Draw(t, "i") - m.q.Put(i) - m.state = append(m.state, i) -} - -// Check runs after every action and verifies that all required invariants hold. 
-func (m *queueMachine) Check(t *rapid.T) { - if m.q.Size() != len(m.state) { - t.Fatalf("queue size mismatch: %v vs expected %v", m.q.Size(), len(m.state)) - } -} - -// Rename to TestQueue(t *testing.T) to make an actual (failing) test. -func ExampleRun_queue() { - var t *testing.T - rapid.Check(t, rapid.Run[*queueMachine]()) -} From 8e5f250f288f18718d12eda630add042d74b0128 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 21 Sep 2022 17:14:11 +0100 Subject: [PATCH 037/127] Checkpoint --- x/ccv/provider/keydel/keydel.go | 44 ++++++++++++++-------------- x/ccv/provider/keydel/keydel_test.go | 22 +++++++------- 2 files changed, 33 insertions(+), 33 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 992025a1d4..f4b6e3bb20 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -15,7 +15,7 @@ type update struct { // TODO: I need to integrate this into the system // TODO: I need to integrate with staking Create/Destroy validator -type KeyGuard struct { +type KeyDel struct { // A new key is added when a relevant update is returned by ComputeUpdates // the key is deleted at earliest after sending an update corresponding // to a call to staking::DeleteValidator TODO: impl this @@ -30,8 +30,8 @@ type KeyGuard struct { foreignToGreatestVSCID map[FK]VSCID } -func MakeKeyGuard() KeyGuard { - return KeyGuard{ +func MakeKeyDel() KeyDel { + return KeyDel{ localToLastPositiveForeignUpdate: map[LK]update{}, localToForeign: map[LK]FK{}, foreignToLocal: map[FK]LK{}, @@ -39,25 +39,25 @@ func MakeKeyGuard() KeyGuard { } } -func (m *KeyGuard) SetLocalToForeign(lk LK, fk FK) { - m.localToForeign[lk] = fk +func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) { + e.localToForeign[lk] = fk } -func (m *KeyGuard) GetLocal(fk FK) (LK, error) { - if lk, ok := m.foreignToLocal[fk]; ok { +func (e *KeyDel) GetLocal(fk FK) (LK, error) { + if lk, ok := e.foreignToLocal[fk]; ok { return lk, nil } else { return -1, errors.New("Nope") } } -func (m *KeyGuard) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { +func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { lks := []LK{} // Key changes - for lk, newFk := range m.localToForeign { - if u, ok := m.localToLastPositiveForeignUpdate[lk]; ok { + for lk, newFk := range e.localToForeign { + if u, ok := e.localToLastPositiveForeignUpdate[lk]; ok { oldFk := u.key if oldFk != newFk { lks = append(lks, lk) @@ -73,14 +73,14 @@ func (m *KeyGuard) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // Make a temporary copy localToLastPositiveForeignUpdate := map[LK]update{} - for lk, u := range m.localToLastPositiveForeignUpdate { + for lk, u := range e.localToLastPositiveForeignUpdate { localToLastPositiveForeignUpdate[lk] = u } // Iterate all local keys for which either the foreign key changed or there // has been a power update. for _, lk := range lks { - if last, ok := m.localToLastPositiveForeignUpdate[lk]; ok { + if last, ok := e.localToLastPositiveForeignUpdate[lk]; ok { // If the key has previously been shipped in an update // delete it. foreignUpdates[last.key] = 0 @@ -92,7 +92,7 @@ func (m *KeyGuard) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // has been a power update. for _, lk := range lks { power := 0 - if last, ok := m.localToLastPositiveForeignUpdate[lk]; ok { + if last, ok := e.localToLastPositiveForeignUpdate[lk]; ok { // If there was a positive power before, use it. 
power = last.power } @@ -102,22 +102,22 @@ func (m *KeyGuard) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { } // Only ship positive powers. if 0 < power { - fk := m.localToForeign[lk] + fk := e.localToForeign[lk] foreignUpdates[fk] = power localToLastPositiveForeignUpdate[lk] = update{key: fk, power: power} } } - m.localToLastPositiveForeignUpdate = localToLastPositiveForeignUpdate + e.localToLastPositiveForeignUpdate = localToLastPositiveForeignUpdate for fk := range foreignUpdates { - m.foreignToGreatestVSCID[fk] = vscid + e.foreignToGreatestVSCID[fk] = vscid } return foreignUpdates } -func (m *KeyGuard) ComputeUpdates(vscid VSCID, localUpdates []update) []update { +func (e *KeyDel) ComputeUpdates(vscid VSCID, localUpdates []update) []update { local := map[LK]int{} @@ -125,7 +125,7 @@ func (m *KeyGuard) ComputeUpdates(vscid VSCID, localUpdates []update) []update { local[u.key] = u.power } - foreign := m.inner(vscid, local) + foreign := e.inner(vscid, local) ret := []update{} @@ -136,15 +136,15 @@ func (m *KeyGuard) ComputeUpdates(vscid VSCID, localUpdates []update) []update { return ret } -func (m *KeyGuard) Prune(mostRecentlyMaturedVscid VSCID) { +func (e *KeyDel) Prune(mostRecentlyMaturedVscid VSCID) { toRemove := []FK{} - for fk, vscid := range m.foreignToGreatestVSCID { + for fk, vscid := range e.foreignToGreatestVSCID { if vscid <= mostRecentlyMaturedVscid { toRemove = append(toRemove, fk) } } for _, fk := range toRemove { - delete(m.foreignToGreatestVSCID, fk) - delete(m.foreignToLocal, fk) + delete(e.foreignToGreatestVSCID, fk) + delete(e.foreignToLocal, fk) } } diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index c4af18d264..abc8807fc8 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -47,7 +47,7 @@ func (vs *ValSet) processUpdates(updates []update) { } func (d *Driver) runTrace() { - kg := MakeKeyGuard() + kg := MakeKeyDel() d.lastTP = 0 d.lastTC = 0 @@ -239,8 +239,6 @@ func getTrace(t *testing.T) []TraceState { } func TestPrototype(t *testing.T) { - - rand.Seed(40) for i := 0; i < 1000; i++ { trace := []TraceState{} for len(trace) < 2 { @@ -253,12 +251,14 @@ func TestPrototype(t *testing.T) { } } -func TestKeyDelegation(t *testing.T) { - traces := [][]TraceState{} - for _, trace := range traces { - d := Driver{} - d.trace = trace - d.t = t - d.runTrace() - } +func TestActual(t *testing.T) { + /* + traces := [][]TraceState{} + for _, trace := range traces { + d := Driver{} + d.trace = trace + d.t = t + d.runTrace() + } + */ } From 9cdd820032dc2ed68b04bf56743944df7b47b402 Mon Sep 17 00:00:00 2001 From: Daniel Date: Thu, 22 Sep 2022 13:32:24 +0100 Subject: [PATCH 038/127] Work on gc --- x/ccv/provider/keydel/keydel.go | 84 ++++++++++++++-------------- x/ccv/provider/keydel/keydel_test.go | 45 ++++++++------- 2 files changed, 66 insertions(+), 63 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index f4b6e3bb20..9851eaed9b 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -27,7 +27,7 @@ type KeyDel struct { // Prunable state foreignToLocal map[FK]LK // Prunable state - foreignToGreatestVSCID map[FK]VSCID + foreignToGreatestVSCIDUsed map[FK]VSCID } func MakeKeyDel() KeyDel { @@ -35,7 +35,7 @@ func MakeKeyDel() KeyDel { localToLastPositiveForeignUpdate: map[LK]update{}, localToForeign: map[LK]FK{}, foreignToLocal: map[FK]LK{}, - foreignToGreatestVSCID: map[FK]VSCID{}, + foreignToGreatestVSCIDUsed: 
map[FK]VSCID{}, } } @@ -51,6 +51,38 @@ func (e *KeyDel) GetLocal(fk FK) (LK, error) { } } +func (e *KeyDel) Prune(mostRecentlyMaturedVscid VSCID) { + toRemove := []FK{} + for fk, vscid := range e.foreignToGreatestVSCIDUsed { + if vscid <= mostRecentlyMaturedVscid { + toRemove = append(toRemove, fk) + } + } + for _, fk := range toRemove { + delete(e.foreignToGreatestVSCIDUsed, fk) + delete(e.foreignToLocal, fk) + } +} + +func (e *KeyDel) ComputeUpdates(vscid VSCID, localUpdates []update) (foreignUpdates []update) { + + local := map[LK]int{} + + for _, u := range localUpdates { + local[u.key] = u.power + } + + foreign := e.inner(vscid, local) + + foreignUpdates = []update{} + + for fk, power := range foreign { + foreignUpdates = append(foreignUpdates, update{key: fk, power: power}) + } + + return foreignUpdates +} + func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { lks := []LK{} @@ -72,9 +104,9 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { foreignUpdates := map[FK]int{} // Make a temporary copy - localToLastPositiveForeignUpdate := map[LK]update{} + lkTLPFU := map[LK]update{} for lk, u := range e.localToLastPositiveForeignUpdate { - localToLastPositiveForeignUpdate[lk] = u + lkTLPFU[lk] = u } // Iterate all local keys for which either the foreign key changed or there @@ -84,7 +116,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // If the key has previously been shipped in an update // delete it. foreignUpdates[last.key] = 0 - delete(localToLastPositiveForeignUpdate, lk) + delete(lkTLPFU, lk) } } @@ -104,47 +136,13 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { if 0 < power { fk := e.localToForeign[lk] foreignUpdates[fk] = power - localToLastPositiveForeignUpdate[lk] = update{key: fk, power: power} + lkTLPFU[lk] = update{key: fk, power: power} + e.foreignToLocal[fk] = lk + e.foreignToGreatestVSCIDUsed[fk] = vscid } } - e.localToLastPositiveForeignUpdate = localToLastPositiveForeignUpdate - - for fk := range foreignUpdates { - e.foreignToGreatestVSCID[fk] = vscid - } + e.localToLastPositiveForeignUpdate = lkTLPFU return foreignUpdates } - -func (e *KeyDel) ComputeUpdates(vscid VSCID, localUpdates []update) []update { - - local := map[LK]int{} - - for _, u := range localUpdates { - local[u.key] = u.power - } - - foreign := e.inner(vscid, local) - - ret := []update{} - - for fk, power := range foreign { - ret = append(ret, update{key: fk, power: power}) - } - - return ret -} - -func (e *KeyDel) Prune(mostRecentlyMaturedVscid VSCID) { - toRemove := []FK{} - for fk, vscid := range e.foreignToGreatestVSCID { - if vscid <= mostRecentlyMaturedVscid { - toRemove = append(toRemove, fk) - } - } - for _, fk := range toRemove { - delete(e.foreignToGreatestVSCID, fk) - delete(e.foreignToLocal, fk) - } -} diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index abc8807fc8..fcbb4d0537 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -105,28 +105,33 @@ func (d *Driver) runTrace() { } func (d *Driver) checkProperties() { - // Check that the foreign ValSet is equal to the local ValSet - // at time TC via inverse mapping - foreignSet := d.foreignValSet.keyToPower - localSet := d.localValSets[d.lastTC].keyToPower - mapping := d.mappings[d.lastTC] - inverseMapping := map[FK]LK{} - for lk, fk := range mapping { - inverseMapping[fk] = lk - } - foreignSetAsLocal := map[LK]int{} - for fk, power := range foreignSet { - 
foreignSetAsLocal[inverseMapping[fk]] = power - } - for lk, actual := range foreignSetAsLocal { - expect := localSet[lk] - require.Equal(d.t, expect, actual) - } - for lk, expect := range localSet { - actual := foreignSetAsLocal[lk] - require.Equal(d.t, expect, actual) + + validatorSetReplication := func() { + // Check that the foreign ValSet is equal to the local ValSet + // at time TC via inverse mapping + foreignSet := d.foreignValSet.keyToPower + localSet := d.localValSets[d.lastTC].keyToPower + mapping := d.mappings[d.lastTC] + inverseMapping := map[FK]LK{} + for lk, fk := range mapping { + inverseMapping[fk] = lk + } + foreignSetAsLocal := map[LK]int{} + for fk, power := range foreignSet { + foreignSetAsLocal[inverseMapping[fk]] = power + } + for lk, actual := range foreignSetAsLocal { + expect := localSet[lk] + require.Equal(d.t, expect, actual) + } + for lk, expect := range localSet { + actual := foreignSetAsLocal[lk] + require.Equal(d.t, expect, actual) + } } + validatorSetReplication() + // TODO: check pruning is correct (reverse lookup) } From 3c57520b89901a25a93b8a884a870e241b17ea13 Mon Sep 17 00:00:00 2001 From: Daniel Date: Thu, 22 Sep 2022 14:24:28 +0100 Subject: [PATCH 039/127] cp --- x/ccv/provider/keydel/keydel.go | 14 ++++- x/ccv/provider/keydel/keydel_test.go | 85 +++++++++++++++++++++------- 2 files changed, 76 insertions(+), 23 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 9851eaed9b..991d09c318 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -39,8 +39,20 @@ func MakeKeyDel() KeyDel { } } -func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) { +func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { + if existingLk, ok := e.foreignToLocal[fk]; ok { + if existingLk != lk { + // We prevent reusing foreign keys which are still used for local + // key lookups. Otherwise it would be possible for a local key A + // to commit an infraction under the foreign key X and change + // the mapping of foreign key X to a local key B before the evidence + // arrives. 
+ return errors.New(`Cannot reuse foreign key which is associated + to a different local key.`) + } + } e.localToForeign[lk] = fk + return nil } func (e *KeyDel) GetLocal(fk FK) (LK, error) { diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index fcbb4d0537..0a4728c5a2 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -7,6 +7,10 @@ import ( "github.com/stretchr/testify/require" ) +const TRACE_LEN = 1000 +const NUM_VALS = 3 +const NUM_FKS = 9 + type TraceState struct { Mapping map[LK]FK LocalUpdates []update @@ -47,7 +51,7 @@ func (vs *ValSet) processUpdates(updates []update) { } func (d *Driver) runTrace() { - kg := MakeKeyDel() + kd := MakeKeyDel() d.lastTP = 0 d.lastTC = 0 @@ -60,15 +64,15 @@ func (d *Driver) runTrace() { init := d.trace[0] d.mappings = append(d.mappings, init.Mapping) for lk, fk := range init.Mapping { - kg.SetLocalToForeign(lk, fk) + kd.SetLocalToForeign(lk, fk) } // Set the initial local set d.localValSets = append(d.localValSets, MakeValSet()) d.localValSets[init.TP].processUpdates(init.LocalUpdates) // Set the initial foreign set - d.foreignUpdates = append(d.foreignUpdates, kg.ComputeUpdates(init.TP, init.LocalUpdates)) + d.foreignUpdates = append(d.foreignUpdates, kd.ComputeUpdates(init.TP, init.LocalUpdates)) d.foreignValSet.processUpdates(d.foreignUpdates[init.TC]) - kg.Prune(init.TM) + kd.Prune(init.TM) require.Len(d.t, d.mappings, 1) require.Len(d.t, d.foreignUpdates, 1) @@ -84,9 +88,9 @@ func (d *Driver) runTrace() { d.localValSets[s.TP].processUpdates(s.LocalUpdates) d.lastTP = s.TP for lk, fk := range s.Mapping { - kg.SetLocalToForeign(lk, fk) + kd.SetLocalToForeign(lk, fk) } - d.foreignUpdates = append(d.foreignUpdates, kg.ComputeUpdates(s.TP, s.LocalUpdates)) + d.foreignUpdates = append(d.foreignUpdates, kd.ComputeUpdates(s.TP, s.LocalUpdates)) } if d.lastTC < s.TC { for j := d.lastTC + 1; j <= s.TC; j++ { @@ -95,22 +99,30 @@ func (d *Driver) runTrace() { d.lastTC = s.TC } if d.lastTM < s.TM { - // TODO: check this because TM is initialised to 0 but 0 has not actually matured - // TODO: I think one solution IS TO ACTUALLY prune 0 in init - kg.Prune(s.TM) + kd.Prune(s.TM) d.lastTM = s.TM } - d.checkProperties() + d.checkProperties(kd) } } -func (d *Driver) checkProperties() { +func (d *Driver) checkProperties(kd KeyDel) { + /* + A consumer who receives vscid i must have a validator set + equal to the validator set on the provider at vscid id mapped + through the key delegation. 
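To make the replication property stated just above concrete, here is a small illustrative sketch; it is editorial and not part of any patch in this series. It assumes the signatures introduced around the 039 revision (SetLocalToForeign returning an error, ComputeUpdates taking a VSCID and a slice of local updates), and it assumes LK, FK and VSCID are plain integer types, as the surrounding tests suggest. The helper name replicationSketch and the concrete key and power numbers are invented for illustration only.

package keydel

import "fmt"

// replicationSketch walks one provider-to-consumer round trip with the API
// from these diffs: two local keys are delegated, a power update is computed
// at vscid 1, and a re-delegation at vscid 2 yields a zero-power update for
// the old foreign key plus a positive update for the new one.
func replicationSketch() {
	e := MakeKeyDel()
	if err := e.SetLocalToForeign(0, 10); err != nil {
		panic(err)
	}
	if err := e.SetLocalToForeign(1, 11); err != nil {
		panic(err)
	}

	// vscid 1: provider powers become {0: 5, 1: 7}.
	updates := e.ComputeUpdates(1, []update{{key: 0, power: 5}, {key: 1, power: 7}})
	fmt.Println(len(updates)) // 2 updates: foreign keys 10 and 11, powers 5 and 7

	// vscid 2: local key 0 is re-delegated to foreign key 12, no power change.
	if err := e.SetLocalToForeign(0, 12); err != nil {
		panic(err)
	}
	updates = e.ComputeUpdates(2, []update{})
	fmt.Println(len(updates)) // 2 updates: foreign key 10 zeroed, foreign key 12 gets power 5
}

The second ComputeUpdates call is the interesting one: a re-delegation with no power change still produces a deletion for the old foreign key and a creation for the new one, which is what keeps the consumer set equal to the mapped provider set.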
+ */ validatorSetReplication := func() { // Check that the foreign ValSet is equal to the local ValSet // at time TC via inverse mapping + + // Get the current consumer val set foreignSet := d.foreignValSet.keyToPower + // Get the provider set at the relevant time localSet := d.localValSets[d.lastTC].keyToPower + + // Map the consumer set back through the inverse key mapping mapping := d.mappings[d.lastTC] inverseMapping := map[FK]LK{} for lk, fk := range mapping { @@ -120,27 +132,56 @@ func (d *Driver) checkProperties() { for fk, power := range foreignSet { foreignSetAsLocal[inverseMapping[fk]] = power } - for lk, actual := range foreignSetAsLocal { - expect := localSet[lk] - require.Equal(d.t, expect, actual) + + // Ensure that the validator sets match exactly + for lk, expectedPower := range localSet { + actualPower := foreignSetAsLocal[lk] + require.Equal(d.t, expectedPower, actualPower) } - for lk, expect := range localSet { - actual := foreignSetAsLocal[lk] + for lk, actualPower := range foreignSetAsLocal { + expectedPower := localSet[lk] + require.Equal(d.t, expectedPower, actualPower) + } + } + + /* + Two more properties which must be satisfied by KeyDel when + used correctly inside a wider system: + + 1. If a foreign key is delivered to the consumer with positive power at + VSCID i then the local key associated to it must be retrievable + until i is matured. (Consumer initiated slashing property). + 2. If a foreign key is not delivered to the consumer at any VSCID j + with i < j and i is matured, then the foreign key is deleted + from storage. (Garbage collection property). + */ + queries := func() { + expectQueryable := map[FK]bool{} + // If the foreign key was used in [TimeMaturity + 1, TimeConsumer] + // it must be queryable. + for i := d.lastTM + 1; i <= d.lastTC; i++ { + valSet := d.localValSets[i] + mapping := d.mappings[i] + for lk := range valSet.keyToPower { + expectQueryable[mapping[lk]] = true + } + } + for fk := 0; fk < NUM_FKS; fk++ { + _, actual := kd.foreignToLocal[fk] + _, expect := expectQueryable[fk] + require.Equal(d.t, expect, actual) + _, actual = kd.foreignToGreatestVSCIDUsed[fk] require.Equal(d.t, expect, actual) } } validatorSetReplication() + queries() - // TODO: check pruning is correct (reverse lookup) } func getTrace(t *testing.T) []TraceState { - TRACE_LEN := 1000 - NUM_VALS := 3 - NUM_FKS := 9 - mapping := func() map[LK]FK { // TODO: currently I don't generate partial mappings but I might want to // Create a mapping of nums [0, NUM_VALS] mapped injectively to [0, NUM_FKS] @@ -160,7 +201,7 @@ func getTrace(t *testing.T) []TraceState { } for !good() { for lk := 0; lk < NUM_VALS; lk++ { - ret[lk] = -rand.Intn(NUM_FKS) + ret[lk] = rand.Intn(NUM_FKS) } } return ret From 51e9e2527902baddba4d04370ebef12951ae4e70 Mon Sep 17 00:00:00 2001 From: Daniel Date: Thu, 22 Sep 2022 14:33:55 +0100 Subject: [PATCH 040/127] cp --- x/ccv/provider/keydel/keydel.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 991d09c318..7ca09badfa 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -47,8 +47,8 @@ func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { // to commit an infraction under the foreign key X and change // the mapping of foreign key X to a local key B before the evidence // arrives. 
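The two properties listed above (consumer-initiated slashing and garbage collection) are what GetLocal and Prune exist for. A rough sketch of the intended call pattern follows, under the same assumptions as before (integer key types, the 038-040 signatures); pruneSketch is an invented name and the sketch is editorial, not part of the patches.

package keydel

import "fmt"

// pruneSketch exercises the reverse lookup used for consumer-initiated
// slashing and the garbage collection performed by Prune.
func pruneSketch() {
	e := MakeKeyDel()
	if err := e.SetLocalToForeign(0, 10); err != nil {
		panic(err)
	}

	// vscid 1 ships positive power for foreign key 10, which records the
	// reverse-lookup entry 10 -> 0.
	e.ComputeUpdates(1, []update{{key: 0, power: 5}})

	// While vscid 1 is unmatured, evidence arriving under foreign key 10
	// can be resolved back to local key 0.
	if lk, err := e.GetLocal(10); err == nil {
		fmt.Println("foreign key 10 belongs to local key", lk)
	}

	// Once the consumer has matured vscid 1, Prune(1) garbage collects every
	// entry whose last use was at or before vscid 1, so the lookup now fails.
	e.Prune(1)
	if _, err := e.GetLocal(10); err != nil {
		fmt.Println("foreign key 10 has been pruned")
	}
}

Prune removes every reverse-lookup entry whose last use is at or before the matured VSCID, so a lookup made after maturity is expected to fail; the queries check in the test above mirrors the same expectation.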
- return errors.New(`Cannot reuse foreign key which is associated - to a different local key.`) + return errors.New(`cannot reuse foreign key which is associated + to a different local key`) } } e.localToForeign[lk] = fk From 54d7135c2d75a906c94a5d22850b5766b0c1bae5 Mon Sep 17 00:00:00 2001 From: Daniel Date: Thu, 22 Sep 2022 14:49:12 +0100 Subject: [PATCH 041/127] Pre-change way mapping is generated --- x/ccv/provider/keydel/keydel.go | 26 +++++++++++++++++++++++++ x/ccv/provider/keydel/keydel_test.go | 29 ++++++++++++++-------------- 2 files changed, 40 insertions(+), 15 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 7ca09badfa..f908eb9312 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -129,6 +129,8 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // delete it. foreignUpdates[last.key] = 0 delete(lkTLPFU, lk) + e.foreignToLocal[last.key] = lk + e.foreignToGreatestVSCIDUsed[last.key] = vscid } } @@ -158,3 +160,27 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { return foreignUpdates } + +// Returns true iff internal invariants hold +func (e *KeyDel) internalInvariants() bool { + // All keys of foreignToLocal and foreignToGreatestVSCIDUsed are equal + for fk := range e.foreignToLocal { + if _, ok := e.foreignToGreatestVSCIDUsed[fk]; !ok { + return false + } + } + for fk := range e.foreignToGreatestVSCIDUsed { + if _, ok := e.foreignToLocal[fk]; !ok { + return false + } + } + seen := map[FK]bool{} + // No two local keys can map to the same foreign key + for _, fk := range e.localToForeign { + if seen[fk] { + return false + } + seen[fk] = true + } + return true +} diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 0a4728c5a2..ed099521af 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -51,7 +51,7 @@ func (vs *ValSet) processUpdates(updates []update) { } func (d *Driver) runTrace() { - kd := MakeKeyDel() + e := MakeKeyDel() d.lastTP = 0 d.lastTC = 0 @@ -64,15 +64,15 @@ func (d *Driver) runTrace() { init := d.trace[0] d.mappings = append(d.mappings, init.Mapping) for lk, fk := range init.Mapping { - kd.SetLocalToForeign(lk, fk) + e.SetLocalToForeign(lk, fk) } // Set the initial local set d.localValSets = append(d.localValSets, MakeValSet()) d.localValSets[init.TP].processUpdates(init.LocalUpdates) // Set the initial foreign set - d.foreignUpdates = append(d.foreignUpdates, kd.ComputeUpdates(init.TP, init.LocalUpdates)) + d.foreignUpdates = append(d.foreignUpdates, e.ComputeUpdates(init.TP, init.LocalUpdates)) d.foreignValSet.processUpdates(d.foreignUpdates[init.TC]) - kd.Prune(init.TM) + e.Prune(init.TM) require.Len(d.t, d.mappings, 1) require.Len(d.t, d.foreignUpdates, 1) @@ -88,9 +88,9 @@ func (d *Driver) runTrace() { d.localValSets[s.TP].processUpdates(s.LocalUpdates) d.lastTP = s.TP for lk, fk := range s.Mapping { - kd.SetLocalToForeign(lk, fk) + e.SetLocalToForeign(lk, fk) } - d.foreignUpdates = append(d.foreignUpdates, kd.ComputeUpdates(s.TP, s.LocalUpdates)) + d.foreignUpdates = append(d.foreignUpdates, e.ComputeUpdates(s.TP, s.LocalUpdates)) } if d.lastTC < s.TC { for j := d.lastTC + 1; j <= s.TC; j++ { @@ -99,14 +99,15 @@ func (d *Driver) runTrace() { d.lastTC = s.TC } if d.lastTM < s.TM { - kd.Prune(s.TM) + e.Prune(s.TM) d.lastTM = s.TM } - d.checkProperties(kd) + d.checkProperties(e) + require.True(d.t, e.internalInvariants()) } } -func (d *Driver) 
checkProperties(kd KeyDel) { +func (d *Driver) checkProperties(e KeyDel) { /* A consumer who receives vscid i must have a validator set @@ -160,17 +161,15 @@ func (d *Driver) checkProperties(kd KeyDel) { // If the foreign key was used in [TimeMaturity + 1, TimeConsumer] // it must be queryable. for i := d.lastTM + 1; i <= d.lastTC; i++ { - valSet := d.localValSets[i] - mapping := d.mappings[i] - for lk := range valSet.keyToPower { - expectQueryable[mapping[lk]] = true + for _, u := range d.foreignUpdates[i] { + expectQueryable[u.key] = true } } for fk := 0; fk < NUM_FKS; fk++ { - _, actual := kd.foreignToLocal[fk] + _, actual := e.foreignToLocal[fk] _, expect := expectQueryable[fk] require.Equal(d.t, expect, actual) - _, actual = kd.foreignToGreatestVSCIDUsed[fk] + _, actual = e.foreignToGreatestVSCIDUsed[fk] require.Equal(d.t, expect, actual) } } From 852667835e8e3a678b006883023ae6ae4db1111c Mon Sep 17 00:00:00 2001 From: Daniel Date: Thu, 22 Sep 2022 15:04:35 +0100 Subject: [PATCH 042/127] cp --- x/ccv/provider/keydel/keydel.go | 36 +++++++++++++++++++-------------- 1 file changed, 21 insertions(+), 15 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index f908eb9312..0070539625 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -33,9 +33,11 @@ type KeyDel struct { func MakeKeyDel() KeyDel { return KeyDel{ localToLastPositiveForeignUpdate: map[LK]update{}, - localToForeign: map[LK]FK{}, - foreignToLocal: map[FK]LK{}, - foreignToGreatestVSCIDUsed: map[FK]VSCID{}, + // At most one local key can map to a given foreign key + localToForeign: map[LK]FK{}, + // Many foreign keys can map to the same local key + foreignToLocal: map[FK]LK{}, + foreignToGreatestVSCIDUsed: map[FK]VSCID{}, } } @@ -52,6 +54,11 @@ func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { } } e.localToForeign[lk] = fk + // TODO: I need to rethink how garbage collection works a bit + // because fks are always for the current reverse local key set + // In the tests, I also need to change the way mappings are checked + // because some mappings might be rejected by this function + e.foreignToLocal[fk] = lk return nil } @@ -163,24 +170,23 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // Returns true iff internal invariants hold func (e *KeyDel) internalInvariants() bool { - // All keys of foreignToLocal and foreignToGreatestVSCIDUsed are equal - for fk := range e.foreignToLocal { - if _, ok := e.foreignToGreatestVSCIDUsed[fk]; !ok { - return false - } - } - for fk := range e.foreignToGreatestVSCIDUsed { - if _, ok := e.foreignToLocal[fk]; !ok { - return false - } - } - seen := map[FK]bool{} + // No two local keys can map to the same foreign key + seen := map[FK]bool{} for _, fk := range e.localToForeign { if seen[fk] { return false } seen[fk] = true } + + // All local keys have a reverse lookup + for _, fk := range e.localToForeign { + if _, ok := e.foreignToLocal[fk]; !ok { + return false + } + } + return true + } From 7f7bc65604744dbbdcd5da4be7fdc2da35e4ba7b Mon Sep 17 00:00:00 2001 From: Daniel Date: Thu, 22 Sep 2022 16:15:35 +0100 Subject: [PATCH 043/127] pre rework map testing --- x/ccv/provider/keydel/keydel.go | 81 +++++++++++++++------------- x/ccv/provider/keydel/keydel_test.go | 4 +- 2 files changed, 45 insertions(+), 40 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 0070539625..2e6526a3b7 100644 --- a/x/ccv/provider/keydel/keydel.go +++ 
b/x/ccv/provider/keydel/keydel.go @@ -11,59 +11,64 @@ type update struct { power int } -// TODO: I need to integrate this into the keyStore -// TODO: I need to integrate this into the system -// TODO: I need to integrate with staking Create/Destroy validator +// TODO: +// 1. Integrate into kv store. +// 2. integrate into Provider::EndBlock, +// 3. integrate with create/destroy validator type KeyDel struct { - // A new key is added when a relevant update is returned by ComputeUpdates - // the key is deleted at earliest after sending an update corresponding - // to a call to staking::DeleteValidator TODO: impl this - localToLastPositiveForeignUpdate map[LK]update // A new key is added on staking::CreateValidator // the key is deleted at earliest after sending an update corresponding - // to a call to staking::DeleteValidator TODO: impl this + // to a call to staking::DeleteValidator + // At most one local key can map to a given foreign key localToForeign map[LK]FK + // Is the foreign key mapped to in localToForeign? + foreignIsMappedTo map[FK]bool // Prunable state - foreignToLocal map[FK]LK + usedForeignToLocal map[FK]LK // Prunable state - foreignToGreatestVSCIDUsed map[FK]VSCID + usedForeignToLastVSCID map[FK]VSCID + // A new key is added when a relevant update is returned by ComputeUpdates + // the key is deleted at earliest after sending an update corresponding + // to a call to staking::DeleteValidator + localToLastPositiveForeignUpdate map[LK]update } func MakeKeyDel() KeyDel { return KeyDel{ + localToForeign: map[LK]FK{}, + foreignIsMappedTo: map[FK]bool{}, + usedForeignToLocal: map[FK]LK{}, + usedForeignToLastVSCID: map[FK]VSCID{}, localToLastPositiveForeignUpdate: map[LK]update{}, - // At most one local key can map to a given foreign key - localToForeign: map[LK]FK{}, - // Many foreign keys can map to the same local key - foreignToLocal: map[FK]LK{}, - foreignToGreatestVSCIDUsed: map[FK]VSCID{}, } } func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { - if existingLk, ok := e.foreignToLocal[fk]; ok { - if existingLk != lk { - // We prevent reusing foreign keys which are still used for local - // key lookups. Otherwise it would be possible for a local key A - // to commit an infraction under the foreign key X and change - // the mapping of foreign key X to a local key B before the evidence - // arrives. - return errors.New(`cannot reuse foreign key which is associated + if _, ok := e.foreignIsMappedTo[fk]; ok { + return errors.New(`cannot reuse foreign key which is associated to a different local key`) - } + } + if _, ok := e.usedForeignToLocal[fk]; ok { + // We prevent reusing foreign keys which are still used for local + // key lookups. Otherwise it would be possible for a local key A + // to commit an infraction under the foreign key X and change + // the mapping of foreign key X to a local key B before evidence + // arrives. 
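A reduced model of the bookkeeping sketched by these fields, using made-up names and int keys: a foreign key becomes queryable once it has been used in an update, stays queryable (and so non-reusable) even after the mapping moves on, and is only released by pruning after the last vscid it was used at has matured.

// Sketch of the "used and still queryable" state, independent of KeyDel.
type usedEntry struct {
	lk    int
	vscid int
}

type usedSet map[int]usedEntry

// noteUse records that fk was sent in the update for lk at vscid.
func (u usedSet) noteUse(fk, lk, vscid int) {
	u[fk] = usedEntry{lk: lk, vscid: vscid}
}

// reusable reports whether fk may be handed to a new local key: only once
// no lookup for it can still be needed.
func (u usedSet) reusable(fk int) bool {
	_, stillQueryable := u[fk]
	return !stillQueryable
}

// localFor is the slash-time lookup: the local key fk was last used for.
func (u usedSet) localFor(fk int) (int, bool) {
	e, ok := u[fk]
	return e.lk, ok
}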
+ return errors.New(`cannot reuse foreign key which was associated to a different + local key and which is still queryable`) + } + if otherFk, ok := e.localToForeign[lk]; ok { + delete(e.foreignIsMappedTo, otherFk) } e.localToForeign[lk] = fk - // TODO: I need to rethink how garbage collection works a bit - // because fks are always for the current reverse local key set - // In the tests, I also need to change the way mappings are checked - // because some mappings might be rejected by this function - e.foreignToLocal[fk] = lk + e.foreignIsMappedTo[fk] = true return nil } func (e *KeyDel) GetLocal(fk FK) (LK, error) { - if lk, ok := e.foreignToLocal[fk]; ok { + // TODO: make possible even for unused? + if lk, ok := e.usedForeignToLocal[fk]; ok { return lk, nil } else { return -1, errors.New("Nope") @@ -72,14 +77,14 @@ func (e *KeyDel) GetLocal(fk FK) (LK, error) { func (e *KeyDel) Prune(mostRecentlyMaturedVscid VSCID) { toRemove := []FK{} - for fk, vscid := range e.foreignToGreatestVSCIDUsed { + for fk, vscid := range e.usedForeignToLastVSCID { if vscid <= mostRecentlyMaturedVscid { toRemove = append(toRemove, fk) } } for _, fk := range toRemove { - delete(e.foreignToGreatestVSCIDUsed, fk) - delete(e.foreignToLocal, fk) + delete(e.usedForeignToLastVSCID, fk) + delete(e.usedForeignToLocal, fk) } } @@ -136,8 +141,8 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // delete it. foreignUpdates[last.key] = 0 delete(lkTLPFU, lk) - e.foreignToLocal[last.key] = lk - e.foreignToGreatestVSCIDUsed[last.key] = vscid + e.usedForeignToLocal[last.key] = lk + e.usedForeignToLastVSCID[last.key] = vscid } } @@ -158,8 +163,8 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { fk := e.localToForeign[lk] foreignUpdates[fk] = power lkTLPFU[lk] = update{key: fk, power: power} - e.foreignToLocal[fk] = lk - e.foreignToGreatestVSCIDUsed[fk] = vscid + e.usedForeignToLocal[fk] = lk + e.usedForeignToLastVSCID[fk] = vscid } } @@ -182,7 +187,7 @@ func (e *KeyDel) internalInvariants() bool { // All local keys have a reverse lookup for _, fk := range e.localToForeign { - if _, ok := e.foreignToLocal[fk]; !ok { + if _, ok := e.usedForeignToLocal[fk]; !ok { return false } } diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index ed099521af..c8a4e0f181 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -166,10 +166,10 @@ func (d *Driver) checkProperties(e KeyDel) { } } for fk := 0; fk < NUM_FKS; fk++ { - _, actual := e.foreignToLocal[fk] + _, actual := e.usedForeignToLocal[fk] _, expect := expectQueryable[fk] require.Equal(d.t, expect, actual) - _, actual = e.foreignToGreatestVSCIDUsed[fk] + _, actual = e.usedForeignToLastVSCID[fk] require.Equal(d.t, expect, actual) } } From 2081454fb7d6519f50e7d19eccdd4679ca38f374 Mon Sep 17 00:00:00 2001 From: Daniel Date: Thu, 22 Sep 2022 16:25:59 +0100 Subject: [PATCH 044/127] cp --- x/ccv/provider/keydel/keydel_test.go | 111 +++++++++++++++------------ 1 file changed, 62 insertions(+), 49 deletions(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index c8a4e0f181..7d7cf89dfe 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -11,16 +11,22 @@ const TRACE_LEN = 1000 const NUM_VALS = 3 const NUM_FKS = 9 +type MapInstruction struct { + lk LK + fk FK +} + type TraceState struct { - Mapping map[LK]FK - LocalUpdates []update - TP int - TC int - TM int + MapInstructions 
[]MapInstruction + LocalUpdates []update + TP int + TC int + TM int } type Driver struct { t *testing.T + e *KeyDel trace []TraceState lastTP int lastTC int @@ -50,9 +56,18 @@ func (vs *ValSet) processUpdates(updates []update) { } } -func (d *Driver) runTrace() { - e := MakeKeyDel() +func (d *Driver) applyMapInstruction(instructions []MapInstruction) { + for _, instruction := range instructions { + _ = d.e.SetLocalToForeign(instruction.lk, instruction.fk) + } + copy := map[LK]FK{} + for lk, fk := range d.e.localToForeign { + copy[lk] = fk + } + d.mappings = append(d.mappings, copy) +} +func (d *Driver) runTrace() { d.lastTP = 0 d.lastTC = 0 d.lastTM = 0 @@ -62,17 +77,14 @@ func (d *Driver) runTrace() { d.foreignValSet = MakeValSet() init := d.trace[0] - d.mappings = append(d.mappings, init.Mapping) - for lk, fk := range init.Mapping { - e.SetLocalToForeign(lk, fk) - } + d.applyMapInstruction(init.MapInstructions) // Set the initial local set d.localValSets = append(d.localValSets, MakeValSet()) d.localValSets[init.TP].processUpdates(init.LocalUpdates) // Set the initial foreign set - d.foreignUpdates = append(d.foreignUpdates, e.ComputeUpdates(init.TP, init.LocalUpdates)) + d.foreignUpdates = append(d.foreignUpdates, d.e.ComputeUpdates(init.TP, init.LocalUpdates)) d.foreignValSet.processUpdates(d.foreignUpdates[init.TC]) - e.Prune(init.TM) + d.e.Prune(init.TM) require.Len(d.t, d.mappings, 1) require.Len(d.t, d.foreignUpdates, 1) @@ -80,17 +92,16 @@ func (d *Driver) runTrace() { for _, s := range d.trace[1:] { if d.lastTP < s.TP { - d.mappings = append(d.mappings, s.Mapping) - d.localValSets = append(d.localValSets, MakeValSet()) - for lk, power := range d.localValSets[d.lastTP].keyToPower { - d.localValSets[s.TP].keyToPower[lk] = power + d.applyMapInstruction(s.MapInstructions) + { + d.localValSets = append(d.localValSets, MakeValSet()) + for lk, power := range d.localValSets[d.lastTP].keyToPower { + d.localValSets[s.TP].keyToPower[lk] = power + } + d.localValSets[s.TP].processUpdates(s.LocalUpdates) } - d.localValSets[s.TP].processUpdates(s.LocalUpdates) d.lastTP = s.TP - for lk, fk := range s.Mapping { - e.SetLocalToForeign(lk, fk) - } - d.foreignUpdates = append(d.foreignUpdates, e.ComputeUpdates(s.TP, s.LocalUpdates)) + d.foreignUpdates = append(d.foreignUpdates, d.e.ComputeUpdates(s.TP, s.LocalUpdates)) } if d.lastTC < s.TC { for j := d.lastTC + 1; j <= s.TC; j++ { @@ -99,15 +110,15 @@ func (d *Driver) runTrace() { d.lastTC = s.TC } if d.lastTM < s.TM { - e.Prune(s.TM) + d.e.Prune(s.TM) d.lastTM = s.TM } - d.checkProperties(e) - require.True(d.t, e.internalInvariants()) + d.checkProperties() + require.True(d.t, d.e.internalInvariants()) } } -func (d *Driver) checkProperties(e KeyDel) { +func (d *Driver) checkProperties() { /* A consumer who receives vscid i must have a validator set @@ -166,10 +177,10 @@ func (d *Driver) checkProperties(e KeyDel) { } } for fk := 0; fk < NUM_FKS; fk++ { - _, actual := e.usedForeignToLocal[fk] + _, actual := d.e.usedForeignToLocal[fk] _, expect := expectQueryable[fk] require.Equal(d.t, expect, actual) - _, actual = e.usedForeignToLastVSCID[fk] + _, actual = d.e.usedForeignToLastVSCID[fk] require.Equal(d.t, expect, actual) } } @@ -181,7 +192,7 @@ func (d *Driver) checkProperties(e KeyDel) { func getTrace(t *testing.T) []TraceState { - mapping := func() map[LK]FK { + mappings := func() []MapInstruction { // TODO: currently I don't generate partial mappings but I might want to // Create a mapping of nums [0, NUM_VALS] mapped injectively to [0, NUM_FKS] ret 
:= map[LK]FK{} @@ -218,11 +229,11 @@ func getTrace(t *testing.T) []TraceState { ret := []TraceState{ { - Mapping: mapping(), - LocalUpdates: localUpdates(), - TP: 0, - TC: 0, - TM: 0, + MapInstructions: mappings(), + LocalUpdates: localUpdates(), + TP: 0, + TC: 0, + TM: 0, }, } @@ -233,11 +244,11 @@ func getTrace(t *testing.T) []TraceState { good := false if choice == 0 { ret = append(ret, TraceState{ - Mapping: mapping(), - LocalUpdates: localUpdates(), - TP: last.TP + 1, - TC: last.TC, - TM: last.TM, + MapInstructions: mapping(), + LocalUpdates: localUpdates(), + TP: last.TP + 1, + TC: last.TC, + TM: last.TM, }) good = true } @@ -251,11 +262,11 @@ func getTrace(t *testing.T) []TraceState { newTC := rand.Intn(limInclusive-curr) + curr + 1 require.True(t, curr < newTC && curr <= limInclusive) ret = append(ret, TraceState{ - Mapping: nil, - LocalUpdates: nil, - TP: last.TP, - TC: newTC, - TM: last.TM, + MapInstructions: nil, + LocalUpdates: nil, + TP: last.TP, + TC: newTC, + TM: last.TM, }) good = true } @@ -267,11 +278,11 @@ func getTrace(t *testing.T) []TraceState { newTM := rand.Intn(limInclusive-curr) + curr + 1 require.True(t, curr < newTM && curr <= limInclusive) ret = append(ret, TraceState{ - Mapping: nil, - LocalUpdates: nil, - TP: last.TP, - TC: last.TC, - TM: newTM, + MapInstructions: nil, + LocalUpdates: nil, + TP: last.TP, + TC: last.TC, + TM: newTM, }) good = true } @@ -292,6 +303,8 @@ func TestPrototype(t *testing.T) { d := Driver{} d.trace = trace d.t = t + e := MakeKeyDel() + d.e = &e d.runTrace() } } From 706dbc36514a8dc37a2a3cee5af6240ce068e7a8 Mon Sep 17 00:00:00 2001 From: Daniel Date: Thu, 22 Sep 2022 16:28:18 +0100 Subject: [PATCH 045/127] cp --- x/ccv/provider/keydel/keydel_test.go | 35 ++++++++-------------------- 1 file changed, 10 insertions(+), 25 deletions(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 7d7cf89dfe..4a7e989ce6 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -11,13 +11,13 @@ const TRACE_LEN = 1000 const NUM_VALS = 3 const NUM_FKS = 9 -type MapInstruction struct { +type mapInstruction struct { lk LK fk FK } type TraceState struct { - MapInstructions []MapInstruction + MapInstructions []mapInstruction LocalUpdates []update TP int TC int @@ -56,7 +56,7 @@ func (vs *ValSet) processUpdates(updates []update) { } } -func (d *Driver) applyMapInstruction(instructions []MapInstruction) { +func (d *Driver) applyMapInstruction(instructions []mapInstruction) { for _, instruction := range instructions { _ = d.e.SetLocalToForeign(instruction.lk, instruction.fk) } @@ -192,27 +192,12 @@ func (d *Driver) checkProperties() { func getTrace(t *testing.T) []TraceState { - mappings := func() []MapInstruction { - // TODO: currently I don't generate partial mappings but I might want to - // Create a mapping of nums [0, NUM_VALS] mapped injectively to [0, NUM_FKS] - ret := map[LK]FK{} - good := func() bool { - if len(ret) != NUM_VALS { - return false - } - seen := map[FK]bool{} - for _, fk := range ret { - if _, ok := seen[fk]; ok { - return false - } - seen[fk] = true - } - return true - } - for !good() { - for lk := 0; lk < NUM_VALS; lk++ { - ret[lk] = rand.Intn(NUM_FKS) - } + mappings := func() []mapInstruction { + ret := []mapInstruction{} + // include 0 to all validators + include := rand.Intn(NUM_VALS + 1) + for _, lk := range rand.Perm(NUM_VALS)[0:include] { + ret = append(ret, mapInstruction{lk, rand.Intn(NUM_FKS)}) } return ret } @@ -244,7 +229,7 @@ func getTrace(t 
*testing.T) []TraceState { good := false if choice == 0 { ret = append(ret, TraceState{ - MapInstructions: mapping(), + MapInstructions: mappings(), LocalUpdates: localUpdates(), TP: last.TP + 1, TC: last.TC, From dea61ecd4c01869120a5987282188642a30063eb Mon Sep 17 00:00:00 2001 From: Daniel Date: Thu, 22 Sep 2022 16:49:20 +0100 Subject: [PATCH 046/127] cp --- x/ccv/provider/keydel/keydel_test.go | 68 ++++++++++++++-------------- 1 file changed, 33 insertions(+), 35 deletions(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 4a7e989ce6..7415406ca7 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -56,7 +56,7 @@ func (vs *ValSet) processUpdates(updates []update) { } } -func (d *Driver) applyMapInstruction(instructions []mapInstruction) { +func (d *Driver) applyMapInstructions(instructions []mapInstruction) { for _, instruction := range instructions { _ = d.e.SetLocalToForeign(instruction.lk, instruction.fk) } @@ -67,17 +67,20 @@ func (d *Driver) applyMapInstruction(instructions []mapInstruction) { d.mappings = append(d.mappings, copy) } +func (d *Driver) applyLocalUpdates(localUpdates []update) { + valSet := MakeValSet() + for lk, power := range d.localValSets[d.lastTP].keyToPower { + valSet.keyToPower[lk] = power + } + valSet.processUpdates(localUpdates) + d.localValSets = append(d.localValSets, valSet) +} + func (d *Driver) runTrace() { - d.lastTP = 0 - d.lastTC = 0 - d.lastTM = 0 - d.mappings = []map[LK]FK{} - d.foreignUpdates = [][]update{} - d.localValSets = []ValSet{} - d.foreignValSet = MakeValSet() init := d.trace[0] - d.applyMapInstruction(init.MapInstructions) + // Set the initial map + d.applyMapInstructions(init.MapInstructions) // Set the initial local set d.localValSets = append(d.localValSets, MakeValSet()) d.localValSets[init.TP].processUpdates(init.LocalUpdates) @@ -92,16 +95,10 @@ func (d *Driver) runTrace() { for _, s := range d.trace[1:] { if d.lastTP < s.TP { - d.applyMapInstruction(s.MapInstructions) - { - d.localValSets = append(d.localValSets, MakeValSet()) - for lk, power := range d.localValSets[d.lastTP].keyToPower { - d.localValSets[s.TP].keyToPower[lk] = power - } - d.localValSets[s.TP].processUpdates(s.LocalUpdates) - } - d.lastTP = s.TP + d.applyMapInstructions(s.MapInstructions) + d.applyLocalUpdates(s.LocalUpdates) d.foreignUpdates = append(d.foreignUpdates, d.e.ComputeUpdates(s.TP, s.LocalUpdates)) + d.lastTP = s.TP } if d.lastTC < s.TC { for j := d.lastTC + 1; j <= s.TC; j++ { @@ -126,8 +123,6 @@ func (d *Driver) checkProperties() { through the key delegation. 
*/ validatorSetReplication := func() { - // Check that the foreign ValSet is equal to the local ValSet - // at time TC via inverse mapping // Get the current consumer val set foreignSet := d.foreignValSet.keyToPower @@ -194,10 +189,13 @@ func getTrace(t *testing.T) []TraceState { mappings := func() []mapInstruction { ret := []mapInstruction{} - // include 0 to all validators - include := rand.Intn(NUM_VALS + 1) - for _, lk := range rand.Perm(NUM_VALS)[0:include] { - ret = append(ret, mapInstruction{lk, rand.Intn(NUM_FKS)}) + // Go several times to have overlapping validator updates + for i := 0; i < 2; i++ { + // include 0 to all validators + include := rand.Intn(NUM_VALS + 1) + for _, lk := range rand.Perm(NUM_VALS)[0:include] { + ret = append(ret, mapInstruction{lk, rand.Intn(NUM_FKS)}) + } } return ret } @@ -214,7 +212,8 @@ func getTrace(t *testing.T) []TraceState { ret := []TraceState{ { - MapInstructions: mappings(), + // Hard code initial mapping + MapInstructions: []mapInstruction{{0, 0}, {1, 1}, {2, 2}}, LocalUpdates: localUpdates(), TP: 0, TC: 0, @@ -222,11 +221,9 @@ func getTrace(t *testing.T) []TraceState { }, } - i := 1 - for i < TRACE_LEN { + for i := 0; i < TRACE_LEN; i++ { choice := rand.Intn(3) last := ret[len(ret)-1] - good := false if choice == 0 { ret = append(ret, TraceState{ MapInstructions: mappings(), @@ -235,7 +232,6 @@ func getTrace(t *testing.T) []TraceState { TC: last.TC, TM: last.TM, }) - good = true } if choice == 1 { curr := last.TC @@ -253,7 +249,6 @@ func getTrace(t *testing.T) []TraceState { TC: newTC, TM: last.TM, }) - good = true } } if choice == 2 { @@ -269,12 +264,8 @@ func getTrace(t *testing.T) []TraceState { TC: last.TC, TM: newTM, }) - good = true } } - if good { - i++ - } } return ret } @@ -286,10 +277,17 @@ func TestPrototype(t *testing.T) { trace = getTrace(t) } d := Driver{} - d.trace = trace d.t = t e := MakeKeyDel() d.e = &e + d.trace = trace + d.lastTP = 0 + d.lastTC = 0 + d.lastTM = 0 + d.mappings = []map[LK]FK{} + d.foreignUpdates = [][]update{} + d.localValSets = []ValSet{} + d.foreignValSet = MakeValSet() d.runTrace() } } From 83ba7b20b72982780ff6012ad137ab55b0ce9cb5 Mon Sep 17 00:00:00 2001 From: Daniel Date: Thu, 22 Sep 2022 16:51:51 +0100 Subject: [PATCH 047/127] cp --- x/ccv/provider/keydel/keydel_test.go | 34 ++++++++++++++++------------ 1 file changed, 20 insertions(+), 14 deletions(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 7415406ca7..211b0cb7f1 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -47,7 +47,7 @@ func MakeValSet() ValSet { return ValSet{keyToPower: map[int]int{}} } -func (vs *ValSet) processUpdates(updates []update) { +func (vs *ValSet) applyUpdates(updates []update) { for _, u := range updates { delete(vs.keyToPower, u.key) if 0 < u.power { @@ -72,22 +72,24 @@ func (d *Driver) applyLocalUpdates(localUpdates []update) { for lk, power := range d.localValSets[d.lastTP].keyToPower { valSet.keyToPower[lk] = power } - valSet.processUpdates(localUpdates) + valSet.applyUpdates(localUpdates) d.localValSets = append(d.localValSets, valSet) } func (d *Driver) runTrace() { - init := d.trace[0] - // Set the initial map - d.applyMapInstructions(init.MapInstructions) - // Set the initial local set - d.localValSets = append(d.localValSets, MakeValSet()) - d.localValSets[init.TP].processUpdates(init.LocalUpdates) - // Set the initial foreign set - d.foreignUpdates = append(d.foreignUpdates, d.e.ComputeUpdates(init.TP, 
init.LocalUpdates)) - d.foreignValSet.processUpdates(d.foreignUpdates[init.TC]) - d.e.Prune(init.TM) + { + init := d.trace[0] + // Set the initial map + d.applyMapInstructions(init.MapInstructions) + // Set the initial local set + d.localValSets = append(d.localValSets, MakeValSet()) + d.localValSets[init.TP].applyUpdates(init.LocalUpdates) + // Set the initial foreign set + d.foreignUpdates = append(d.foreignUpdates, d.e.ComputeUpdates(init.TP, init.LocalUpdates)) + d.foreignValSet.applyUpdates(d.foreignUpdates[init.TC]) + d.e.Prune(init.TM) + } require.Len(d.t, d.mappings, 1) require.Len(d.t, d.foreignUpdates, 1) @@ -102,7 +104,7 @@ func (d *Driver) runTrace() { } if d.lastTC < s.TC { for j := d.lastTC + 1; j <= s.TC; j++ { - d.foreignValSet.processUpdates(d.foreignUpdates[j]) + d.foreignValSet.applyUpdates(d.foreignUpdates[j]) } d.lastTC = s.TC } @@ -172,9 +174,13 @@ func (d *Driver) checkProperties() { } } for fk := 0; fk < NUM_FKS; fk++ { - _, actual := d.e.usedForeignToLocal[fk] _, expect := expectQueryable[fk] + + // Check foreign to local lookup is available (or not) + _, actual := d.e.usedForeignToLocal[fk] require.Equal(d.t, expect, actual) + + // Check internals are consistent _, actual = d.e.usedForeignToLastVSCID[fk] require.Equal(d.t, expect, actual) } From 3f590b39befba528b614cc12987574b966682c23 Mon Sep 17 00:00:00 2001 From: Daniel Date: Thu, 22 Sep 2022 16:53:48 +0100 Subject: [PATCH 048/127] cp --- x/ccv/provider/keydel/keydel.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 2e6526a3b7..2a0dd3e79c 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -67,7 +67,8 @@ func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { } func (e *KeyDel) GetLocal(fk FK) (LK, error) { - // TODO: make possible even for unused? + // TODO: make it possible lookup local keys even + // when the foreign key has not yet been used? if lk, ok := e.usedForeignToLocal[fk]; ok { return lk, nil } else { From 636da59545cf7ae2693531293e9336f90323b07a Mon Sep 17 00:00:00 2001 From: Daniel Date: Thu, 22 Sep 2022 17:11:43 +0100 Subject: [PATCH 049/127] impl garbage collection --- x/ccv/provider/keydel/keydel.go | 28 +++++++++++++++++++++------- x/ccv/provider/keydel/keydel_test.go | 9 ++++++--- 2 files changed, 27 insertions(+), 10 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 2a0dd3e79c..e130082789 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -46,8 +46,8 @@ func MakeKeyDel() KeyDel { func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { if _, ok := e.foreignIsMappedTo[fk]; ok { - return errors.New(`cannot reuse foreign key which is associated - to a different local key`) + return errors.New(`cannot use foreign key which is + already associated to a local key`) } if _, ok := e.usedForeignToLocal[fk]; ok { // We prevent reusing foreign keys which are still used for local @@ -55,8 +55,8 @@ func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { // to commit an infraction under the foreign key X and change // the mapping of foreign key X to a local key B before evidence // arrives. 
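The trace generator's mappings() above relies on rand.Perm to pick a random subset of local keys and propose a foreign key for each. A small self-contained example of that pattern, with illustrative constants:

package main

import (
	"fmt"
	"math/rand"
)

func main() {
	const numVals, numFKs = 4, 50
	// Choose anywhere from 0 to numVals local keys, in random order,
	// and pair each with a random candidate foreign key. Collisions are
	// allowed here; the store is expected to reject invalid assignments.
	include := rand.Intn(numVals + 1)
	for _, lk := range rand.Perm(numVals)[:include] {
		fmt.Printf("assign local %d -> foreign %d\n", lk, rand.Intn(numFKs))
	}
}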
- return errors.New(`cannot reuse foreign key which was associated to a different - local key and which is still queryable`) + return errors.New(`cannot reuse foreign key which was associated to + a different local key and which is still queryable`) } if otherFk, ok := e.localToForeign[lk]; ok { delete(e.foreignIsMappedTo, otherFk) @@ -84,8 +84,8 @@ func (e *KeyDel) Prune(mostRecentlyMaturedVscid VSCID) { } } for _, fk := range toRemove { - delete(e.usedForeignToLastVSCID, fk) delete(e.usedForeignToLocal, fk) + delete(e.usedForeignToLastVSCID, fk) } } @@ -186,9 +186,23 @@ func (e *KeyDel) internalInvariants() bool { seen[fk] = true } - // All local keys have a reverse lookup + // All foreign keys mapped to by local keys are noted for _, fk := range e.localToForeign { - if _, ok := e.usedForeignToLocal[fk]; !ok { + if _, ok := e.foreignIsMappedTo[fk]; !ok { + return false + } + } + + // All mapped to foreign keys are actually mapped toy + for fk := range e.foreignIsMappedTo { + good := false + for _, mappedFK := range e.localToForeign { + if mappedFK == fk { + good = true + break + } + } + if !good { return false } } diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 211b0cb7f1..5af772acdd 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -157,6 +157,9 @@ func (d *Driver) checkProperties() { Two more properties which must be satisfied by KeyDel when used correctly inside a wider system: + TODO: need to rephrase the below because they aren't completely accurate + the upper bound on time is actually TP (inclusive), not TC. + 1. If a foreign key is delivered to the consumer with positive power at VSCID i then the local key associated to it must be retrievable until i is matured. (Consumer initiated slashing property). @@ -164,11 +167,11 @@ func (d *Driver) checkProperties() { with i < j and i is matured, then the foreign key is deleted from storage. (Garbage collection property). */ - queries := func() { + queriesAndGarbageCollection := func() { expectQueryable := map[FK]bool{} // If the foreign key was used in [TimeMaturity + 1, TimeConsumer] // it must be queryable. 
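Property 1 is what the wider system leans on when evidence arrives from the consumer chain. A hypothetical sketch of that call site; lookupLocal stands in for a GetLocal-style query and punish for the staking module's slashing entry point, neither of which is named that way in this code.

// Sketch: attribute consumer evidence for foreign key fk to a local key.
func handleConsumerEvidence(fk int, lookupLocal func(fk int) (lk int, ok bool), punish func(lk int)) bool {
	lk, ok := lookupLocal(fk)
	if !ok {
		// The lookup was pruned; by property 1 this can only happen after
		// the relevant vscid matured, so the evidence is too old to act on.
		return false
	}
	punish(lk)
	return true
}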
- for i := d.lastTM + 1; i <= d.lastTC; i++ { + for i := d.lastTM + 1; i <= d.lastTP; i++ { for _, u := range d.foreignUpdates[i] { expectQueryable[u.key] = true } @@ -187,7 +190,7 @@ func (d *Driver) checkProperties() { } validatorSetReplication() - queries() + queriesAndGarbageCollection() } From 3d26f3e120a2d14702eadd64a230f791d4e6482d Mon Sep 17 00:00:00 2001 From: Daniel Date: Thu, 22 Sep 2022 17:17:03 +0100 Subject: [PATCH 050/127] CP with better test --- x/ccv/provider/keydel/keydel_test.go | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 5af772acdd..9b6ba415f3 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -8,8 +8,8 @@ import ( ) const TRACE_LEN = 1000 -const NUM_VALS = 3 -const NUM_FKS = 9 +const NUM_VALS = 4 +const NUM_FKS = 50 type mapInstruction struct { lk LK @@ -219,10 +219,15 @@ func getTrace(t *testing.T) []TraceState { return ret } + initialMappings := []mapInstruction{} + for i := 0; i < NUM_VALS; i++ { + initialMappings = append(initialMappings, mapInstruction{i, i}) + } + ret := []TraceState{ { // Hard code initial mapping - MapInstructions: []mapInstruction{{0, 0}, {1, 1}, {2, 2}}, + MapInstructions: initialMappings, LocalUpdates: localUpdates(), TP: 0, TC: 0, From 1c7f993832c3005b61f55aaa1a0a976932dd4e2c Mon Sep 17 00:00:00 2001 From: Daniel Date: Mon, 3 Oct 2022 08:37:21 -0500 Subject: [PATCH 051/127] CP string --- x/ccv/provider/keydel/keydel.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index e130082789..4affd75cb3 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -47,7 +47,7 @@ func MakeKeyDel() KeyDel { func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { if _, ok := e.foreignIsMappedTo[fk]; ok { return errors.New(`cannot use foreign key which is - already associated to a local key`) + already currently associated to a local key`) } if _, ok := e.usedForeignToLocal[fk]; ok { // We prevent reusing foreign keys which are still used for local From 0d7ce228b44a807f2d181c45ed46157904067ba5 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 13:27:30 -0500 Subject: [PATCH 052/127] typo --- x/ccv/provider/keydel/keydel.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 4affd75cb3..2dbd31edfd 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -193,7 +193,7 @@ func (e *KeyDel) internalInvariants() bool { } } - // All mapped to foreign keys are actually mapped toy + // All mapped to foreign keys are actually mapped to for fk := range e.foreignIsMappedTo { good := false for _, mappedFK := range e.localToForeign { From 5327bb032d9ba73e069de68da5055f98056141d5 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 13:39:31 -0500 Subject: [PATCH 053/127] In test use list of foreignValSets for consistency --- x/ccv/provider/keydel/keydel_test.go | 29 ++++++++++++++++++++-------- 1 file changed, 21 insertions(+), 8 deletions(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 9b6ba415f3..ae947b19ad 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -35,8 +35,8 @@ type Driver struct { mappings []map[LK]FK foreignUpdates [][]update localValSets []ValSet - // corresponds to 
TC - foreignValSet ValSet + // indexed by TC + foreignValSets []ValSet } type ValSet struct { @@ -87,7 +87,8 @@ func (d *Driver) runTrace() { d.localValSets[init.TP].applyUpdates(init.LocalUpdates) // Set the initial foreign set d.foreignUpdates = append(d.foreignUpdates, d.e.ComputeUpdates(init.TP, init.LocalUpdates)) - d.foreignValSet.applyUpdates(d.foreignUpdates[init.TC]) + d.foreignValSets = append(d.foreignValSets, MakeValSet()) + d.foreignValSets[init.TC].applyUpdates(d.foreignUpdates[init.TC]) d.e.Prune(init.TM) } @@ -97,18 +98,30 @@ func (d *Driver) runTrace() { for _, s := range d.trace[1:] { if d.lastTP < s.TP { + // Provider time increment: + // Apply some key mappings and create some new validator power updates d.applyMapInstructions(s.MapInstructions) d.applyLocalUpdates(s.LocalUpdates) d.foreignUpdates = append(d.foreignUpdates, d.e.ComputeUpdates(s.TP, s.LocalUpdates)) d.lastTP = s.TP } if d.lastTC < s.TC { + // Duplicate the valSet known at lastTC for j := d.lastTC + 1; j <= s.TC; j++ { - d.foreignValSet.applyUpdates(d.foreignUpdates[j]) + d.foreignValSets = append(d.foreignValSets, MakeValSet()) + for fk, power := range d.foreignValSets[d.lastTC].keyToPower { + d.foreignValSets[j].keyToPower[fk] = power + } + } + // Apply the updates since lastTC ONLY TO s.TC + // This models the consumer receiving updates from several blocks at once + for j := d.lastTC + 1; j <= s.TC; j++ { + d.foreignValSets[s.TC].applyUpdates(d.foreignUpdates[j]) } d.lastTC = s.TC } if d.lastTM < s.TM { + // Models maturations being received on the provider. d.e.Prune(s.TM) d.lastTM = s.TM } @@ -127,7 +140,7 @@ func (d *Driver) checkProperties() { validatorSetReplication := func() { // Get the current consumer val set - foreignSet := d.foreignValSet.keyToPower + foreignSet := d.foreignValSets[d.lastTC].keyToPower // Get the provider set at the relevant time localSet := d.localValSets[d.lastTC].keyToPower @@ -167,7 +180,7 @@ func (d *Driver) checkProperties() { with i < j and i is matured, then the foreign key is deleted from storage. (Garbage collection property). */ - queriesAndGarbageCollection := func() { + queriesAndCorrectPruning := func() { expectQueryable := map[FK]bool{} // If the foreign key was used in [TimeMaturity + 1, TimeConsumer] // it must be queryable. 
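The catch-up step modelled above (the consumer applying updates from several provider blocks in one go) can be isolated as below. keyPower is a stand-in for the update type, and the zero-power-removes-the-key convention matches applyUpdates.

// Sketch: apply every pending batch in (from, to] on top of a copy of the
// last known consumer set.
type keyPower struct{ key, power int }

func catchUp(base map[int]int, batches [][]keyPower, from, to int) map[int]int {
	next := map[int]int{}
	for k, p := range base {
		next[k] = p
	}
	for i := from + 1; i <= to && i < len(batches); i++ {
		for _, u := range batches[i] {
			if u.power == 0 {
				delete(next, u.key)
			} else {
				next[u.key] = u.power
			}
		}
	}
	return next
}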
@@ -190,7 +203,7 @@ func (d *Driver) checkProperties() { } validatorSetReplication() - queriesAndGarbageCollection() + queriesAndCorrectPruning() } @@ -301,7 +314,7 @@ func TestPrototype(t *testing.T) { d.mappings = []map[LK]FK{} d.foreignUpdates = [][]update{} d.localValSets = []ValSet{} - d.foreignValSet = MakeValSet() + d.foreignValSets = []ValSet{} d.runTrace() } } From 034ebc2c82eeb6fdd3eb12b668929805d22e6ebd Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 13:41:08 -0500 Subject: [PATCH 054/127] DEL TestActual --- x/ccv/provider/keydel/keydel_test.go | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index ae947b19ad..0da73e7022 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -318,15 +318,3 @@ func TestPrototype(t *testing.T) { d.runTrace() } } - -func TestActual(t *testing.T) { - /* - traces := [][]TraceState{} - for _, trace := range traces { - d := Driver{} - d.trace = trace - d.t = t - d.runTrace() - } - */ -} From 7556cd8ca0edfd86dfdc629577e50ea838348081 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 13:46:17 -0500 Subject: [PATCH 055/127] minor tidyups --- x/ccv/provider/keydel/keydel_test.go | 42 ++++++++++++++++++++-------- 1 file changed, 30 insertions(+), 12 deletions(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 0da73e7022..2429086be8 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -39,6 +39,22 @@ type Driver struct { foreignValSets []ValSet } +func MakeDriver(t *testing.T, trace []TraceState) Driver { + d := Driver{} + d.t = t + e := MakeKeyDel() + d.e = &e + d.trace = trace + d.lastTP = 0 + d.lastTC = 0 + d.lastTM = 0 + d.mappings = []map[LK]FK{} + d.foreignUpdates = [][]update{} + d.localValSets = []ValSet{} + d.foreignValSets = []ValSet{} + return d +} + type ValSet struct { keyToPower map[int]int } @@ -92,10 +108,13 @@ func (d *Driver) runTrace() { d.e.Prune(init.TM) } + // Sanity check the initial state require.Len(d.t, d.mappings, 1) require.Len(d.t, d.foreignUpdates, 1) require.Len(d.t, d.localValSets, 1) + require.Len(d.t, d.foreignValSets, 1) + // Check properties for each state after the initial for _, s := range d.trace[1:] { if d.lastTP < s.TP { // Provider time increment: @@ -202,8 +221,18 @@ func (d *Driver) checkProperties() { } } + /* + For a given foreign key: if a local key lookup is successful, the + local key returned is the unique local key which was known to the + consumer + */ + correctSlashing := func() { + + } + validatorSetReplication() queriesAndCorrectPruning() + correctSlashing() } @@ -303,18 +332,7 @@ func TestPrototype(t *testing.T) { for len(trace) < 2 { trace = getTrace(t) } - d := Driver{} - d.t = t - e := MakeKeyDel() - d.e = &e - d.trace = trace - d.lastTP = 0 - d.lastTC = 0 - d.lastTM = 0 - d.mappings = []map[LK]FK{} - d.foreignUpdates = [][]update{} - d.localValSets = []ValSet{} - d.foreignValSets = []ValSet{} + d := MakeDriver(t, trace) d.runTrace() } } From 438d50966b7fb721f4198131d0ca33e81afdbe3e Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 13:53:02 -0500 Subject: [PATCH 056/127] Improve comment for queriesAndPruningProperty --- x/ccv/provider/keydel/keydel_test.go | 28 +++++++++++++++++----------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 
2429086be8..5e73def846 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -189,25 +189,31 @@ func (d *Driver) checkProperties() { Two more properties which must be satisfied by KeyDel when used correctly inside a wider system: - TODO: need to rephrase the below because they aren't completely accurate - the upper bound on time is actually TP (inclusive), not TC. - - 1. If a foreign key is delivered to the consumer with positive power at - VSCID i then the local key associated to it must be retrievable - until i is matured. (Consumer initiated slashing property). - 2. If a foreign key is not delivered to the consumer at any VSCID j - with i < j and i is matured, then the foreign key is deleted - from storage. (Garbage collection property). + 1. If a foreign key IS used in an update for the consumer, with a positive + power, at VSCID i, then the local key associated to it must be queryable + until i is matured. (Consumer Initiated Slashing Property). Phrased another + way: foreign keys which are known to the consumer may be useable for slashing + until the corresponding maturity is received. Thus they must be queryable. + 2. If a foreign key IS NOT used in an update for a consumer for a VSCID j + with i < j, and i is matured, then the foreign key is deleted from storage. + (Pruning property). Phrased another way: i matured, and there was no update + after i that references the foreign key. The foreign key cannot be used + for slashing anymore, so it can and should be garbage collected. */ queriesAndCorrectPruning := func() { expectQueryable := map[FK]bool{} - // If the foreign key was used in [TimeMaturity + 1, TimeConsumer] - // it must be queryable. + for i := d.lastTM + 1; i <= d.lastTP; i++ { + // If the foreign key was used, recently, and did not mature + /// then we expect it to be queryable (for slashing). for _, u := range d.foreignUpdates[i] { expectQueryable[u.key] = true } + // Otherwise, it was not used, or was used a long time ago + // (after maturity). Then we expect it to be garbage collected. } + + // Simply check every foreign key for the correct queryable-ness. for fk := 0; fk < NUM_FKS; fk++ { _, expect := expectQueryable[fk] From be34202eda83c97bba539886c89e7d58cfa09322 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 13:53:18 -0500 Subject: [PATCH 057/127] RN garbage -> pruned --- x/ccv/provider/keydel/keydel_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 5e73def846..83dab98e7b 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -198,7 +198,7 @@ func (d *Driver) checkProperties() { with i < j, and i is matured, then the foreign key is deleted from storage. (Pruning property). Phrased another way: i matured, and there was no update after i that references the foreign key. The foreign key cannot be used - for slashing anymore, so it can and should be garbage collected. + for slashing anymore, so it can and should be pruned. */ queriesAndCorrectPruning := func() { expectQueryable := map[FK]bool{} @@ -210,7 +210,7 @@ func (d *Driver) checkProperties() { expectQueryable[u.key] = true } // Otherwise, it was not used, or was used a long time ago - // (after maturity). Then we expect it to be garbage collected. + // (after maturity). Then we expect it to be pruned. } // Simply check every foreign key for the correct queryable-ness. 
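Reduced to its essentials, that pruning rule only needs the last vscid at which each foreign key was referenced: everything referenced no later than the most recently matured vscid can be forgotten. A sketch with illustrative names:

// Sketch: drop every foreign key whose last reference has matured.
func pruneMatured(lastUsedVscid map[int]int, mostRecentlyMatured int) {
	for fk, vscid := range lastUsedVscid {
		if vscid <= mostRecentlyMatured {
			delete(lastUsedVscid, fk)
		}
	}
}

Collecting the keys into a slice before deleting, as Prune does in keydel.go, is equivalent; Go also permits deleting the entry currently being visited while ranging over a map.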
From fd91d27a350562d3b3498f8b8d473c159170f9ad Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 13:57:58 -0500 Subject: [PATCH 058/127] Improve test property descriptions --- x/ccv/provider/keydel/keydel_test.go | 32 ++++++++++++++++------------ 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 83dab98e7b..831d38b89f 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -152,29 +152,33 @@ func (d *Driver) runTrace() { func (d *Driver) checkProperties() { /* - A consumer who receives vscid i must have a validator set - equal to the validator set on the provider at vscid id mapped - through the key delegation. + When a consumer receives and processes up to VSCID i, + it must have a validator set equal to that on the provider at i + mapped through the key mapping that was on the provider when i + was sent. */ validatorSetReplication := func() { - // Get the current consumer val set + // Get the current consumer val set. foreignSet := d.foreignValSets[d.lastTC].keyToPower - // Get the provider set at the relevant time + // Get the provider set at the corresponding time. localSet := d.localValSets[d.lastTC].keyToPower - // Map the consumer set back through the inverse key mapping - mapping := d.mappings[d.lastTC] - inverseMapping := map[FK]LK{} - for lk, fk := range mapping { - inverseMapping[fk] = lk - } + // Compute a lookup mapping consumer powers + // back to provider powers, to enable comparison. foreignSetAsLocal := map[LK]int{} - for fk, power := range foreignSet { - foreignSetAsLocal[inverseMapping[fk]] = power + { + mapping := d.mappings[d.lastTC] + inverseMapping := map[FK]LK{} + for lk, fk := range mapping { + inverseMapping[fk] = lk + } + for fk, power := range foreignSet { + foreignSetAsLocal[inverseMapping[fk]] = power + } } - // Ensure that the validator sets match exactly + // Ensure that the sets match exactly for lk, expectedPower := range localSet { actualPower := foreignSetAsLocal[lk] require.Equal(d.t, expectedPower, actualPower) From 4dda68d0eb757fd8b75ecc98bab64bd2a6d71812 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 14:28:28 -0500 Subject: [PATCH 059/127] Work on slash query property --- x/ccv/provider/keydel/keydel_test.go | 34 +++++++++++++++++++++------- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 831d38b89f..0df6708305 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -204,7 +204,7 @@ func (d *Driver) checkProperties() { after i that references the foreign key. The foreign key cannot be used for slashing anymore, so it can and should be pruned. 
*/ - queriesAndCorrectPruning := func() { + pruning := func() { expectQueryable := map[FK]bool{} for i := d.lastTM + 1; i <= d.lastTP; i++ { @@ -232,17 +232,35 @@ func (d *Driver) checkProperties() { } /* - For a given foreign key: if a local key lookup is successful, the - local key returned is the unique local key which was known to the - consumer - */ - correctSlashing := func() { + */ + queries := func() { + // For each vscid that the consumer has received, but not yet + // matured + for i := d.lastTM + 1; i <= d.lastTC; i++ { + for consumerFK := range d.foreignValSets[i].keyToPower { + queriedLK, err := d.e.GetLocal(consumerFK) + // There must be a corresponding local key + require.Nil(d.t, err) + providerFKs := map[FK]bool{} + for providerLK, providerFK := range d.mappings[i] { + require.Falsef(d.t, providerFKs[providerFK], "two local keys map to the same foreign key") + providerFKs[providerFK] = true + if consumerFK == providerFK { + // A mapping to the consumer FK was found + // The corresponding LK must be the one queried. + require.Equal(d.t, providerLK, queriedLK) + } + } + // Check that the comparison was actually made! + require.Truef(d.t, providerFKs[consumerFK], "no mapping found for foreign key") + } + } } validatorSetReplication() - queriesAndCorrectPruning() - correctSlashing() + pruning() + queries() } From 7cf7d1f887c4ab3164318eaf88ac007249b0e7ef Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 15:16:14 -0500 Subject: [PATCH 060/127] Clarify meaning of foregnValSets[i] --- x/ccv/provider/keydel/keydel_test.go | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 0df6708305..7a97323801 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -131,11 +131,9 @@ func (d *Driver) runTrace() { for fk, power := range d.foreignValSets[d.lastTC].keyToPower { d.foreignValSets[j].keyToPower[fk] = power } - } - // Apply the updates since lastTC ONLY TO s.TC - // This models the consumer receiving updates from several blocks at once - for j := d.lastTC + 1; j <= s.TC; j++ { - d.foreignValSets[s.TC].applyUpdates(d.foreignUpdates[j]) + for k := d.lastTC + 1; k <= j; k++ { + d.foreignValSets[j].applyUpdates(d.foreignUpdates[k]) + } } d.lastTC = s.TC } @@ -251,8 +249,9 @@ func (d *Driver) checkProperties() { require.Equal(d.t, providerLK, queriedLK) } } + good := providerFKs[consumerFK] // Check that the comparison was actually made! 
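The check above combines a reverse lookup with an injectivity assertion. The same step as a standalone helper over int keys; a false injective result corresponds to the "two local keys map to the same foreign key" failure above.

// Sketch: find the unique local key mapped to consumerFK under the mapping
// the provider had at the relevant time.
func uniqueLocalFor(mapping map[int]int, consumerFK int) (lk int, found bool, injective bool) {
	injective = true
	seenFK := map[int]bool{}
	for l, f := range mapping {
		if seenFK[f] {
			injective = false
		}
		seenFK[f] = true
		if f == consumerFK {
			lk, found = l, true
		}
	}
	return lk, found, injective
}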
- require.Truef(d.t, providerFKs[consumerFK], "no mapping found for foreign key") + require.Truef(d.t, good, "no mapping found for foreign key") } } From 596b7f119380f9f389a8b13334b58360f937890c Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 15:19:48 -0500 Subject: [PATCH 061/127] revert --- x/ccv/provider/keydel/keydel_test.go | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 7a97323801..1e8147566a 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -131,9 +131,11 @@ func (d *Driver) runTrace() { for fk, power := range d.foreignValSets[d.lastTC].keyToPower { d.foreignValSets[j].keyToPower[fk] = power } - for k := d.lastTC + 1; k <= j; k++ { - d.foreignValSets[j].applyUpdates(d.foreignUpdates[k]) - } + } + // Apply the updates since lastTC ONLY TO s.TC + // This models the consumer receiving updates from several blocks at once + for j := d.lastTC + 1; j <= s.TC; j++ { + d.foreignValSets[s.TC].applyUpdates(d.foreignUpdates[j]) } d.lastTC = s.TC } From b363366591c9385324acd4a16f7be3fbbafd6ff6 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 15:34:24 -0500 Subject: [PATCH 062/127] Adds foreignValSetTimes to test --- x/ccv/provider/keydel/keydel.go | 2 +- x/ccv/provider/keydel/keydel_test.go | 29 +++++++++++++++++----------- 2 files changed, 19 insertions(+), 12 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 2dbd31edfd..192fe928d8 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -72,7 +72,7 @@ func (e *KeyDel) GetLocal(fk FK) (LK, error) { if lk, ok := e.usedForeignToLocal[fk]; ok { return lk, nil } else { - return -1, errors.New("Nope") + return -1, errors.New("local key not found for foreign key") } } diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 1e8147566a..23ef6b81a6 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -31,12 +31,12 @@ type Driver struct { lastTP int lastTC int lastTM int - // indexed by TP - mappings []map[LK]FK - foreignUpdates [][]update - localValSets []ValSet - // indexed by TC - foreignValSets []ValSet + // indexed by time (starting at 0) + mappings []map[LK]FK + foreignUpdates [][]update + localValSets []ValSet + foreignValSets []ValSet + foreignValSetTimes []int } func MakeDriver(t *testing.T, trace []TraceState) Driver { @@ -52,6 +52,7 @@ func MakeDriver(t *testing.T, trace []TraceState) Driver { d.foreignUpdates = [][]update{} d.localValSets = []ValSet{} d.foreignValSets = []ValSet{} + d.foreignValSetTimes = []int{} return d } @@ -105,6 +106,8 @@ func (d *Driver) runTrace() { d.foreignUpdates = append(d.foreignUpdates, d.e.ComputeUpdates(init.TP, init.LocalUpdates)) d.foreignValSets = append(d.foreignValSets, MakeValSet()) d.foreignValSets[init.TC].applyUpdates(d.foreignUpdates[init.TC]) + // The first foreign set equal to the local set at time 0 + d.foreignValSetTimes = append(d.foreignValSetTimes, 0) d.e.Prune(init.TM) } @@ -128,6 +131,7 @@ func (d *Driver) runTrace() { // Duplicate the valSet known at lastTC for j := d.lastTC + 1; j <= s.TC; j++ { d.foreignValSets = append(d.foreignValSets, MakeValSet()) + d.foreignValSetTimes = append(d.foreignValSetTimes, d.lastTC) for fk, power := range d.foreignValSets[d.lastTC].keyToPower { d.foreignValSets[j].keyToPower[fk] = power } @@ -137,6 +141,7 @@ func (d *Driver) 
runTrace() { for j := d.lastTC + 1; j <= s.TC; j++ { d.foreignValSets[s.TC].applyUpdates(d.foreignUpdates[j]) } + d.foreignValSetTimes[s.TC] = s.TC d.lastTC = s.TC } if d.lastTM < s.TM { @@ -234,15 +239,18 @@ func (d *Driver) checkProperties() { /* */ queries := func() { - // For each vscid that the consumer has received, but not yet - // matured + // For each possible validator set on the consumer, since the latest + // maturity. for i := d.lastTM + 1; i <= d.lastTC; i++ { for consumerFK := range d.foreignValSets[i].keyToPower { queriedLK, err := d.e.GetLocal(consumerFK) // There must be a corresponding local key require.Nil(d.t, err) providerFKs := map[FK]bool{} - for providerLK, providerFK := range d.mappings[i] { + // Check that the local key was indeed the key used at the time + // corresponding to the foreign set. + mapping := d.mappings[d.foreignValSetTimes[i]] + for providerLK, providerFK := range mapping { require.Falsef(d.t, providerFKs[providerFK], "two local keys map to the same foreign key") providerFKs[providerFK] = true if consumerFK == providerFK { @@ -251,9 +259,8 @@ func (d *Driver) checkProperties() { require.Equal(d.t, providerLK, queriedLK) } } - good := providerFKs[consumerFK] // Check that the comparison was actually made! - require.Truef(d.t, good, "no mapping found for foreign key") + require.Truef(d.t, providerFKs[consumerFK], "no mapping found for foreign key") } } From 0f0c40d931ecc86bde8a9ed7c07b3e0af4ebf454 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 15:47:33 -0500 Subject: [PATCH 063/127] field RN --- x/ccv/provider/keydel/keydel_test.go | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 23ef6b81a6..04a2256fd9 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -32,11 +32,11 @@ type Driver struct { lastTC int lastTM int // indexed by time (starting at 0) - mappings []map[LK]FK - foreignUpdates [][]update - localValSets []ValSet - foreignValSets []ValSet - foreignValSetTimes []int + mappings []map[LK]FK + foreignUpdates [][]update + localValSets []ValSet + foreignValSets []ValSet + foreignValSetT []int } func MakeDriver(t *testing.T, trace []TraceState) Driver { @@ -52,7 +52,7 @@ func MakeDriver(t *testing.T, trace []TraceState) Driver { d.foreignUpdates = [][]update{} d.localValSets = []ValSet{} d.foreignValSets = []ValSet{} - d.foreignValSetTimes = []int{} + d.foreignValSetT = []int{} return d } @@ -107,7 +107,7 @@ func (d *Driver) runTrace() { d.foreignValSets = append(d.foreignValSets, MakeValSet()) d.foreignValSets[init.TC].applyUpdates(d.foreignUpdates[init.TC]) // The first foreign set equal to the local set at time 0 - d.foreignValSetTimes = append(d.foreignValSetTimes, 0) + d.foreignValSetT = append(d.foreignValSetT, 0) d.e.Prune(init.TM) } @@ -129,19 +129,23 @@ func (d *Driver) runTrace() { } if d.lastTC < s.TC { // Duplicate the valSet known at lastTC - for j := d.lastTC + 1; j <= s.TC; j++ { + for j := d.lastTC + 1; j < s.TC; j++ { + d.foreignValSetT = append(d.foreignValSetT, d.lastTC) d.foreignValSets = append(d.foreignValSets, MakeValSet()) - d.foreignValSetTimes = append(d.foreignValSetTimes, d.lastTC) for fk, power := range d.foreignValSets[d.lastTC].keyToPower { d.foreignValSets[j].keyToPower[fk] = power } } // Apply the updates since lastTC ONLY TO s.TC // This models the consumer receiving updates from several blocks at once + d.foreignValSetT = 
append(d.foreignValSetT, s.TC) + d.foreignValSets = append(d.foreignValSets, MakeValSet()) + for fk, power := range d.foreignValSets[d.lastTC].keyToPower { + d.foreignValSets[s.TC].keyToPower[fk] = power + } for j := d.lastTC + 1; j <= s.TC; j++ { d.foreignValSets[s.TC].applyUpdates(d.foreignUpdates[j]) } - d.foreignValSetTimes[s.TC] = s.TC d.lastTC = s.TC } if d.lastTM < s.TM { @@ -249,7 +253,7 @@ func (d *Driver) checkProperties() { providerFKs := map[FK]bool{} // Check that the local key was indeed the key used at the time // corresponding to the foreign set. - mapping := d.mappings[d.foreignValSetTimes[i]] + mapping := d.mappings[d.foreignValSetT[i]] for providerLK, providerFK := range mapping { require.Falsef(d.t, providerFKs[providerFK], "two local keys map to the same foreign key") providerFKs[providerFK] = true From 9319d7a9d0a90c6060346494df596973e6b52860 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 16:16:00 -0500 Subject: [PATCH 064/127] cp --- x/ccv/provider/keydel/keydel_test.go | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 04a2256fd9..846b5b92cc 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -1,6 +1,7 @@ package keydel import ( + "fmt" "math/rand" "testing" @@ -104,10 +105,10 @@ func (d *Driver) runTrace() { d.localValSets[init.TP].applyUpdates(init.LocalUpdates) // Set the initial foreign set d.foreignUpdates = append(d.foreignUpdates, d.e.ComputeUpdates(init.TP, init.LocalUpdates)) - d.foreignValSets = append(d.foreignValSets, MakeValSet()) - d.foreignValSets[init.TC].applyUpdates(d.foreignUpdates[init.TC]) // The first foreign set equal to the local set at time 0 d.foreignValSetT = append(d.foreignValSetT, 0) + d.foreignValSets = append(d.foreignValSets, MakeValSet()) + d.foreignValSets[init.TC].applyUpdates(d.foreignUpdates[init.TC]) d.e.Prune(init.TM) } @@ -118,7 +119,11 @@ func (d *Driver) runTrace() { require.Len(d.t, d.foreignValSets, 1) // Check properties for each state after the initial + deb := 1 for _, s := range d.trace[1:] { + fmt.Println("deb:", deb) + deb += 1 + if d.lastTP < s.TP { // Provider time increment: // Apply some key mappings and create some new validator power updates @@ -368,11 +373,14 @@ func getTrace(t *testing.T) []TraceState { func TestPrototype(t *testing.T) { for i := 0; i < 1000; i++ { + rand.Seed(int64(i)) trace := []TraceState{} for len(trace) < 2 { trace = getTrace(t) } d := MakeDriver(t, trace) + fmt.Println(i) d.runTrace() + } } From 6b0c67aaef23c7155c56b32150d64a974a8dde97 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 16:32:20 -0500 Subject: [PATCH 065/127] checkpoint reasoning --- x/ccv/provider/keydel/keydel.go | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 192fe928d8..b4e0ffa670 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -16,6 +16,14 @@ type update struct { // 2. integrate into Provider::EndBlock, // 3. integrate with create/destroy validator +/* +TODO: there is a scenario which invalidates the current design of the system. + +A vsc packet is sent whenever there is an unbonding op of any kind, or val power changes. +It is possible for a validator to be sent with positive power, and the maturity to be received. +This will delete the local key lookup, but it must be kept around. 
+*/ + type KeyDel struct { // A new key is added on staking::CreateValidator // the key is deleted at earliest after sending an update corresponding From ac842e23b0d99dbdd29aed4ae10d9f09df40d6f6 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 16:38:23 -0500 Subject: [PATCH 066/127] CP --- x/ccv/provider/keydel/keydel.go | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index b4e0ffa670..483b750cd7 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -142,12 +142,10 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { lkTLPFU[lk] = u } - // Iterate all local keys for which either the foreign key changed or there - // has been a power update. + // Iterate all local keys for which there was previously a positive update. for _, lk := range lks { if last, ok := e.localToLastPositiveForeignUpdate[lk]; ok { - // If the key has previously been shipped in an update - // delete it. + // Create a deletion update foreignUpdates[last.key] = 0 delete(lkTLPFU, lk) e.usedForeignToLocal[last.key] = lk @@ -167,7 +165,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { if newPower, ok := localUpdates[lk]; ok { power = newPower } - // Only ship positive powers. + // Only ship positive powers. Zero powers are accounted for above. if 0 < power { fk := e.localToForeign[lk] foreignUpdates[fk] = power From cb453d74547d22bcea4c92cfc0cee42bcb12c643 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 16:48:13 -0500 Subject: [PATCH 067/127] Partial improvement --- x/ccv/provider/keydel/keydel.go | 39 +++++++++++++++------------- x/ccv/provider/keydel/keydel_test.go | 7 ++--- 2 files changed, 23 insertions(+), 23 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 483b750cd7..ef6974202b 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -7,7 +7,14 @@ type FK = int type VSCID = int type update struct { - key FK + key int + power int +} + +type lastUpdate struct { + fk FK + lk LK + vscid int power int } @@ -32,10 +39,8 @@ type KeyDel struct { localToForeign map[LK]FK // Is the foreign key mapped to in localToForeign? foreignIsMappedTo map[FK]bool - // Prunable state - usedForeignToLocal map[FK]LK - // Prunable state - usedForeignToLastVSCID map[FK]VSCID + //TODO: + usedForeignToLastUpdate map[FK]lastUpdate // A new key is added when a relevant update is returned by ComputeUpdates // the key is deleted at earliest after sending an update corresponding // to a call to staking::DeleteValidator @@ -46,8 +51,7 @@ func MakeKeyDel() KeyDel { return KeyDel{ localToForeign: map[LK]FK{}, foreignIsMappedTo: map[FK]bool{}, - usedForeignToLocal: map[FK]LK{}, - usedForeignToLastVSCID: map[FK]VSCID{}, + usedForeignToLastUpdate: map[FK]lastUpdate{}, localToLastPositiveForeignUpdate: map[LK]update{}, } } @@ -57,7 +61,7 @@ func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { return errors.New(`cannot use foreign key which is already currently associated to a local key`) } - if _, ok := e.usedForeignToLocal[fk]; ok { + if _, ok := e.usedForeignToLastUpdate[fk]; ok { // We prevent reusing foreign keys which are still used for local // key lookups. 
Otherwise it would be possible for a local key A // to commit an infraction under the foreign key X and change @@ -77,8 +81,8 @@ func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { func (e *KeyDel) GetLocal(fk FK) (LK, error) { // TODO: make it possible lookup local keys even // when the foreign key has not yet been used? - if lk, ok := e.usedForeignToLocal[fk]; ok { - return lk, nil + if u, ok := e.usedForeignToLastUpdate[fk]; ok { + return u.lk, nil } else { return -1, errors.New("local key not found for foreign key") } @@ -86,14 +90,13 @@ func (e *KeyDel) GetLocal(fk FK) (LK, error) { func (e *KeyDel) Prune(mostRecentlyMaturedVscid VSCID) { toRemove := []FK{} - for fk, vscid := range e.usedForeignToLastVSCID { - if vscid <= mostRecentlyMaturedVscid { + for fk, u := range e.usedForeignToLastUpdate { + if u.vscid <= mostRecentlyMaturedVscid { toRemove = append(toRemove, fk) } } for _, fk := range toRemove { - delete(e.usedForeignToLocal, fk) - delete(e.usedForeignToLastVSCID, fk) + delete(e.usedForeignToLastUpdate, fk) } } @@ -148,8 +151,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // Create a deletion update foreignUpdates[last.key] = 0 delete(lkTLPFU, lk) - e.usedForeignToLocal[last.key] = lk - e.usedForeignToLastVSCID[last.key] = vscid + e.usedForeignToLastUpdate[last.key] = lastUpdate{fk: last.key, lk: lk, vscid: vscid, power: 0} } } @@ -170,11 +172,12 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { fk := e.localToForeign[lk] foreignUpdates[fk] = power lkTLPFU[lk] = update{key: fk, power: power} - e.usedForeignToLocal[fk] = lk - e.usedForeignToLastVSCID[fk] = vscid + e.usedForeignToLastUpdate[fk] = lastUpdate{fk: fk, lk: lk, vscid: vscid, power: power} } } + // TODO: I can replace RHS with some logic which does addition/deletion based on + // power in e.usedForeignToLastUpdate e.localToLastPositiveForeignUpdate = lkTLPFU return foreignUpdates diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 846b5b92cc..d6fcc005b8 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -235,12 +235,9 @@ func (d *Driver) checkProperties() { for fk := 0; fk < NUM_FKS; fk++ { _, expect := expectQueryable[fk] - // Check foreign to local lookup is available (or not) - _, actual := d.e.usedForeignToLocal[fk] - require.Equal(d.t, expect, actual) + // Chech that lookup is available + _, actual := d.e.usedForeignToLastUpdate[fk] - // Check internals are consistent - _, actual = d.e.usedForeignToLastVSCID[fk] require.Equal(d.t, expect, actual) } } From d014bf4c43c9b2c80d4819f9be5a9dbbfb0719dd Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 16:55:02 -0500 Subject: [PATCH 068/127] CP --- x/ccv/provider/keydel/keydel.go | 2 +- x/ccv/provider/keydel/keydel_test.go | 10 ++++++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index ef6974202b..42d5c54c69 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -91,7 +91,7 @@ func (e *KeyDel) GetLocal(fk FK) (LK, error) { func (e *KeyDel) Prune(mostRecentlyMaturedVscid VSCID) { toRemove := []FK{} for fk, u := range e.usedForeignToLastUpdate { - if u.vscid <= mostRecentlyMaturedVscid { + if u.vscid <= mostRecentlyMaturedVscid && u.power == 0 { toRemove = append(toRemove, fk) } } diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index d6fcc005b8..19a88ef99d 
100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -206,6 +206,7 @@ func (d *Driver) checkProperties() { /* Two more properties which must be satisfied by KeyDel when used correctly inside a wider system: + TODO: fix this description 1. If a foreign key IS used in an update for the consumer, with a positive power, at VSCID i, then the local key associated to it must be queryable @@ -221,14 +222,15 @@ func (d *Driver) checkProperties() { pruning := func() { expectQueryable := map[FK]bool{} + for i := 0; i <= d.lastTM; i++ { + for _, u := range d.foreignUpdates[i] { + expectQueryable[u.key] = 0 < u.power + } + } for i := d.lastTM + 1; i <= d.lastTP; i++ { - // If the foreign key was used, recently, and did not mature - /// then we expect it to be queryable (for slashing). for _, u := range d.foreignUpdates[i] { expectQueryable[u.key] = true } - // Otherwise, it was not used, or was used a long time ago - // (after maturity). Then we expect it to be pruned. } // Simply check every foreign key for the correct queryable-ness. From e2c9cc3bb3e9aeb8d2263d26ba4394a7ffcd8a2f Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 17:00:25 -0500 Subject: [PATCH 069/127] cp --- x/ccv/provider/keydel/keydel.go | 20 ++++++++++---------- x/ccv/provider/keydel/keydel_test.go | 4 ++-- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 42d5c54c69..3e8846640c 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -40,7 +40,7 @@ type KeyDel struct { // Is the foreign key mapped to in localToForeign? foreignIsMappedTo map[FK]bool //TODO: - usedForeignToLastUpdate map[FK]lastUpdate + foreignToLastUpdate map[FK]lastUpdate // A new key is added when a relevant update is returned by ComputeUpdates // the key is deleted at earliest after sending an update corresponding // to a call to staking::DeleteValidator @@ -51,7 +51,7 @@ func MakeKeyDel() KeyDel { return KeyDel{ localToForeign: map[LK]FK{}, foreignIsMappedTo: map[FK]bool{}, - usedForeignToLastUpdate: map[FK]lastUpdate{}, + foreignToLastUpdate: map[FK]lastUpdate{}, localToLastPositiveForeignUpdate: map[LK]update{}, } } @@ -61,7 +61,7 @@ func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { return errors.New(`cannot use foreign key which is already currently associated to a local key`) } - if _, ok := e.usedForeignToLastUpdate[fk]; ok { + if _, ok := e.foreignToLastUpdate[fk]; ok { // We prevent reusing foreign keys which are still used for local // key lookups. Otherwise it would be possible for a local key A // to commit an infraction under the foreign key X and change @@ -81,22 +81,22 @@ func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { func (e *KeyDel) GetLocal(fk FK) (LK, error) { // TODO: make it possible lookup local keys even // when the foreign key has not yet been used? 
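The TODO just above notes that GetLocal only resolves foreign keys that have already been shipped in an update. A minimal test-style sketch of that behaviour, assuming it lives in its own file in the keydel package with the same testing and testify/require imports used by keydel_test.go; the function name and the concrete values (lk 2, fk 50, power 3) are illustrative assumptions.

package keydel

import (
	"testing"

	"github.com/stretchr/testify/require"
)

func TestLookupOnlyAfterFirstUpdate(t *testing.T) {
	e := MakeKeyDel()
	// Map lk 2 to fk 50. The mapping alone does not make the reverse
	// lookup available, because fk 50 has not been shipped in any update.
	require.Nil(t, e.SetLocalToForeign(2, 50))
	_, err := e.GetLocal(50)
	require.NotNil(t, err)
	// After the first update that ships fk 50, the lookup resolves to lk 2.
	e.inner(0, map[LK]int{2: 3})
	lk, err := e.GetLocal(50)
	require.Nil(t, err)
	require.Equal(t, 2, lk)
}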
- if u, ok := e.usedForeignToLastUpdate[fk]; ok { + if u, ok := e.foreignToLastUpdate[fk]; ok { return u.lk, nil } else { return -1, errors.New("local key not found for foreign key") } } -func (e *KeyDel) Prune(mostRecentlyMaturedVscid VSCID) { +func (e *KeyDel) Prune(vscid VSCID) { toRemove := []FK{} - for fk, u := range e.usedForeignToLastUpdate { - if u.vscid <= mostRecentlyMaturedVscid && u.power == 0 { + for fk, u := range e.foreignToLastUpdate { + if u.vscid <= vscid && u.power == 0 { toRemove = append(toRemove, fk) } } for _, fk := range toRemove { - delete(e.usedForeignToLastUpdate, fk) + delete(e.foreignToLastUpdate, fk) } } @@ -151,7 +151,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // Create a deletion update foreignUpdates[last.key] = 0 delete(lkTLPFU, lk) - e.usedForeignToLastUpdate[last.key] = lastUpdate{fk: last.key, lk: lk, vscid: vscid, power: 0} + e.foreignToLastUpdate[last.key] = lastUpdate{fk: last.key, lk: lk, vscid: vscid, power: 0} } } @@ -172,7 +172,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { fk := e.localToForeign[lk] foreignUpdates[fk] = power lkTLPFU[lk] = update{key: fk, power: power} - e.usedForeignToLastUpdate[fk] = lastUpdate{fk: fk, lk: lk, vscid: vscid, power: power} + e.foreignToLastUpdate[fk] = lastUpdate{fk: fk, lk: lk, vscid: vscid, power: power} } } diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 19a88ef99d..95797cf119 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -158,8 +158,8 @@ func (d *Driver) runTrace() { d.e.Prune(s.TM) d.lastTM = s.TM } - d.checkProperties() require.True(d.t, d.e.internalInvariants()) + d.checkProperties() } } @@ -238,7 +238,7 @@ func (d *Driver) checkProperties() { _, expect := expectQueryable[fk] // Chech that lookup is available - _, actual := d.e.usedForeignToLastUpdate[fk] + _, actual := d.e.foreignToLastUpdate[fk] require.Equal(d.t, expect, actual) } From 706d634ae3c32bc49048b7171f546ad3c464810a Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 17:17:08 -0500 Subject: [PATCH 070/127] Fix queryable --- x/ccv/provider/keydel/keydel.go | 6 +++--- x/ccv/provider/keydel/keydel_test.go | 22 +++++++++++----------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 3e8846640c..c1a392f731 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -90,9 +90,9 @@ func (e *KeyDel) GetLocal(fk FK) (LK, error) { func (e *KeyDel) Prune(vscid VSCID) { toRemove := []FK{} - for fk, u := range e.foreignToLastUpdate { + for _, u := range e.foreignToLastUpdate { if u.vscid <= vscid && u.power == 0 { - toRemove = append(toRemove, fk) + toRemove = append(toRemove, u.fk) } } for _, fk := range toRemove { @@ -177,7 +177,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { } // TODO: I can replace RHS with some logic which does addition/deletion based on - // power in e.usedForeignToLastUpdate + // power in e.usedForeignToLastUpdate?? 
e.localToLastPositiveForeignUpdate = lkTLPFU return foreignUpdates diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 95797cf119..194ac97ad6 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -119,10 +119,10 @@ func (d *Driver) runTrace() { require.Len(d.t, d.foreignValSets, 1) // Check properties for each state after the initial - deb := 1 + deb := 0 for _, s := range d.trace[1:] { - fmt.Println("deb:", deb) - deb += 1 + fmt.Println(deb) + deb++ if d.lastTP < s.TP { // Provider time increment: @@ -235,12 +235,13 @@ func (d *Driver) checkProperties() { // Simply check every foreign key for the correct queryable-ness. for fk := 0; fk < NUM_FKS; fk++ { - _, expect := expectQueryable[fk] - - // Chech that lookup is available - _, actual := d.e.foreignToLastUpdate[fk] - - require.Equal(d.t, expect, actual) + _, err := d.e.GetLocal(fk) + actualQueryable := err == nil + if expect, found := expectQueryable[fk]; found && expect { + require.True(d.t, actualQueryable) + } else { + require.False(d.t, actualQueryable) + } } } @@ -371,14 +372,13 @@ func getTrace(t *testing.T) []TraceState { } func TestPrototype(t *testing.T) { + rand.Seed(8) for i := 0; i < 1000; i++ { - rand.Seed(int64(i)) trace := []TraceState{} for len(trace) < 2 { trace = getTrace(t) } d := MakeDriver(t, trace) - fmt.Println(i) d.runTrace() } From df2efa3df3f3711d3218a2ed6765fe93fbee201a Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 17:39:46 -0500 Subject: [PATCH 071/127] pre rework lastUpdate --- x/ccv/provider/keydel/keydel.go | 4 +++- x/ccv/provider/keydel/keydel_test.go | 10 +++++++--- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index c1a392f731..72480708b0 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -1,6 +1,8 @@ package keydel -import "errors" +import ( + "errors" +) type LK = int type FK = int diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 194ac97ad6..b8ba4b3eb6 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -8,7 +8,8 @@ import ( "github.com/stretchr/testify/require" ) -const TRACE_LEN = 1000 +const NUM_TRACES = 100000 +const TRACE_LEN = 5 const NUM_VALS = 4 const NUM_FKS = 50 @@ -372,8 +373,11 @@ func getTrace(t *testing.T) []TraceState { } func TestPrototype(t *testing.T) { - rand.Seed(8) - for i := 0; i < 1000; i++ { + rand.Seed(7337) + for i := 0; i < NUM_TRACES; i++ { + // rand.Seed(int64(i)) + fmt.Println(i) + trace := []TraceState{} for len(trace) < 2 { trace = getTrace(t) From 84fdedec898de4127ee6c2b9f70bad66197c65f6 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 17:43:10 -0500 Subject: [PATCH 072/127] pre rework --- x/ccv/provider/keydel/keydel.go | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 72480708b0..3b84bd78a5 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -26,11 +26,10 @@ type lastUpdate struct { // 3. integrate with create/destroy validator /* -TODO: there is a scenario which invalidates the current design of the system. - -A vsc packet is sent whenever there is an unbonding op of any kind, or val power changes. -It is possible for a validator to be sent with positive power, and the maturity to be received. 
-This will delete the local key lookup, but it must be kept around. +There is a bug: +You send a positive power update +You send a 0 power update +Prune is called with positive power vscid and succeeds because associated power is 0 */ type KeyDel struct { From a193a21c2e24eb1d2ec6106ec4e39b33078d4a47 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 17:58:28 -0500 Subject: [PATCH 073/127] cp --- x/ccv/provider/keydel/keydel.go | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 3b84bd78a5..d53e492d59 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -27,8 +27,10 @@ type lastUpdate struct { /* There is a bug: -You send a positive power update -You send a 0 power update +You send a positive power update at vsc 0 +You send a 0 power update at vsc 1 +Prune is called with vscid 1 + Prune is called with positive power vscid and succeeds because associated power is 0 */ From 6b7416ca608f8ae4c7ba37d679997b60d9fa36c9 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 18:12:47 -0500 Subject: [PATCH 074/127] Tests pass --- x/ccv/provider/keydel/keydel.go | 19 ++--- x/ccv/provider/keydel/keydel_test.go | 108 ++++++++++----------------- 2 files changed, 45 insertions(+), 82 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index d53e492d59..69a915ab20 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -25,15 +25,6 @@ type lastUpdate struct { // 2. integrate into Provider::EndBlock, // 3. integrate with create/destroy validator -/* -There is a bug: -You send a positive power update at vsc 0 -You send a 0 power update at vsc 1 -Prune is called with vscid 1 - -Prune is called with positive power vscid and succeeds because associated power is 0 -*/ - type KeyDel struct { // A new key is added on staking::CreateValidator // the key is deleted at earliest after sending an update corresponding @@ -52,9 +43,10 @@ type KeyDel struct { func MakeKeyDel() KeyDel { return KeyDel{ - localToForeign: map[LK]FK{}, - foreignIsMappedTo: map[FK]bool{}, - foreignToLastUpdate: map[FK]lastUpdate{}, + localToForeign: map[LK]FK{}, + foreignIsMappedTo: map[FK]bool{}, + foreignToLastUpdate: map[FK]lastUpdate{}, + // TODO: can compute necessary logic from this field from foreignToLastUpdate localToLastPositiveForeignUpdate: map[LK]update{}, } } @@ -94,6 +86,9 @@ func (e *KeyDel) GetLocal(fk FK) (LK, error) { func (e *KeyDel) Prune(vscid VSCID) { toRemove := []FK{} for _, u := range e.foreignToLastUpdate { + // If the last update has matured, and that + // update was a deletion (0 power), pruning + // is possible. 
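The comment added here pins down the pruning rule: a foreign key may only be pruned once its last update was a deletion (0 power) and that deletion has matured. A test-style sketch of the intended behaviour, under the same package and import assumptions as the sketch above; the function name and the values (lk 0, fk 42, power 5) are illustrative assumptions.

// Assumes package keydel with the testing/require imports shown earlier.
func TestPruneOnlyAfterMaturedDeletion(t *testing.T) {
	e := MakeKeyDel()
	require.Nil(t, e.SetLocalToForeign(0, 42)) // lk 0 -> fk 42
	e.inner(0, map[LK]int{0: 5})               // vscid 0 ships fk 42 with power 5
	e.Prune(0)                                 // vscid 0 matures: last update is positive, keep the lookup
	_, err := e.GetLocal(42)
	require.Nil(t, err)
	e.inner(1, map[LK]int{0: 0}) // vscid 1 ships a deletion (0 power) for fk 42
	e.Prune(0)                   // the deletion has not matured yet, keep the lookup
	_, err = e.GetLocal(42)
	require.Nil(t, err)
	e.Prune(1) // the deletion has matured, fk 42 can be pruned
	_, err = e.GetLocal(42)
	require.NotNil(t, err)
}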
if u.vscid <= vscid && u.power == 0 { toRemove = append(toRemove, u.fk) } diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index b8ba4b3eb6..202bd80061 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -1,15 +1,14 @@ package keydel import ( - "fmt" "math/rand" "testing" "github.com/stretchr/testify/require" ) -const NUM_TRACES = 100000 -const TRACE_LEN = 5 +const NUM_TRACES = 4000 +const TRACE_LEN = 1000 const NUM_VALS = 4 const NUM_FKS = 50 @@ -37,8 +36,7 @@ type Driver struct { mappings []map[LK]FK foreignUpdates [][]update localValSets []ValSet - foreignValSets []ValSet - foreignValSetT []int + foreignValSet ValSet } func MakeDriver(t *testing.T, trace []TraceState) Driver { @@ -53,8 +51,7 @@ func MakeDriver(t *testing.T, trace []TraceState) Driver { d.mappings = []map[LK]FK{} d.foreignUpdates = [][]update{} d.localValSets = []ValSet{} - d.foreignValSets = []ValSet{} - d.foreignValSetT = []int{} + d.foreignValSet = ValSet{} return d } @@ -107,9 +104,8 @@ func (d *Driver) runTrace() { // Set the initial foreign set d.foreignUpdates = append(d.foreignUpdates, d.e.ComputeUpdates(init.TP, init.LocalUpdates)) // The first foreign set equal to the local set at time 0 - d.foreignValSetT = append(d.foreignValSetT, 0) - d.foreignValSets = append(d.foreignValSets, MakeValSet()) - d.foreignValSets[init.TC].applyUpdates(d.foreignUpdates[init.TC]) + d.foreignValSet = MakeValSet() + d.foreignValSet.applyUpdates(d.foreignUpdates[init.TC]) d.e.Prune(init.TM) } @@ -117,14 +113,9 @@ func (d *Driver) runTrace() { require.Len(d.t, d.mappings, 1) require.Len(d.t, d.foreignUpdates, 1) require.Len(d.t, d.localValSets, 1) - require.Len(d.t, d.foreignValSets, 1) // Check properties for each state after the initial - deb := 0 for _, s := range d.trace[1:] { - fmt.Println(deb) - deb++ - if d.lastTP < s.TP { // Provider time increment: // Apply some key mappings and create some new validator power updates @@ -134,23 +125,8 @@ func (d *Driver) runTrace() { d.lastTP = s.TP } if d.lastTC < s.TC { - // Duplicate the valSet known at lastTC - for j := d.lastTC + 1; j < s.TC; j++ { - d.foreignValSetT = append(d.foreignValSetT, d.lastTC) - d.foreignValSets = append(d.foreignValSets, MakeValSet()) - for fk, power := range d.foreignValSets[d.lastTC].keyToPower { - d.foreignValSets[j].keyToPower[fk] = power - } - } - // Apply the updates since lastTC ONLY TO s.TC - // This models the consumer receiving updates from several blocks at once - d.foreignValSetT = append(d.foreignValSetT, s.TC) - d.foreignValSets = append(d.foreignValSets, MakeValSet()) - for fk, power := range d.foreignValSets[d.lastTC].keyToPower { - d.foreignValSets[s.TC].keyToPower[fk] = power - } for j := d.lastTC + 1; j <= s.TC; j++ { - d.foreignValSets[s.TC].applyUpdates(d.foreignUpdates[j]) + d.foreignValSet.applyUpdates(d.foreignUpdates[j]) } d.lastTC = s.TC } @@ -175,7 +151,7 @@ func (d *Driver) checkProperties() { validatorSetReplication := func() { // Get the current consumer val set. - foreignSet := d.foreignValSets[d.lastTC].keyToPower + foreignSet := d.foreignValSet.keyToPower // Get the provider set at the corresponding time. localSet := d.localValSets[d.lastTC].keyToPower @@ -207,18 +183,17 @@ func (d *Driver) checkProperties() { /* Two more properties which must be satisfied by KeyDel when used correctly inside a wider system: - TODO: fix this description - - 1. 
If a foreign key IS used in an update for the consumer, with a positive - power, at VSCID i, then the local key associated to it must be queryable - until i is matured. (Consumer Initiated Slashing Property). Phrased another - way: foreign keys which are known to the consumer may be useable for slashing - until the corresponding maturity is received. Thus they must be queryable. - 2. If a foreign key IS NOT used in an update for a consumer for a VSCID j - with i < j, and i is matured, then the foreign key is deleted from storage. - (Pruning property). Phrased another way: i matured, and there was no update - after i that references the foreign key. The foreign key cannot be used - for slashing anymore, so it can and should be pruned. + + 1. (Consumer Initiated Slashing Property) If a foreign key IS used in an update + for the consumer, with a positive power, at VSCID i, and no 0 power update + follows, then the local key associated to it must be queryable. + Phrased another way: foreign keys which are known to the consumer must be + useable for slashing indefinitely. + 2. (Pruning) If a foreign key IS NOT used in an update for a VSCID j with i < j, + and i is a 0 power update and has matured, then the foreign key is deleted + from storage. + Phrased another way: if the last 0 power update has matured, the key should + be pruned. */ pruning := func() { expectQueryable := map[FK]bool{} @@ -249,30 +224,27 @@ func (d *Driver) checkProperties() { /* */ queries := func() { - // For each possible validator set on the consumer, since the latest - // maturity. - for i := d.lastTM + 1; i <= d.lastTC; i++ { - for consumerFK := range d.foreignValSets[i].keyToPower { - queriedLK, err := d.e.GetLocal(consumerFK) - // There must be a corresponding local key - require.Nil(d.t, err) - providerFKs := map[FK]bool{} - // Check that the local key was indeed the key used at the time - // corresponding to the foreign set. - mapping := d.mappings[d.foreignValSetT[i]] - for providerLK, providerFK := range mapping { - require.Falsef(d.t, providerFKs[providerFK], "two local keys map to the same foreign key") - providerFKs[providerFK] = true - if consumerFK == providerFK { - // A mapping to the consumer FK was found - // The corresponding LK must be the one queried. - require.Equal(d.t, providerLK, queriedLK) - } + // For each fk known to the consumer + for consumerFK := range d.foreignValSet.keyToPower { + queriedLK, err := d.e.GetLocal(consumerFK) + // There must be a corresponding local key + require.Nil(d.t, err) + providerFKs := map[FK]bool{} + // The local key must be the one that was actually referenced + // in the latest mapping used to compute updates sent to the + // consumer. + mapping := d.mappings[d.lastTC] + for providerLK, providerFK := range mapping { + require.Falsef(d.t, providerFKs[providerFK], "two local keys map to the same foreign key") + providerFKs[providerFK] = true + if consumerFK == providerFK { + // A mapping to the consumer FK was found + // The corresponding LK must be the one queried. + require.Equal(d.t, providerLK, queriedLK) } - // Check that the comparison was actually made! - require.Truef(d.t, providerFKs[consumerFK], "no mapping found for foreign key") } - + // Check that the comparison was actually made! 
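The query property exercised here says that a foreign key known to the consumer keeps resolving to the local key that was mapped to it when the corresponding update was computed, even if the provider has since remapped that local key. A small sketch of that guarantee, under the same package and import assumptions as above; the function name and the values are illustrative assumptions.

// Assumes package keydel with the testing/require imports shown earlier.
func TestLookupSurvivesRemapping(t *testing.T) {
	e := MakeKeyDel()
	require.Nil(t, e.SetLocalToForeign(7, 42)) // lk 7 -> fk 42
	e.inner(0, map[LK]int{7: 5})               // the consumer learns fk 42
	require.Nil(t, e.SetLocalToForeign(7, 43)) // provider remaps lk 7 -> fk 43
	// fk 42 is still the key the consumer would cite in slashing evidence,
	// so it must still resolve to lk 7 until its deletion matures.
	lk, err := e.GetLocal(42)
	require.Nil(t, err)
	require.Equal(t, 7, lk)
}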
+ require.Truef(d.t, providerFKs[consumerFK], "no mapping found for foreign key") } } @@ -373,11 +345,7 @@ func getTrace(t *testing.T) []TraceState { } func TestPrototype(t *testing.T) { - rand.Seed(7337) for i := 0; i < NUM_TRACES; i++ { - // rand.Seed(int64(i)) - fmt.Println(i) - trace := []TraceState{} for len(trace) < 2 { trace = getTrace(t) From 6376ff4103d51ea32a0d6df2c78affb5006f9960 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 18:46:28 -0500 Subject: [PATCH 075/127] CP --- x/ccv/provider/keydel/keydel.go | 156 +++++++++++++-------------- x/ccv/provider/keydel/keydel_test.go | 2 +- 2 files changed, 73 insertions(+), 85 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 69a915ab20..3381c4b907 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -13,7 +13,7 @@ type update struct { power int } -type lastUpdate struct { +type timedUpdate struct { fk FK lk LK vscid int @@ -26,57 +26,46 @@ type lastUpdate struct { // 3. integrate with create/destroy validator type KeyDel struct { - // A new key is added on staking::CreateValidator - // the key is deleted at earliest after sending an update corresponding - // to a call to staking::DeleteValidator - // At most one local key can map to a given foreign key - localToForeign map[LK]FK - // Is the foreign key mapped to in localToForeign? - foreignIsMappedTo map[FK]bool - //TODO: - foreignToLastUpdate map[FK]lastUpdate - // A new key is added when a relevant update is returned by ComputeUpdates - // the key is deleted at earliest after sending an update corresponding - // to a call to staking::DeleteValidator - localToLastPositiveForeignUpdate map[LK]update + lkToCurrFk map[LK]FK + lkToLastFk map[LK]FK + fkInUse map[FK]bool + fkToUpdate map[FK]timedUpdate } func MakeKeyDel() KeyDel { return KeyDel{ - localToForeign: map[LK]FK{}, - foreignIsMappedTo: map[FK]bool{}, - foreignToLastUpdate: map[FK]lastUpdate{}, - // TODO: can compute necessary logic from this field from foreignToLastUpdate - localToLastPositiveForeignUpdate: map[LK]update{}, + lkToCurrFk: map[LK]FK{}, + lkToLastFk: map[LK]FK{}, + fkInUse: map[FK]bool{}, + fkToUpdate: map[FK]timedUpdate{}, } } func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { - if _, ok := e.foreignIsMappedTo[fk]; ok { - return errors.New(`cannot use foreign key which is - already currently associated to a local key`) - } - if _, ok := e.foreignToLastUpdate[fk]; ok { - // We prevent reusing foreign keys which are still used for local - // key lookups. Otherwise it would be possible for a local key A - // to commit an infraction under the foreign key X and change - // the mapping of foreign key X to a local key B before evidence - // arrives. 
- return errors.New(`cannot reuse foreign key which was associated to - a different local key and which is still queryable`) - } - if otherFk, ok := e.localToForeign[lk]; ok { - delete(e.foreignIsMappedTo, otherFk) - } - e.localToForeign[lk] = fk - e.foreignIsMappedTo[fk] = true + + inUse := false + if _, ok := e.fkInUse[fk]; ok { + inUse = true + } + if _, ok := e.fkToUpdate[fk]; ok { + inUse = true + } + if inUse { + return errors.New(`cannot reuse foreign key which is still in use for + local key lookups`) + } + if otherFk, ok := e.lkToCurrFk[lk]; ok { + delete(e.fkInUse, otherFk) + } + e.lkToCurrFk[lk] = fk + e.fkInUse[fk] = true return nil } func (e *KeyDel) GetLocal(fk FK) (LK, error) { - // TODO: make it possible lookup local keys even - // when the foreign key has not yet been used? - if u, ok := e.foreignToLastUpdate[fk]; ok { + // TODO: implement lookup for keys currently mapped + // but that have not yet been used to compute an update + if u, ok := e.fkToUpdate[fk]; ok { return u.lk, nil } else { return -1, errors.New("local key not found for foreign key") @@ -84,17 +73,15 @@ func (e *KeyDel) GetLocal(fk FK) (LK, error) { } func (e *KeyDel) Prune(vscid VSCID) { - toRemove := []FK{} - for _, u := range e.foreignToLastUpdate { - // If the last update has matured, and that - // update was a deletion (0 power), pruning - // is possible. - if u.vscid <= vscid && u.power == 0 { - toRemove = append(toRemove, u.fk) + toDel := []FK{} + for _, u := range e.fkToUpdate { + // If the last update was a deletion, and it has matured. + if u.power == 0 && u.vscid <= vscid { + toDel = append(toDel, u.fk) } } - for _, fk := range toRemove { - delete(e.foreignToLastUpdate, fk) + for _, fk := range toDel { + delete(e.fkToUpdate, fk) } } @@ -121,35 +108,38 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { lks := []LK{} - // Key changes - for lk, newFk := range e.localToForeign { - if u, ok := e.localToLastPositiveForeignUpdate[lk]; ok { - oldFk := u.key - if oldFk != newFk { + // Grab all local keys for which the foreign key changed + for lk, currFk := range e.lkToCurrFk { + if lastFk, ok := e.lkToLastFk[lk]; ok { + u := e.fkToUpdate[lastFk] + if 0 < u.power && lastFk != currFk { + // Has the key changed? lks = append(lks, lk) } } } - // Power changes + // Grab all local keys for which there was a power update for lk := range localUpdates { lks = append(lks, lk) } - foreignUpdates := map[FK]int{} + ret := map[FK]int{} // Make a temporary copy - lkTLPFU := map[LK]update{} - for lk, u := range e.localToLastPositiveForeignUpdate { - lkTLPFU[lk] = u + lkToLastFkCopy := map[LK]FK{} + for lk, fk := range e.lkToLastFk { + lkToLastFkCopy[lk] = fk } // Iterate all local keys for which there was previously a positive update. for _, lk := range lks { - if last, ok := e.localToLastPositiveForeignUpdate[lk]; ok { - // Create a deletion update - foreignUpdates[last.key] = 0 - delete(lkTLPFU, lk) - e.foreignToLastUpdate[last.key] = lastUpdate{fk: last.key, lk: lk, vscid: vscid, power: 0} + if fk, ok := e.lkToLastFk[lk]; ok { + if 0 < e.fkToUpdate[fk].power { + // Create a deletion update + ret[fk] = 0 + lkToLastFkCopy[lk] = fk + e.fkToUpdate[fk] = timedUpdate{fk: fk, lk: lk, vscid: vscid, power: 0} + } } } @@ -157,9 +147,8 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // has been a power update. for _, lk := range lks { power := 0 - if last, ok := e.localToLastPositiveForeignUpdate[lk]; ok { - // If there was a positive power before, use it. 
- power = last.power + if fk, ok := e.lkToLastFk[lk]; ok { + power = e.fkToUpdate[fk].power } // If there is a new power use it. if newPower, ok := localUpdates[lk]; ok { @@ -167,18 +156,17 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { } // Only ship positive powers. Zero powers are accounted for above. if 0 < power { - fk := e.localToForeign[lk] - foreignUpdates[fk] = power - lkTLPFU[lk] = update{key: fk, power: power} - e.foreignToLastUpdate[fk] = lastUpdate{fk: fk, lk: lk, vscid: vscid, power: power} + fk := e.lkToCurrFk[lk] + ret[fk] = power + lkToLastFkCopy[lk] = fk + e.fkToUpdate[fk] = timedUpdate{fk: fk, lk: lk, vscid: vscid, power: power} } } - // TODO: I can replace RHS with some logic which does addition/deletion based on - // power in e.usedForeignToLastUpdate?? - e.localToLastPositiveForeignUpdate = lkTLPFU + // TODO:??? + e.lkToLastFk = lkToLastFkCopy - return foreignUpdates + return ret } // Returns true iff internal invariants hold @@ -186,25 +174,25 @@ func (e *KeyDel) internalInvariants() bool { // No two local keys can map to the same foreign key seen := map[FK]bool{} - for _, fk := range e.localToForeign { + for _, fk := range e.lkToCurrFk { if seen[fk] { return false } seen[fk] = true } - // All foreign keys mapped to by local keys are noted - for _, fk := range e.localToForeign { - if _, ok := e.foreignIsMappedTo[fk]; !ok { + // All foreign keys mapped to by local keys are tracked + for _, fk := range e.lkToCurrFk { + if _, ok := e.fkInUse[fk]; !ok { return false } } - - // All mapped to foreign keys are actually mapped to - for fk := range e.foreignIsMappedTo { + // All tracked foreign keys are actually mapped to + for fk := range e.fkInUse { good := false - for _, mappedFK := range e.localToForeign { - if mappedFK == fk { + for _, candidateFk := range e.lkToCurrFk { + if fk == candidateFk { + // Mapped to by at least one lk good = true break } diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 202bd80061..53bc5cafad 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -77,7 +77,7 @@ func (d *Driver) applyMapInstructions(instructions []mapInstruction) { _ = d.e.SetLocalToForeign(instruction.lk, instruction.fk) } copy := map[LK]FK{} - for lk, fk := range d.e.localToForeign { + for lk, fk := range d.e.lkToCurrFk { copy[lk] = fk } d.mappings = append(d.mappings, copy) From 6631e845e04b84e35624e39a6f85c6068300c407 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 18:48:48 -0500 Subject: [PATCH 076/127] Revert "CP" This reverts commit 6376ff4103d51ea32a0d6df2c78affb5006f9960. --- x/ccv/provider/keydel/keydel.go | 156 ++++++++++++++------------- x/ccv/provider/keydel/keydel_test.go | 2 +- 2 files changed, 85 insertions(+), 73 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 3381c4b907..69a915ab20 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -13,7 +13,7 @@ type update struct { power int } -type timedUpdate struct { +type lastUpdate struct { fk FK lk LK vscid int @@ -26,46 +26,57 @@ type timedUpdate struct { // 3. 
integrate with create/destroy validator type KeyDel struct { - lkToCurrFk map[LK]FK - lkToLastFk map[LK]FK - fkInUse map[FK]bool - fkToUpdate map[FK]timedUpdate + // A new key is added on staking::CreateValidator + // the key is deleted at earliest after sending an update corresponding + // to a call to staking::DeleteValidator + // At most one local key can map to a given foreign key + localToForeign map[LK]FK + // Is the foreign key mapped to in localToForeign? + foreignIsMappedTo map[FK]bool + //TODO: + foreignToLastUpdate map[FK]lastUpdate + // A new key is added when a relevant update is returned by ComputeUpdates + // the key is deleted at earliest after sending an update corresponding + // to a call to staking::DeleteValidator + localToLastPositiveForeignUpdate map[LK]update } func MakeKeyDel() KeyDel { return KeyDel{ - lkToCurrFk: map[LK]FK{}, - lkToLastFk: map[LK]FK{}, - fkInUse: map[FK]bool{}, - fkToUpdate: map[FK]timedUpdate{}, + localToForeign: map[LK]FK{}, + foreignIsMappedTo: map[FK]bool{}, + foreignToLastUpdate: map[FK]lastUpdate{}, + // TODO: can compute necessary logic from this field from foreignToLastUpdate + localToLastPositiveForeignUpdate: map[LK]update{}, } } func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { - - inUse := false - if _, ok := e.fkInUse[fk]; ok { - inUse = true - } - if _, ok := e.fkToUpdate[fk]; ok { - inUse = true - } - if inUse { - return errors.New(`cannot reuse foreign key which is still in use for - local key lookups`) - } - if otherFk, ok := e.lkToCurrFk[lk]; ok { - delete(e.fkInUse, otherFk) - } - e.lkToCurrFk[lk] = fk - e.fkInUse[fk] = true + if _, ok := e.foreignIsMappedTo[fk]; ok { + return errors.New(`cannot use foreign key which is + already currently associated to a local key`) + } + if _, ok := e.foreignToLastUpdate[fk]; ok { + // We prevent reusing foreign keys which are still used for local + // key lookups. Otherwise it would be possible for a local key A + // to commit an infraction under the foreign key X and change + // the mapping of foreign key X to a local key B before evidence + // arrives. + return errors.New(`cannot reuse foreign key which was associated to + a different local key and which is still queryable`) + } + if otherFk, ok := e.localToForeign[lk]; ok { + delete(e.foreignIsMappedTo, otherFk) + } + e.localToForeign[lk] = fk + e.foreignIsMappedTo[fk] = true return nil } func (e *KeyDel) GetLocal(fk FK) (LK, error) { - // TODO: implement lookup for keys currently mapped - // but that have not yet been used to compute an update - if u, ok := e.fkToUpdate[fk]; ok { + // TODO: make it possible lookup local keys even + // when the foreign key has not yet been used? + if u, ok := e.foreignToLastUpdate[fk]; ok { return u.lk, nil } else { return -1, errors.New("local key not found for foreign key") @@ -73,15 +84,17 @@ func (e *KeyDel) GetLocal(fk FK) (LK, error) { } func (e *KeyDel) Prune(vscid VSCID) { - toDel := []FK{} - for _, u := range e.fkToUpdate { - // If the last update was a deletion, and it has matured. - if u.power == 0 && u.vscid <= vscid { - toDel = append(toDel, u.fk) + toRemove := []FK{} + for _, u := range e.foreignToLastUpdate { + // If the last update has matured, and that + // update was a deletion (0 power), pruning + // is possible. 
+ if u.vscid <= vscid && u.power == 0 { + toRemove = append(toRemove, u.fk) } } - for _, fk := range toDel { - delete(e.fkToUpdate, fk) + for _, fk := range toRemove { + delete(e.foreignToLastUpdate, fk) } } @@ -108,38 +121,35 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { lks := []LK{} - // Grab all local keys for which the foreign key changed - for lk, currFk := range e.lkToCurrFk { - if lastFk, ok := e.lkToLastFk[lk]; ok { - u := e.fkToUpdate[lastFk] - if 0 < u.power && lastFk != currFk { - // Has the key changed? + // Key changes + for lk, newFk := range e.localToForeign { + if u, ok := e.localToLastPositiveForeignUpdate[lk]; ok { + oldFk := u.key + if oldFk != newFk { lks = append(lks, lk) } } } - // Grab all local keys for which there was a power update + // Power changes for lk := range localUpdates { lks = append(lks, lk) } - ret := map[FK]int{} + foreignUpdates := map[FK]int{} // Make a temporary copy - lkToLastFkCopy := map[LK]FK{} - for lk, fk := range e.lkToLastFk { - lkToLastFkCopy[lk] = fk + lkTLPFU := map[LK]update{} + for lk, u := range e.localToLastPositiveForeignUpdate { + lkTLPFU[lk] = u } // Iterate all local keys for which there was previously a positive update. for _, lk := range lks { - if fk, ok := e.lkToLastFk[lk]; ok { - if 0 < e.fkToUpdate[fk].power { - // Create a deletion update - ret[fk] = 0 - lkToLastFkCopy[lk] = fk - e.fkToUpdate[fk] = timedUpdate{fk: fk, lk: lk, vscid: vscid, power: 0} - } + if last, ok := e.localToLastPositiveForeignUpdate[lk]; ok { + // Create a deletion update + foreignUpdates[last.key] = 0 + delete(lkTLPFU, lk) + e.foreignToLastUpdate[last.key] = lastUpdate{fk: last.key, lk: lk, vscid: vscid, power: 0} } } @@ -147,8 +157,9 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // has been a power update. for _, lk := range lks { power := 0 - if fk, ok := e.lkToLastFk[lk]; ok { - power = e.fkToUpdate[fk].power + if last, ok := e.localToLastPositiveForeignUpdate[lk]; ok { + // If there was a positive power before, use it. + power = last.power } // If there is a new power use it. if newPower, ok := localUpdates[lk]; ok { @@ -156,17 +167,18 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { } // Only ship positive powers. Zero powers are accounted for above. if 0 < power { - fk := e.lkToCurrFk[lk] - ret[fk] = power - lkToLastFkCopy[lk] = fk - e.fkToUpdate[fk] = timedUpdate{fk: fk, lk: lk, vscid: vscid, power: power} + fk := e.localToForeign[lk] + foreignUpdates[fk] = power + lkTLPFU[lk] = update{key: fk, power: power} + e.foreignToLastUpdate[fk] = lastUpdate{fk: fk, lk: lk, vscid: vscid, power: power} } } - // TODO:??? - e.lkToLastFk = lkToLastFkCopy + // TODO: I can replace RHS with some logic which does addition/deletion based on + // power in e.usedForeignToLastUpdate?? 
+ e.localToLastPositiveForeignUpdate = lkTLPFU - return ret + return foreignUpdates } // Returns true iff internal invariants hold @@ -174,25 +186,25 @@ func (e *KeyDel) internalInvariants() bool { // No two local keys can map to the same foreign key seen := map[FK]bool{} - for _, fk := range e.lkToCurrFk { + for _, fk := range e.localToForeign { if seen[fk] { return false } seen[fk] = true } - // All foreign keys mapped to by local keys are tracked - for _, fk := range e.lkToCurrFk { - if _, ok := e.fkInUse[fk]; !ok { + // All foreign keys mapped to by local keys are noted + for _, fk := range e.localToForeign { + if _, ok := e.foreignIsMappedTo[fk]; !ok { return false } } - // All tracked foreign keys are actually mapped to - for fk := range e.fkInUse { + + // All mapped to foreign keys are actually mapped to + for fk := range e.foreignIsMappedTo { good := false - for _, candidateFk := range e.lkToCurrFk { - if fk == candidateFk { - // Mapped to by at least one lk + for _, mappedFK := range e.localToForeign { + if mappedFK == fk { good = true break } diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 53bc5cafad..202bd80061 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -77,7 +77,7 @@ func (d *Driver) applyMapInstructions(instructions []mapInstruction) { _ = d.e.SetLocalToForeign(instruction.lk, instruction.fk) } copy := map[LK]FK{} - for lk, fk := range d.e.lkToCurrFk { + for lk, fk := range d.e.localToForeign { copy[lk] = fk } d.mappings = append(d.mappings, copy) From 4e624e859b7b453e72a99cbf9967276fdf5f3d72 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 18:49:56 -0500 Subject: [PATCH 077/127] minor --- x/ccv/provider/keydel/keydel.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 69a915ab20..dff9783607 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -203,8 +203,8 @@ func (e *KeyDel) internalInvariants() bool { // All mapped to foreign keys are actually mapped to for fk := range e.foreignIsMappedTo { good := false - for _, mappedFK := range e.localToForeign { - if mappedFK == fk { + for _, candidateFk := range e.localToForeign { + if candidateFk == fk { good = true break } From f83ae911a01bd7223b677e5739d6f41caf2f7c5d Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 18:51:11 -0500 Subject: [PATCH 078/127] Simplify SetLocalToForeign --- x/ccv/provider/keydel/keydel.go | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index dff9783607..903bdaf69c 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -52,18 +52,15 @@ func MakeKeyDel() KeyDel { } func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { + inUse := false if _, ok := e.foreignIsMappedTo[fk]; ok { - return errors.New(`cannot use foreign key which is - already currently associated to a local key`) + inUse = true } if _, ok := e.foreignToLastUpdate[fk]; ok { - // We prevent reusing foreign keys which are still used for local - // key lookups. Otherwise it would be possible for a local key A - // to commit an infraction under the foreign key X and change - // the mapping of foreign key X to a local key B before evidence - // arrives. 
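The rationale spelled out just above is that a still-queryable foreign key must not be pointed at a different local key: otherwise a validator could misbehave under foreign key X and have X re-assigned before evidence arrives. A sketch of the guard, under the same package and import assumptions as above; the function name and values are illustrative assumptions.

// Assumes package keydel with the testing/require imports shown earlier.
func TestCannotReuseQueryableForeignKey(t *testing.T) {
	e := MakeKeyDel()
	require.Nil(t, e.SetLocalToForeign(0, 42)) // lk 0 -> fk 42
	e.inner(0, map[LK]int{0: 5})               // fk 42 is now used for lookups
	require.Nil(t, e.SetLocalToForeign(0, 43)) // lk 0 moves on to a fresh key: allowed
	// fk 42 still resolves to lk 0 for slashing purposes, so re-pointing it
	// at another local key must be rejected until it has been pruned.
	require.NotNil(t, e.SetLocalToForeign(1, 42))
}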
- return errors.New(`cannot reuse foreign key which was associated to - a different local key and which is still queryable`) + inUse = true + } + if inUse { + return errors.New(`cannot reuse foreign key which is currently being used for lookups`) } if otherFk, ok := e.localToForeign[lk]; ok { delete(e.foreignIsMappedTo, otherFk) From c96a8c3f4f02d798284840ac78050786dcf32dd8 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 18:51:54 -0500 Subject: [PATCH 079/127] Symbol renames --- x/ccv/provider/keydel/keydel.go | 65 +++++++++++++--------------- x/ccv/provider/keydel/keydel_test.go | 2 +- 2 files changed, 32 insertions(+), 35 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 903bdaf69c..2048c9aa26 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -30,50 +30,49 @@ type KeyDel struct { // the key is deleted at earliest after sending an update corresponding // to a call to staking::DeleteValidator // At most one local key can map to a given foreign key - localToForeign map[LK]FK + lkToFk map[LK]FK // Is the foreign key mapped to in localToForeign? - foreignIsMappedTo map[FK]bool + fkInUse map[FK]bool //TODO: - foreignToLastUpdate map[FK]lastUpdate + fkToUpdate map[FK]lastUpdate // A new key is added when a relevant update is returned by ComputeUpdates // the key is deleted at earliest after sending an update corresponding // to a call to staking::DeleteValidator - localToLastPositiveForeignUpdate map[LK]update + lkToPositiveUpdate map[LK]update } func MakeKeyDel() KeyDel { return KeyDel{ - localToForeign: map[LK]FK{}, - foreignIsMappedTo: map[FK]bool{}, - foreignToLastUpdate: map[FK]lastUpdate{}, - // TODO: can compute necessary logic from this field from foreignToLastUpdate - localToLastPositiveForeignUpdate: map[LK]update{}, + lkToFk: map[LK]FK{}, + fkInUse: map[FK]bool{}, + fkToUpdate: map[FK]lastUpdate{}, + lkToPositiveUpdate: map[LK]update{}, } } func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { inUse := false - if _, ok := e.foreignIsMappedTo[fk]; ok { + if _, ok := e.fkInUse[fk]; ok { inUse = true } - if _, ok := e.foreignToLastUpdate[fk]; ok { + if _, ok := e.fkToUpdate[fk]; ok { inUse = true } if inUse { return errors.New(`cannot reuse foreign key which is currently being used for lookups`) } - if otherFk, ok := e.localToForeign[lk]; ok { - delete(e.foreignIsMappedTo, otherFk) + if otherFk, ok := e.lkToFk[lk]; ok { + delete(e.fkInUse, otherFk) } - e.localToForeign[lk] = fk - e.foreignIsMappedTo[fk] = true + e.lkToFk[lk] = fk + e.fkInUse[fk] = true return nil } func (e *KeyDel) GetLocal(fk FK) (LK, error) { // TODO: make it possible lookup local keys even // when the foreign key has not yet been used? - if u, ok := e.foreignToLastUpdate[fk]; ok { + if u, ok := e.fkToUpdate[fk]; ok { return u.lk, nil } else { return -1, errors.New("local key not found for foreign key") @@ -82,7 +81,7 @@ func (e *KeyDel) GetLocal(fk FK) (LK, error) { func (e *KeyDel) Prune(vscid VSCID) { toRemove := []FK{} - for _, u := range e.foreignToLastUpdate { + for _, u := range e.fkToUpdate { // If the last update has matured, and that // update was a deletion (0 power), pruning // is possible. 
@@ -91,7 +90,7 @@ func (e *KeyDel) Prune(vscid VSCID) { } } for _, fk := range toRemove { - delete(e.foreignToLastUpdate, fk) + delete(e.fkToUpdate, fk) } } @@ -119,8 +118,8 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { lks := []LK{} // Key changes - for lk, newFk := range e.localToForeign { - if u, ok := e.localToLastPositiveForeignUpdate[lk]; ok { + for lk, newFk := range e.lkToFk { + if u, ok := e.lkToPositiveUpdate[lk]; ok { oldFk := u.key if oldFk != newFk { lks = append(lks, lk) @@ -136,17 +135,17 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // Make a temporary copy lkTLPFU := map[LK]update{} - for lk, u := range e.localToLastPositiveForeignUpdate { + for lk, u := range e.lkToPositiveUpdate { lkTLPFU[lk] = u } // Iterate all local keys for which there was previously a positive update. for _, lk := range lks { - if last, ok := e.localToLastPositiveForeignUpdate[lk]; ok { + if last, ok := e.lkToPositiveUpdate[lk]; ok { // Create a deletion update foreignUpdates[last.key] = 0 delete(lkTLPFU, lk) - e.foreignToLastUpdate[last.key] = lastUpdate{fk: last.key, lk: lk, vscid: vscid, power: 0} + e.fkToUpdate[last.key] = lastUpdate{fk: last.key, lk: lk, vscid: vscid, power: 0} } } @@ -154,7 +153,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // has been a power update. for _, lk := range lks { power := 0 - if last, ok := e.localToLastPositiveForeignUpdate[lk]; ok { + if last, ok := e.lkToPositiveUpdate[lk]; ok { // If there was a positive power before, use it. power = last.power } @@ -164,16 +163,14 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { } // Only ship positive powers. Zero powers are accounted for above. if 0 < power { - fk := e.localToForeign[lk] + fk := e.lkToFk[lk] foreignUpdates[fk] = power lkTLPFU[lk] = update{key: fk, power: power} - e.foreignToLastUpdate[fk] = lastUpdate{fk: fk, lk: lk, vscid: vscid, power: power} + e.fkToUpdate[fk] = lastUpdate{fk: fk, lk: lk, vscid: vscid, power: power} } } - // TODO: I can replace RHS with some logic which does addition/deletion based on - // power in e.usedForeignToLastUpdate?? 
- e.localToLastPositiveForeignUpdate = lkTLPFU + e.lkToPositiveUpdate = lkTLPFU return foreignUpdates } @@ -183,7 +180,7 @@ func (e *KeyDel) internalInvariants() bool { // No two local keys can map to the same foreign key seen := map[FK]bool{} - for _, fk := range e.localToForeign { + for _, fk := range e.lkToFk { if seen[fk] { return false } @@ -191,16 +188,16 @@ func (e *KeyDel) internalInvariants() bool { } // All foreign keys mapped to by local keys are noted - for _, fk := range e.localToForeign { - if _, ok := e.foreignIsMappedTo[fk]; !ok { + for _, fk := range e.lkToFk { + if _, ok := e.fkInUse[fk]; !ok { return false } } // All mapped to foreign keys are actually mapped to - for fk := range e.foreignIsMappedTo { + for fk := range e.fkInUse { good := false - for _, candidateFk := range e.localToForeign { + for _, candidateFk := range e.lkToFk { if candidateFk == fk { good = true break diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 202bd80061..a9c90a6e18 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -77,7 +77,7 @@ func (d *Driver) applyMapInstructions(instructions []mapInstruction) { _ = d.e.SetLocalToForeign(instruction.lk, instruction.fk) } copy := map[LK]FK{} - for lk, fk := range d.e.localToForeign { + for lk, fk := range d.e.lkToFk { copy[lk] = fk } d.mappings = append(d.mappings, copy) From 8a243824c0865ef2c317fc0a443eb4450a89af68 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 18:52:45 -0500 Subject: [PATCH 080/127] Symbol rn --- x/ccv/provider/keydel/keydel.go | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 2048c9aa26..f7710bcb60 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -134,9 +134,9 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { foreignUpdates := map[FK]int{} // Make a temporary copy - lkTLPFU := map[LK]update{} + lkToPU := map[LK]update{} for lk, u := range e.lkToPositiveUpdate { - lkTLPFU[lk] = u + lkToPU[lk] = u } // Iterate all local keys for which there was previously a positive update. 
@@ -144,7 +144,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { if last, ok := e.lkToPositiveUpdate[lk]; ok { // Create a deletion update foreignUpdates[last.key] = 0 - delete(lkTLPFU, lk) + delete(lkToPU, lk) e.fkToUpdate[last.key] = lastUpdate{fk: last.key, lk: lk, vscid: vscid, power: 0} } } @@ -165,12 +165,12 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { if 0 < power { fk := e.lkToFk[lk] foreignUpdates[fk] = power - lkTLPFU[lk] = update{key: fk, power: power} + lkToPU[lk] = update{key: fk, power: power} e.fkToUpdate[fk] = lastUpdate{fk: fk, lk: lk, vscid: vscid, power: power} } } - e.lkToPositiveUpdate = lkTLPFU + e.lkToPositiveUpdate = lkToPU return foreignUpdates } From 6ac40c6f31f733fd8fe0d7dc419ee747268eb538 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 18:54:19 -0500 Subject: [PATCH 081/127] Comments --- x/ccv/provider/keydel/keydel.go | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index f7710bcb60..7bb9576ab9 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -70,8 +70,7 @@ func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { } func (e *KeyDel) GetLocal(fk FK) (LK, error) { - // TODO: make it possible lookup local keys even - // when the foreign key has not yet been used? + // TODO: implement lookups via keys current key if u, ok := e.fkToUpdate[fk]; ok { return u.lk, nil } else { @@ -80,16 +79,15 @@ func (e *KeyDel) GetLocal(fk FK) (LK, error) { } func (e *KeyDel) Prune(vscid VSCID) { - toRemove := []FK{} + toDel := []FK{} for _, u := range e.fkToUpdate { - // If the last update has matured, and that - // update was a deletion (0 power), pruning - // is possible. - if u.vscid <= vscid && u.power == 0 { - toRemove = append(toRemove, u.fk) + // If the last update was a deletion (0 power) and the update + // matured then pruning is possible. + if u.power == 0 && u.vscid <= vscid { + toDel = append(toDel, u.fk) } } - for _, fk := range toRemove { + for _, fk := range toDel { delete(e.fkToUpdate, fk) } } From 3e4efda0b78e6982b6bf8647962cf66c2cee3b0f Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 18:56:22 -0500 Subject: [PATCH 082/127] (pass) rm docstrings --- x/ccv/provider/keydel/keydel.go | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 7bb9576ab9..7353c2eaa5 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -26,18 +26,9 @@ type lastUpdate struct { // 3. integrate with create/destroy validator type KeyDel struct { - // A new key is added on staking::CreateValidator - // the key is deleted at earliest after sending an update corresponding - // to a call to staking::DeleteValidator - // At most one local key can map to a given foreign key - lkToFk map[LK]FK - // Is the foreign key mapped to in localToForeign? 
- fkInUse map[FK]bool - //TODO: - fkToUpdate map[FK]lastUpdate - // A new key is added when a relevant update is returned by ComputeUpdates - // the key is deleted at earliest after sending an update corresponding - // to a call to staking::DeleteValidator + lkToFk map[LK]FK + fkInUse map[FK]bool + fkToUpdate map[FK]lastUpdate lkToPositiveUpdate map[LK]update } From 020e63c64f87c647a36255f3fad4dc41f9a1451d Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 18:58:21 -0500 Subject: [PATCH 083/127] Symbol rename --- x/ccv/provider/keydel/keydel.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 7353c2eaa5..d3240a78ca 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -52,8 +52,8 @@ func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { if inUse { return errors.New(`cannot reuse foreign key which is currently being used for lookups`) } - if otherFk, ok := e.lkToFk[lk]; ok { - delete(e.fkInUse, otherFk) + if oldFk, ok := e.lkToFk[lk]; ok { + delete(e.fkInUse, oldFk) } e.lkToFk[lk] = fk e.fkInUse[fk] = true From 5b0870cfff50622ba1d6570d9b79e9f5ee0df0af Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 19:02:07 -0500 Subject: [PATCH 084/127] (pass) add intermediate --- x/ccv/provider/keydel/keydel.go | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index d3240a78ca..84cc12adc0 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -109,9 +109,11 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // Key changes for lk, newFk := range e.lkToFk { if u, ok := e.lkToPositiveUpdate[lk]; ok { - oldFk := u.key - if oldFk != newFk { - lks = append(lks, lk) + if 0 < u.power { + oldFk := u.key + if oldFk != newFk { + lks = append(lks, lk) + } } } } @@ -130,11 +132,11 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // Iterate all local keys for which there was previously a positive update. for _, lk := range lks { - if last, ok := e.lkToPositiveUpdate[lk]; ok { + if u, ok := e.lkToPositiveUpdate[lk]; ok { // Create a deletion update - foreignUpdates[last.key] = 0 + foreignUpdates[u.key] = 0 delete(lkToPU, lk) - e.fkToUpdate[last.key] = lastUpdate{fk: last.key, lk: lk, vscid: vscid, power: 0} + e.fkToUpdate[u.key] = lastUpdate{fk: u.key, lk: lk, vscid: vscid, power: 0} } } @@ -142,9 +144,8 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // has been a power update. for _, lk := range lks { power := 0 - if last, ok := e.lkToPositiveUpdate[lk]; ok { - // If there was a positive power before, use it. - power = last.power + if u, ok := e.lkToPositiveUpdate[lk]; ok { + power = u.power } // If there is a new power use it. if newPower, ok := localUpdates[lk]; ok { From 26811f3d9ea5334f8c939029e1901e643c85a62d Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 19:05:12 -0500 Subject: [PATCH 085/127] Revert "(pass) add intermediate" This reverts commit 5b0870cfff50622ba1d6570d9b79e9f5ee0df0af. 
--- x/ccv/provider/keydel/keydel.go | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 84cc12adc0..d3240a78ca 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -109,11 +109,9 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // Key changes for lk, newFk := range e.lkToFk { if u, ok := e.lkToPositiveUpdate[lk]; ok { - if 0 < u.power { - oldFk := u.key - if oldFk != newFk { - lks = append(lks, lk) - } + oldFk := u.key + if oldFk != newFk { + lks = append(lks, lk) } } } @@ -132,11 +130,11 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // Iterate all local keys for which there was previously a positive update. for _, lk := range lks { - if u, ok := e.lkToPositiveUpdate[lk]; ok { + if last, ok := e.lkToPositiveUpdate[lk]; ok { // Create a deletion update - foreignUpdates[u.key] = 0 + foreignUpdates[last.key] = 0 delete(lkToPU, lk) - e.fkToUpdate[u.key] = lastUpdate{fk: u.key, lk: lk, vscid: vscid, power: 0} + e.fkToUpdate[last.key] = lastUpdate{fk: last.key, lk: lk, vscid: vscid, power: 0} } } @@ -144,8 +142,9 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // has been a power update. for _, lk := range lks { power := 0 - if u, ok := e.lkToPositiveUpdate[lk]; ok { - power = u.power + if last, ok := e.lkToPositiveUpdate[lk]; ok { + // If there was a positive power before, use it. + power = last.power } // If there is a new power use it. if newPower, ok := localUpdates[lk]; ok { From 87511102e5e30231ab1f8f945a4ffb307822215b Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 19:09:49 -0500 Subject: [PATCH 086/127] Add invariant --- x/ccv/provider/keydel/keydel.go | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index d3240a78ca..d996ab2e21 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -167,6 +167,14 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // Returns true iff internal invariants hold func (e *KeyDel) internalInvariants() bool { + // If an update is stored in positive update, the last update + // for the foreign key was not 0 + for _, u := range e.lkToPositiveUpdate { + if e.fkToUpdate[u.key].power == 0 { + return false + } + } + // No two local keys can map to the same foreign key seen := map[FK]bool{} for _, fk := range e.lkToFk { From c0d6046da02ebfed3a507a193e8be3c18fc23a84 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 19:22:26 -0500 Subject: [PATCH 087/127] (pass) simplify --- x/ccv/provider/keydel/keydel.go | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index d996ab2e21..adfa907775 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -122,18 +122,11 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { foreignUpdates := map[FK]int{} - // Make a temporary copy - lkToPU := map[LK]update{} - for lk, u := range e.lkToPositiveUpdate { - lkToPU[lk] = u - } - // Iterate all local keys for which there was previously a positive update. 
for _, lk := range lks { if last, ok := e.lkToPositiveUpdate[lk]; ok { // Create a deletion update foreignUpdates[last.key] = 0 - delete(lkToPU, lk) e.fkToUpdate[last.key] = lastUpdate{fk: last.key, lk: lk, vscid: vscid, power: 0} } } @@ -154,12 +147,16 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { if 0 < power { fk := e.lkToFk[lk] foreignUpdates[fk] = power - lkToPU[lk] = update{key: fk, power: power} e.fkToUpdate[fk] = lastUpdate{fk: fk, lk: lk, vscid: vscid, power: power} } } - e.lkToPositiveUpdate = lkToPU + e.lkToPositiveUpdate = map[LK]update{} + for _, u := range e.fkToUpdate { + if 0 < u.power { + e.lkToPositiveUpdate[u.lk] = update{key: u.fk, power: u.power} + } + } return foreignUpdates } From badd610aa5a6a130640f7aa2e52cfccf1e58e754 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 19:24:42 -0500 Subject: [PATCH 088/127] Simplify (field removal) (pass) --- x/ccv/provider/keydel/keydel.go | 42 +++++++++++++-------------------- 1 file changed, 16 insertions(+), 26 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index adfa907775..4b47ff3d01 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -26,18 +26,16 @@ type lastUpdate struct { // 3. integrate with create/destroy validator type KeyDel struct { - lkToFk map[LK]FK - fkInUse map[FK]bool - fkToUpdate map[FK]lastUpdate - lkToPositiveUpdate map[LK]update + lkToFk map[LK]FK + fkInUse map[FK]bool + fkToUpdate map[FK]lastUpdate } func MakeKeyDel() KeyDel { return KeyDel{ - lkToFk: map[LK]FK{}, - fkInUse: map[FK]bool{}, - fkToUpdate: map[FK]lastUpdate{}, - lkToPositiveUpdate: map[LK]update{}, + lkToFk: map[LK]FK{}, + fkInUse: map[FK]bool{}, + fkToUpdate: map[FK]lastUpdate{}, } } @@ -106,9 +104,16 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { lks := []LK{} + lkToPositiveUpdate := map[LK]update{} + for _, u := range e.fkToUpdate { + if 0 < u.power { + lkToPositiveUpdate[u.lk] = update{key: u.fk, power: u.power} + } + } + // Key changes for lk, newFk := range e.lkToFk { - if u, ok := e.lkToPositiveUpdate[lk]; ok { + if u, ok := lkToPositiveUpdate[lk]; ok { oldFk := u.key if oldFk != newFk { lks = append(lks, lk) @@ -124,7 +129,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // Iterate all local keys for which there was previously a positive update. for _, lk := range lks { - if last, ok := e.lkToPositiveUpdate[lk]; ok { + if last, ok := lkToPositiveUpdate[lk]; ok { // Create a deletion update foreignUpdates[last.key] = 0 e.fkToUpdate[last.key] = lastUpdate{fk: last.key, lk: lk, vscid: vscid, power: 0} @@ -135,7 +140,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // has been a power update. for _, lk := range lks { power := 0 - if last, ok := e.lkToPositiveUpdate[lk]; ok { + if last, ok := lkToPositiveUpdate[lk]; ok { // If there was a positive power before, use it. 
power = last.power } @@ -151,27 +156,12 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { } } - e.lkToPositiveUpdate = map[LK]update{} - for _, u := range e.fkToUpdate { - if 0 < u.power { - e.lkToPositiveUpdate[u.lk] = update{key: u.fk, power: u.power} - } - } - return foreignUpdates } // Returns true iff internal invariants hold func (e *KeyDel) internalInvariants() bool { - // If an update is stored in positive update, the last update - // for the foreign key was not 0 - for _, u := range e.lkToPositiveUpdate { - if e.fkToUpdate[u.key].power == 0 { - return false - } - } - // No two local keys can map to the same foreign key seen := map[FK]bool{} for _, fk := range e.lkToFk { From 241e65a544a921aa7de22be3121e0b28566c6301 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 19:32:36 -0500 Subject: [PATCH 089/127] (pass) simplify --- x/ccv/provider/keydel/keydel.go | 31 +++++++++++++++---------------- 1 file changed, 15 insertions(+), 16 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 4b47ff3d01..bd932c1a05 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -104,32 +104,31 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { lks := []LK{} - lkToPositiveUpdate := map[LK]update{} - for _, u := range e.fkToUpdate { - if 0 < u.power { - lkToPositiveUpdate[u.lk] = update{key: u.fk, power: u.power} - } - } - - // Key changes - for lk, newFk := range e.lkToFk { - if u, ok := lkToPositiveUpdate[lk]; ok { - oldFk := u.key - if oldFk != newFk { - lks = append(lks, lk) + // Grab lks for which fk changed + for oldFk, u := range e.fkToUpdate { + if newFk, ok := e.lkToFk[u.lk]; ok { + if oldFk != newFk && 0 < u.power { + lks = append(lks, u.lk) } } } - // Power changes + // Grab lks for which power changed for lk := range localUpdates { lks = append(lks, lk) } foreignUpdates := map[FK]int{} + lkToLastPositiveUpdate := map[LK]update{} + for _, u := range e.fkToUpdate { + if 0 < u.power { + lkToLastPositiveUpdate[u.lk] = update{key: u.fk, power: u.power} + } + } + // Iterate all local keys for which there was previously a positive update. for _, lk := range lks { - if last, ok := lkToPositiveUpdate[lk]; ok { + if last, ok := lkToLastPositiveUpdate[lk]; ok { // Create a deletion update foreignUpdates[last.key] = 0 e.fkToUpdate[last.key] = lastUpdate{fk: last.key, lk: lk, vscid: vscid, power: 0} @@ -140,7 +139,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // has been a power update. for _, lk := range lks { power := 0 - if last, ok := lkToPositiveUpdate[lk]; ok { + if last, ok := lkToLastPositiveUpdate[lk]; ok { // If there was a positive power before, use it. power = last.power } From 0c980b78b56099ed934d0085cad8200c1301f6cb Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 19:37:17 -0500 Subject: [PATCH 090/127] (pass) symbol rename --- x/ccv/provider/keydel/keydel.go | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index bd932c1a05..a9feeeb018 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -128,10 +128,10 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // Iterate all local keys for which there was previously a positive update. 
for _, lk := range lks { - if last, ok := lkToLastPositiveUpdate[lk]; ok { + if u, ok := lkToLastPositiveUpdate[lk]; ok { // Create a deletion update - foreignUpdates[last.key] = 0 - e.fkToUpdate[last.key] = lastUpdate{fk: last.key, lk: lk, vscid: vscid, power: 0} + foreignUpdates[u.key] = 0 + e.fkToUpdate[u.key] = lastUpdate{fk: u.key, lk: lk, vscid: vscid, power: 0} } } @@ -139,9 +139,9 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // has been a power update. for _, lk := range lks { power := 0 - if last, ok := lkToLastPositiveUpdate[lk]; ok { + if u, ok := lkToLastPositiveUpdate[lk]; ok { // If there was a positive power before, use it. - power = last.power + power = u.power } // If there is a new power use it. if newPower, ok := localUpdates[lk]; ok { From 1ebbcb9da7ca2dad81ce5fe893c5925915d05af6 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 19:39:04 -0500 Subject: [PATCH 091/127] (pass) add clarity panic --- x/ccv/provider/keydel/keydel.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index a9feeeb018..9395bc997c 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -122,6 +122,9 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { lkToLastPositiveUpdate := map[LK]update{} for _, u := range e.fkToUpdate { if 0 < u.power { + if _, ok := lkToLastPositiveUpdate[u.lk]; ok { + panic("Already have positive update for lk") + } lkToLastPositiveUpdate[u.lk] = update{key: u.fk, power: u.power} } } From e293383b9a5596153233e50796bf6af64bc2be88 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 19:49:27 -0500 Subject: [PATCH 092/127] (pass) cleanup --- x/ccv/provider/keydel/keydel.go | 33 +++++++++++++++------------------ 1 file changed, 15 insertions(+), 18 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 9395bc997c..77fd7501b3 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -117,24 +117,20 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { lks = append(lks, lk) } - foreignUpdates := map[FK]int{} + ret := map[FK]int{} - lkToLastPositiveUpdate := map[LK]update{} - for _, u := range e.fkToUpdate { - if 0 < u.power { - if _, ok := lkToLastPositiveUpdate[u.lk]; ok { - panic("Already have positive update for lk") - } - lkToLastPositiveUpdate[u.lk] = update{key: u.fk, power: u.power} - } + fkToUpdateClone := map[FK]lastUpdate{} + for k, v := range e.fkToUpdate { + fkToUpdateClone[k] = v } // Iterate all local keys for which there was previously a positive update. for _, lk := range lks { - if u, ok := lkToLastPositiveUpdate[lk]; ok { - // Create a deletion update - foreignUpdates[u.key] = 0 - e.fkToUpdate[u.key] = lastUpdate{fk: u.key, lk: lk, vscid: vscid, power: 0} + for _, u := range fkToUpdateClone { + if u.lk == lk && 0 < u.power { + e.fkToUpdate[u.fk] = lastUpdate{fk: u.fk, lk: lk, vscid: vscid, power: 0} + ret[u.fk] = 0 + } } } @@ -142,9 +138,10 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // has been a power update. for _, lk := range lks { power := 0 - if u, ok := lkToLastPositiveUpdate[lk]; ok { - // If there was a positive power before, use it. - power = u.power + for _, u := range fkToUpdateClone { + if u.lk == lk && 0 < u.power { + power = u.power + } } // If there is a new power use it. 
if newPower, ok := localUpdates[lk]; ok { @@ -153,12 +150,12 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // Only ship positive powers. Zero powers are accounted for above. if 0 < power { fk := e.lkToFk[lk] - foreignUpdates[fk] = power e.fkToUpdate[fk] = lastUpdate{fk: fk, lk: lk, vscid: vscid, power: power} + ret[fk] = power } } - return foreignUpdates + return ret } // Returns true iff internal invariants hold From 9120e80c168a3b328f62fcd8637ae9d13f0017a3 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 19:50:14 -0500 Subject: [PATCH 093/127] rename struct --- x/ccv/provider/keydel/keydel.go | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 77fd7501b3..26be351582 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -13,7 +13,7 @@ type update struct { power int } -type lastUpdate struct { +type updateMemo struct { fk FK lk LK vscid int @@ -28,14 +28,14 @@ type lastUpdate struct { type KeyDel struct { lkToFk map[LK]FK fkInUse map[FK]bool - fkToUpdate map[FK]lastUpdate + fkToUpdate map[FK]updateMemo } func MakeKeyDel() KeyDel { return KeyDel{ lkToFk: map[LK]FK{}, fkInUse: map[FK]bool{}, - fkToUpdate: map[FK]lastUpdate{}, + fkToUpdate: map[FK]updateMemo{}, } } @@ -119,7 +119,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { ret := map[FK]int{} - fkToUpdateClone := map[FK]lastUpdate{} + fkToUpdateClone := map[FK]updateMemo{} for k, v := range e.fkToUpdate { fkToUpdateClone[k] = v } @@ -128,7 +128,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { for _, lk := range lks { for _, u := range fkToUpdateClone { if u.lk == lk && 0 < u.power { - e.fkToUpdate[u.fk] = lastUpdate{fk: u.fk, lk: lk, vscid: vscid, power: 0} + e.fkToUpdate[u.fk] = updateMemo{fk: u.fk, lk: lk, vscid: vscid, power: 0} ret[u.fk] = 0 } } @@ -150,7 +150,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // Only ship positive powers. Zero powers are accounted for above. if 0 < power { fk := e.lkToFk[lk] - e.fkToUpdate[fk] = lastUpdate{fk: fk, lk: lk, vscid: vscid, power: power} + e.fkToUpdate[fk] = updateMemo{fk: fk, lk: lk, vscid: vscid, power: power} ret[fk] = power } } From 4caa642ca25753961b631f4994435d58a7c10484 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 19:58:04 -0500 Subject: [PATCH 094/127] impl current mapping queries --- x/ccv/provider/keydel/keydel.go | 51 +++++++++++++++++----------- x/ccv/provider/keydel/keydel_test.go | 3 ++ 2 files changed, 34 insertions(+), 20 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 26be351582..43db9cf146 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -26,42 +26,43 @@ type updateMemo struct { // 3. 
integrate with create/destroy validator type KeyDel struct { - lkToFk map[LK]FK - fkInUse map[FK]bool - fkToUpdate map[FK]updateMemo + lkToFk map[LK]FK + fkToLk map[FK]LK + fkToMemo map[FK]updateMemo } func MakeKeyDel() KeyDel { return KeyDel{ - lkToFk: map[LK]FK{}, - fkInUse: map[FK]bool{}, - fkToUpdate: map[FK]updateMemo{}, + lkToFk: map[LK]FK{}, + fkToLk: map[FK]LK{}, + fkToMemo: map[FK]updateMemo{}, } } func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { inUse := false - if _, ok := e.fkInUse[fk]; ok { + if _, ok := e.fkToLk[fk]; ok { inUse = true } - if _, ok := e.fkToUpdate[fk]; ok { + if _, ok := e.fkToMemo[fk]; ok { inUse = true } if inUse { return errors.New(`cannot reuse foreign key which is currently being used for lookups`) } if oldFk, ok := e.lkToFk[lk]; ok { - delete(e.fkInUse, oldFk) + delete(e.fkToLk, oldFk) } e.lkToFk[lk] = fk - e.fkInUse[fk] = true + e.fkToLk[fk] = lk return nil } func (e *KeyDel) GetLocal(fk FK) (LK, error) { - // TODO: implement lookups via keys current key - if u, ok := e.fkToUpdate[fk]; ok { + if u, ok := e.fkToMemo[fk]; ok { return u.lk, nil + } else if lk, ok := e.fkToLk[fk]; ok { + return lk, nil } else { return -1, errors.New("local key not found for foreign key") } @@ -69,7 +70,7 @@ func (e *KeyDel) GetLocal(fk FK) (LK, error) { func (e *KeyDel) Prune(vscid VSCID) { toDel := []FK{} - for _, u := range e.fkToUpdate { + for _, u := range e.fkToMemo { // If the last update was a deletion (0 power) and the update // matured then pruning is possible. if u.power == 0 && u.vscid <= vscid { @@ -77,7 +78,7 @@ func (e *KeyDel) Prune(vscid VSCID) { } } for _, fk := range toDel { - delete(e.fkToUpdate, fk) + delete(e.fkToMemo, fk) } } @@ -105,7 +106,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { lks := []LK{} // Grab lks for which fk changed - for oldFk, u := range e.fkToUpdate { + for oldFk, u := range e.fkToMemo { if newFk, ok := e.lkToFk[u.lk]; ok { if oldFk != newFk && 0 < u.power { lks = append(lks, u.lk) @@ -120,7 +121,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { ret := map[FK]int{} fkToUpdateClone := map[FK]updateMemo{} - for k, v := range e.fkToUpdate { + for k, v := range e.fkToMemo { fkToUpdateClone[k] = v } @@ -128,7 +129,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { for _, lk := range lks { for _, u := range fkToUpdateClone { if u.lk == lk && 0 < u.power { - e.fkToUpdate[u.fk] = updateMemo{fk: u.fk, lk: lk, vscid: vscid, power: 0} + e.fkToMemo[u.fk] = updateMemo{fk: u.fk, lk: lk, vscid: vscid, power: 0} ret[u.fk] = 0 } } @@ -150,7 +151,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // Only ship positive powers. Zero powers are accounted for above. 
if 0 < power { fk := e.lkToFk[lk] - e.fkToUpdate[fk] = updateMemo{fk: fk, lk: lk, vscid: vscid, power: power} + e.fkToMemo[fk] = updateMemo{fk: fk, lk: lk, vscid: vscid, power: power} ret[fk] = power } } @@ -172,13 +173,13 @@ func (e *KeyDel) internalInvariants() bool { // All foreign keys mapped to by local keys are noted for _, fk := range e.lkToFk { - if _, ok := e.fkInUse[fk]; !ok { + if _, ok := e.fkToLk[fk]; !ok { return false } } // All mapped to foreign keys are actually mapped to - for fk := range e.fkInUse { + for fk := range e.fkToLk { good := false for _, candidateFk := range e.lkToFk { if candidateFk == fk { @@ -191,6 +192,16 @@ func (e *KeyDel) internalInvariants() bool { } } + // If a foreign key is directly mapped to a local key + // there is no disagreeing on the local key. + for fk, lk := range e.fkToLk { + if u, ok := e.fkToMemo[fk]; ok { + if lk != u.lk { + return false + } + } + } + return true } diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index a9c90a6e18..d91665a826 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -208,6 +208,9 @@ func (d *Driver) checkProperties() { expectQueryable[u.key] = true } } + for _, fk := range d.e.lkToFk { + expectQueryable[fk] = true + } // Simply check every foreign key for the correct queryable-ness. for fk := 0; fk < NUM_FKS; fk++ { From a1165c37853fa329fe5bfe56d2230a55ccc0c0ef Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 4 Oct 2022 20:01:32 -0500 Subject: [PATCH 095/127] rn --- x/ccv/provider/keydel/keydel.go | 12 ++++++------ x/ccv/provider/keydel/keydel_test.go | 3 ++- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 43db9cf146..274435dbb9 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -13,7 +13,7 @@ type update struct { power int } -type updateMemo struct { +type memo struct { fk FK lk LK vscid int @@ -28,14 +28,14 @@ type updateMemo struct { type KeyDel struct { lkToFk map[LK]FK fkToLk map[FK]LK - fkToMemo map[FK]updateMemo + fkToMemo map[FK]memo } func MakeKeyDel() KeyDel { return KeyDel{ lkToFk: map[LK]FK{}, fkToLk: map[FK]LK{}, - fkToMemo: map[FK]updateMemo{}, + fkToMemo: map[FK]memo{}, } } @@ -120,7 +120,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { ret := map[FK]int{} - fkToUpdateClone := map[FK]updateMemo{} + fkToUpdateClone := map[FK]memo{} for k, v := range e.fkToMemo { fkToUpdateClone[k] = v } @@ -129,7 +129,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { for _, lk := range lks { for _, u := range fkToUpdateClone { if u.lk == lk && 0 < u.power { - e.fkToMemo[u.fk] = updateMemo{fk: u.fk, lk: lk, vscid: vscid, power: 0} + e.fkToMemo[u.fk] = memo{fk: u.fk, lk: lk, vscid: vscid, power: 0} ret[u.fk] = 0 } } @@ -151,7 +151,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // Only ship positive powers. Zero powers are accounted for above. 
if 0 < power { fk := e.lkToFk[lk] - e.fkToMemo[fk] = updateMemo{fk: fk, lk: lk, vscid: vscid, power: power} + e.fkToMemo[fk] = memo{fk: fk, lk: lk, vscid: vscid, power: power} ret[fk] = power } } diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index d91665a826..ad41da1318 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -225,7 +225,8 @@ func (d *Driver) checkProperties() { } /* - */ + TODO: + */ queries := func() { // For each fk known to the consumer for consumerFK := range d.foreignValSet.keyToPower { From 595aeab939ad72a772f5e0ecd9df4ec1eced81a8 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 08:21:08 -0500 Subject: [PATCH 096/127] merge main into --- Dockerfile | 1 + Makefile | 5 +- app/consumer-democracy/ante_handler.go | 59 + app/consumer-democracy/app.go | 855 ++++++++++ app/consumer-democracy/export.go | 198 +++ app/consumer-democracy/genesis.go | 21 + app/consumer/app.go | 2 +- app/consumer/export.go | 10 +- cmd/interchain-security-cdd/main.go | 32 + docs/interchain-security/proto-docs.md | 8 +- docs/quality_assurance.md | 130 +- go.mod | 1 + go.sum | 4 +- proto/buf.lock | 8 +- .../ccv/consumer/v1/consumer.proto | 23 +- .../ccv/consumer/v1/genesis.proto | 43 +- .../ccv/provider/v1/genesis.proto | 67 +- .../ccv/provider/v1/provider.proto | 16 +- .../ccv/provider/v1/query.proto | 17 +- proto/interchain_security/ccv/v1/ccv.proto | 39 +- scripts/protocgen.sh | 2 +- tests/difference/core/driver/setup.go | 2 +- tests/e2e/channel_init_test.go | 773 +-------- tests/e2e/common_test.go | 79 +- tests/e2e/democracy_test.go | 262 +++ tests/e2e/distribution_test.go | 3 +- tests/e2e/normal_operations_test.go | 8 +- tests/e2e/setup_test.go | 330 +--- tests/e2e/slashing_test.go | 144 +- tests/e2e/stop_consumer_test.go | 183 +-- tests/e2e/unbonding_test.go | 19 +- tests/e2e/valset_update_test.go | 5 +- tests/integration/actions.go | 268 ++- tests/integration/config.go | 11 + tests/integration/main.go | 25 +- tests/integration/state.go | 164 +- tests/integration/steps.go | 2 +- tests/integration/steps_democracy.go | 485 ++++++ testutil/keeper/expectations.go | 93 ++ testutil/keeper/unit_test_helpers.go | 311 ++-- testutil/simapp/simapp.go | 22 + .../cosmos/staking/v1beta1/staking.proto | 21 +- x/ccv/consumer/ibc_module.go | 2 +- x/ccv/consumer/ibc_module_test.go | 374 +++++ x/ccv/consumer/keeper/genesis.go | 110 +- x/ccv/consumer/keeper/genesis_test.go | 265 +++ x/ccv/consumer/keeper/keeper.go | 95 +- x/ccv/consumer/keeper/keeper_test.go | 145 +- x/ccv/consumer/keeper/params_test.go | 3 +- x/ccv/consumer/keeper/relay.go | 9 +- x/ccv/consumer/keeper/relay_test.go | 57 +- x/ccv/consumer/keeper/validators.go | 18 + x/ccv/consumer/keeper/validators_test.go | 38 +- x/ccv/consumer/module.go | 3 - x/ccv/consumer/types/codec.go | 10 + x/ccv/consumer/types/consumer.pb.go | 262 ++- x/ccv/consumer/types/genesis.go | 24 +- x/ccv/consumer/types/genesis.pb.go | 636 +++++++- x/ccv/consumer/types/genesis_test.go | 43 +- x/ccv/consumer/types/keys.go | 5 +- x/ccv/democracy/distribution/doc.go | 9 + x/ccv/democracy/distribution/module.go | 125 ++ x/ccv/{ => democracy}/staking/doc.go | 2 +- x/ccv/{ => democracy}/staking/module.go | 0 x/ccv/provider/client/proposal_handler.go | 34 +- x/ccv/provider/ibc_module.go | 15 +- x/ccv/provider/ibc_module_test.go | 336 ++++ x/ccv/provider/keeper/genesis.go | 150 +- x/ccv/provider/keeper/genesis_test.go | 153 ++ x/ccv/provider/keeper/keeper.go | 159 +- 
x/ccv/provider/keeper/keeper_test.go | 51 +- x/ccv/provider/keeper/params_test.go | 32 +- x/ccv/provider/keeper/proposal.go | 302 ++-- x/ccv/provider/keeper/proposal_test.go | 699 +++++++- x/ccv/provider/keeper/relay.go | 2 +- x/ccv/provider/module.go | 15 +- x/ccv/provider/module_test.go | 165 ++ x/ccv/provider/proposal_handler.go | 10 +- x/ccv/provider/proposal_handler_test.go | 90 ++ x/ccv/provider/types/codec.go | 2 +- x/ccv/provider/types/consumer.go | 30 + x/ccv/provider/types/errors.go | 8 +- x/ccv/provider/types/genesis.go | 23 +- x/ccv/provider/types/genesis.pb.go | 1433 +++++++++++++++-- x/ccv/provider/types/genesis_test.go | 63 +- x/ccv/provider/types/keys.go | 37 +- x/ccv/provider/types/keys_test.go | 18 +- x/ccv/provider/types/proposal.go | 70 +- x/ccv/provider/types/proposal_test.go | 28 +- x/ccv/provider/types/provider.pb.go | 355 +++- x/ccv/provider/types/query.pb.go | 8 +- x/ccv/types/ccv.pb.go | 531 +++++- x/ccv/types/errors.go | 2 +- 93 files changed, 9172 insertions(+), 2635 deletions(-) create mode 100644 app/consumer-democracy/ante_handler.go create mode 100644 app/consumer-democracy/app.go create mode 100644 app/consumer-democracy/export.go create mode 100644 app/consumer-democracy/genesis.go create mode 100644 cmd/interchain-security-cdd/main.go create mode 100644 tests/e2e/democracy_test.go create mode 100644 tests/integration/steps_democracy.go create mode 100644 testutil/keeper/expectations.go create mode 100644 x/ccv/consumer/ibc_module_test.go create mode 100644 x/ccv/consumer/keeper/genesis_test.go create mode 100644 x/ccv/consumer/types/codec.go create mode 100644 x/ccv/democracy/distribution/doc.go create mode 100644 x/ccv/democracy/distribution/module.go rename x/ccv/{ => democracy}/staking/doc.go (85%) rename x/ccv/{ => democracy}/staking/module.go (100%) create mode 100644 x/ccv/provider/ibc_module_test.go create mode 100644 x/ccv/provider/keeper/genesis_test.go create mode 100644 x/ccv/provider/module_test.go create mode 100644 x/ccv/provider/proposal_handler_test.go create mode 100644 x/ccv/provider/types/consumer.go diff --git a/Dockerfile b/Dockerfile index 7b699ac974..cc537c7ccc 100644 --- a/Dockerfile +++ b/Dockerfile @@ -39,6 +39,7 @@ COPY --from=hermes-builder /usr/bin/hermes /usr/local/bin/ COPY --from=is-builder /go/bin/interchain-security-pd /usr/local/bin/interchain-security-pd COPY --from=is-builder /go/bin/interchain-security-cd /usr/local/bin/interchain-security-cd +COPY --from=is-builder /go/bin/interchain-security-cdd /usr/local/bin/interchain-security-cdd # Copy in the shell scripts that run the testnet diff --git a/Makefile b/Makefile index 1b88e19292..3e992796b6 100644 --- a/Makefile +++ b/Makefile @@ -6,6 +6,7 @@ install: go.sum export CGO_LDFLAGS="-Wl,-z,relro,-z,now -fstack-protector" go install $(BUILD_FLAGS) ./cmd/interchain-security-pd go install $(BUILD_FLAGS) ./cmd/interchain-security-cd + go install $(BUILD_FLAGS) ./cmd/interchain-security-cdd # run all tests: unit, e2e, diff, and integration test: @@ -41,8 +42,8 @@ $(BUILDDIR)/: ### Protobuf ### ############################################################################### -containerProtoVer=v0.7 -containerProtoImage=tendermintdev/sdk-proto-gen:$(containerProtoVer) +containerProtoVer=0.9.0 +containerProtoImage=ghcr.io/cosmos/proto-builder:$(containerProtoVer) containerProtoGen=cosmos-sdk-proto-gen-$(containerProtoVer) containerProtoGenSwagger=cosmos-sdk-proto-gen-swagger-$(containerProtoVer) containerProtoFmt=cosmos-sdk-proto-fmt-$(containerProtoVer) diff --git 
a/app/consumer-democracy/ante_handler.go b/app/consumer-democracy/ante_handler.go new file mode 100644 index 0000000000..5e282ae06a --- /dev/null +++ b/app/consumer-democracy/ante_handler.go @@ -0,0 +1,59 @@ +package app + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" + "github.com/cosmos/cosmos-sdk/x/auth/ante" + ibcante "github.com/cosmos/ibc-go/v3/modules/core/ante" + ibckeeper "github.com/cosmos/ibc-go/v3/modules/core/keeper" + consumerante "github.com/cosmos/interchain-security/app/consumer/ante" + ibcconsumerkeeper "github.com/cosmos/interchain-security/x/ccv/consumer/keeper" +) + +// HandlerOptions extend the SDK's AnteHandler options by requiring the IBC +// channel keeper. +type HandlerOptions struct { + ante.HandlerOptions + + IBCKeeper *ibckeeper.Keeper + ConsumerKeeper ibcconsumerkeeper.Keeper +} + +func NewAnteHandler(options HandlerOptions) (sdk.AnteHandler, error) { + if options.AccountKeeper == nil { + return nil, sdkerrors.Wrap(sdkerrors.ErrLogic, "account keeper is required for AnteHandler") + } + if options.BankKeeper == nil { + return nil, sdkerrors.Wrap(sdkerrors.ErrLogic, "bank keeper is required for AnteHandler") + } + if options.SignModeHandler == nil { + return nil, sdkerrors.Wrap(sdkerrors.ErrLogic, "sign mode handler is required for ante builder") + } + + var sigGasConsumer = options.SigGasConsumer + if sigGasConsumer == nil { + sigGasConsumer = ante.DefaultSigVerificationGasConsumer + } + + anteDecorators := []sdk.AnteDecorator{ + ante.NewSetUpContextDecorator(), + ante.NewRejectExtensionOptionsDecorator(), + consumerante.NewMsgFilterDecorator(options.ConsumerKeeper), + consumerante.NewDisabledModulesDecorator("/cosmos.evidence", "/cosmos.slashing"), + ante.NewMempoolFeeDecorator(), + ante.NewValidateBasicDecorator(), + ante.NewTxTimeoutHeightDecorator(), + ante.NewValidateMemoDecorator(options.AccountKeeper), + ante.NewConsumeGasForTxSizeDecorator(options.AccountKeeper), + ante.NewDeductFeeDecorator(options.AccountKeeper, options.BankKeeper, options.FeegrantKeeper), + // SetPubKeyDecorator must be called before all signature verification decorators + ante.NewSetPubKeyDecorator(options.AccountKeeper), + ante.NewValidateSigCountDecorator(options.AccountKeeper), + ante.NewSigGasConsumeDecorator(options.AccountKeeper, sigGasConsumer), + ante.NewSigVerificationDecorator(options.AccountKeeper, options.SignModeHandler), + ante.NewIncrementSequenceDecorator(options.AccountKeeper), + ibcante.NewAnteDecorator(options.IBCKeeper), + } + + return sdk.ChainAnteDecorators(anteDecorators...), nil +} diff --git a/app/consumer-democracy/app.go b/app/consumer-democracy/app.go new file mode 100644 index 0000000000..1492244ce0 --- /dev/null +++ b/app/consumer-democracy/app.go @@ -0,0 +1,855 @@ +package app + +import ( + "fmt" + "io" + stdlog "log" + "net/http" + "os" + "path/filepath" + + "github.com/cosmos/cosmos-sdk/baseapp" + "github.com/cosmos/cosmos-sdk/client" + "github.com/cosmos/cosmos-sdk/client/grpc/tmservice" + "github.com/cosmos/cosmos-sdk/client/rpc" + "github.com/cosmos/cosmos-sdk/codec" + "github.com/cosmos/cosmos-sdk/codec/types" + "github.com/cosmos/cosmos-sdk/server/api" + "github.com/cosmos/cosmos-sdk/server/config" + servertypes "github.com/cosmos/cosmos-sdk/server/types" + "github.com/cosmos/cosmos-sdk/simapp" + store "github.com/cosmos/cosmos-sdk/store/types" + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/cosmos/cosmos-sdk/types/module" + "github.com/cosmos/cosmos-sdk/version" + 
"github.com/cosmos/cosmos-sdk/x/auth" + "github.com/cosmos/cosmos-sdk/x/auth/ante" + authrest "github.com/cosmos/cosmos-sdk/x/auth/client/rest" + authkeeper "github.com/cosmos/cosmos-sdk/x/auth/keeper" + authsims "github.com/cosmos/cosmos-sdk/x/auth/simulation" + authtx "github.com/cosmos/cosmos-sdk/x/auth/tx" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + "github.com/cosmos/cosmos-sdk/x/auth/vesting" + vestingtypes "github.com/cosmos/cosmos-sdk/x/auth/vesting/types" + "github.com/cosmos/cosmos-sdk/x/authz" + authzkeeper "github.com/cosmos/cosmos-sdk/x/authz/keeper" + authzmodule "github.com/cosmos/cosmos-sdk/x/authz/module" + "github.com/cosmos/cosmos-sdk/x/bank" + bankkeeper "github.com/cosmos/cosmos-sdk/x/bank/keeper" + banktypes "github.com/cosmos/cosmos-sdk/x/bank/types" + "github.com/cosmos/cosmos-sdk/x/capability" + capabilitykeeper "github.com/cosmos/cosmos-sdk/x/capability/keeper" + capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" + "github.com/cosmos/cosmos-sdk/x/crisis" + crisiskeeper "github.com/cosmos/cosmos-sdk/x/crisis/keeper" + crisistypes "github.com/cosmos/cosmos-sdk/x/crisis/types" + "github.com/cosmos/cosmos-sdk/x/evidence" + evidencekeeper "github.com/cosmos/cosmos-sdk/x/evidence/keeper" + evidencetypes "github.com/cosmos/cosmos-sdk/x/evidence/types" + "github.com/cosmos/cosmos-sdk/x/feegrant" + feegrantkeeper "github.com/cosmos/cosmos-sdk/x/feegrant/keeper" + feegrantmodule "github.com/cosmos/cosmos-sdk/x/feegrant/module" + "github.com/cosmos/cosmos-sdk/x/params" + paramsclient "github.com/cosmos/cosmos-sdk/x/params/client" + paramskeeper "github.com/cosmos/cosmos-sdk/x/params/keeper" + paramstypes "github.com/cosmos/cosmos-sdk/x/params/types" + upgradeclient "github.com/cosmos/cosmos-sdk/x/upgrade/client" + + "github.com/cosmos/cosmos-sdk/x/slashing" + slashingkeeper "github.com/cosmos/cosmos-sdk/x/slashing/keeper" + slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" + "github.com/cosmos/cosmos-sdk/x/upgrade" + upgradekeeper "github.com/cosmos/cosmos-sdk/x/upgrade/keeper" + upgradetypes "github.com/cosmos/cosmos-sdk/x/upgrade/types" + "github.com/cosmos/ibc-go/v3/modules/apps/transfer" + ibctransferkeeper "github.com/cosmos/ibc-go/v3/modules/apps/transfer/keeper" + ibctransfertypes "github.com/cosmos/ibc-go/v3/modules/apps/transfer/types" + ibc "github.com/cosmos/ibc-go/v3/modules/core" + ibcconnectiontypes "github.com/cosmos/ibc-go/v3/modules/core/03-connection/types" + porttypes "github.com/cosmos/ibc-go/v3/modules/core/05-port/types" + ibchost "github.com/cosmos/ibc-go/v3/modules/core/24-host" + ibckeeper "github.com/cosmos/ibc-go/v3/modules/core/keeper" + ibctesting "github.com/cosmos/ibc-go/v3/testing" + "github.com/gorilla/mux" + "github.com/rakyll/statik/fs" + "github.com/spf13/cast" + "github.com/tendermint/spm/cosmoscmd" + abci "github.com/tendermint/tendermint/abci/types" + tmjson "github.com/tendermint/tendermint/libs/json" + "github.com/tendermint/tendermint/libs/log" + tmos "github.com/tendermint/tendermint/libs/os" + dbm "github.com/tendermint/tm-db" + + distr "github.com/cosmos/cosmos-sdk/x/distribution" + ccvdistrclient "github.com/cosmos/cosmos-sdk/x/distribution/client" + ccvdistrkeeper "github.com/cosmos/cosmos-sdk/x/distribution/keeper" + ccvdistrtypes "github.com/cosmos/cosmos-sdk/x/distribution/types" + ccvdistr "github.com/cosmos/interchain-security/x/ccv/democracy/distribution" + + ccvstakingkeeper "github.com/cosmos/cosmos-sdk/x/staking/keeper" + ccvstakingtypes 
"github.com/cosmos/cosmos-sdk/x/staking/types" + ccvstaking "github.com/cosmos/interchain-security/x/ccv/democracy/staking" + + ccvgov "github.com/cosmos/cosmos-sdk/x/gov" + ccvgovkeeper "github.com/cosmos/cosmos-sdk/x/gov/keeper" + ccvgovtypes "github.com/cosmos/cosmos-sdk/x/gov/types" + + // add mint + ccvmint "github.com/cosmos/cosmos-sdk/x/mint" + ccvmintkeeper "github.com/cosmos/cosmos-sdk/x/mint/keeper" + ccvminttypes "github.com/cosmos/cosmos-sdk/x/mint/types" + + paramproposal "github.com/cosmos/cosmos-sdk/x/params/types/proposal" + ibcconsumer "github.com/cosmos/interchain-security/x/ccv/consumer" + ibcconsumerkeeper "github.com/cosmos/interchain-security/x/ccv/consumer/keeper" + ibcconsumertypes "github.com/cosmos/interchain-security/x/ccv/consumer/types" + + // unnamed import of statik for swagger UI support + _ "github.com/cosmos/cosmos-sdk/client/docs/statik" + ibcclienttypes "github.com/cosmos/ibc-go/v3/modules/core/02-client/types" +) + +const ( + AppName = "interchain-security-cd" + upgradeName = "v07-Theta" + AccountAddressPrefix = "cosmos" +) + +var ( + // DefaultNodeHome default home directories for the application daemon + DefaultNodeHome string + + // ModuleBasics defines the module BasicManager is in charge of setting up basic, + // non-dependant module elements, such as codec registration + // and genesis verification. + ModuleBasics = module.NewBasicManager( + auth.AppModuleBasic{}, + bank.AppModuleBasic{}, + capability.AppModuleBasic{}, + ccvstaking.AppModuleBasic{}, + ccvmint.AppModuleBasic{}, + ccvdistr.AppModuleBasic{}, + ccvgov.NewAppModuleBasic( + // TODO: eventually remove upgrade proposal handler and cancel proposal handler + paramsclient.ProposalHandler, ccvdistrclient.ProposalHandler, upgradeclient.ProposalHandler, upgradeclient.CancelProposalHandler, + ), + params.AppModuleBasic{}, + crisis.AppModuleBasic{}, + slashing.AppModuleBasic{}, + feegrantmodule.AppModuleBasic{}, + authzmodule.AppModuleBasic{}, + ibc.AppModuleBasic{}, + upgrade.AppModuleBasic{}, + evidence.AppModuleBasic{}, + transfer.AppModuleBasic{}, + vesting.AppModuleBasic{}, + //router.AppModuleBasic{}, + ibcconsumer.AppModuleBasic{}, + ) + + // module account permissions + maccPerms = map[string][]string{ + authtypes.FeeCollectorName: nil, + ccvstakingtypes.BondedPoolName: {authtypes.Burner, authtypes.Staking}, + ccvstakingtypes.NotBondedPoolName: {authtypes.Burner, authtypes.Staking}, + ccvdistrtypes.ModuleName: nil, + ccvminttypes.ModuleName: {authtypes.Minter}, + ibcconsumertypes.ConsumerRedistributeName: nil, + ibcconsumertypes.ConsumerToSendToProviderName: nil, + ibctransfertypes.ModuleName: {authtypes.Minter, authtypes.Burner}, + ccvgovtypes.ModuleName: {authtypes.Burner}, + } +) + +var ( + _ simapp.App = (*App)(nil) + _ servertypes.Application = (*App)(nil) + _ cosmoscmd.CosmosApp = (*App)(nil) + _ ibctesting.TestingApp = (*App)(nil) +) + +// App extends an ABCI application, but with most of its parameters exported. +// They are exported for convenience in creating helper functions, as object +// capabilities aren't needed for testing. 
+type App struct { // nolint: golint + *baseapp.BaseApp + legacyAmino *codec.LegacyAmino + appCodec codec.Codec + interfaceRegistry types.InterfaceRegistry + + invCheckPeriod uint + + // keys to access the substores + keys map[string]*sdk.KVStoreKey + tkeys map[string]*sdk.TransientStoreKey + memKeys map[string]*sdk.MemoryStoreKey + + // keepers + AccountKeeper authkeeper.AccountKeeper + BankKeeper bankkeeper.Keeper + CapabilityKeeper *capabilitykeeper.Keeper + StakingKeeper ccvstakingkeeper.Keeper + SlashingKeeper slashingkeeper.Keeper + MintKeeper ccvmintkeeper.Keeper + DistrKeeper ccvdistrkeeper.Keeper + GovKeeper ccvgovkeeper.Keeper + CrisisKeeper crisiskeeper.Keeper + UpgradeKeeper upgradekeeper.Keeper + ParamsKeeper paramskeeper.Keeper + IBCKeeper *ibckeeper.Keeper // IBC Keeper must be a pointer in the app, so we can SetRouter on it correctly + EvidenceKeeper evidencekeeper.Keeper + TransferKeeper ibctransferkeeper.Keeper + FeeGrantKeeper feegrantkeeper.Keeper + AuthzKeeper authzkeeper.Keeper + ConsumerKeeper ibcconsumerkeeper.Keeper + + // make scoped keepers public for test purposes + ScopedIBCKeeper capabilitykeeper.ScopedKeeper + ScopedTransferKeeper capabilitykeeper.ScopedKeeper + ScopedIBCConsumerKeeper capabilitykeeper.ScopedKeeper + + // the module manager + MM *module.Manager + + // simulation manager + sm *module.SimulationManager + configurator module.Configurator +} + +func init() { + userHomeDir, err := os.UserHomeDir() + if err != nil { + stdlog.Println("Failed to get home dir %2", err) + } + + DefaultNodeHome = filepath.Join(userHomeDir, "."+AppName) +} + +// New returns a reference to an initialized App. +func New( + logger log.Logger, + db dbm.DB, + traceStore io.Writer, + loadLatest bool, + skipUpgradeHeights map[int64]bool, + homePath string, + invCheckPeriod uint, + encodingConfig cosmoscmd.EncodingConfig, + appOpts servertypes.AppOptions, + baseAppOptions ...func(*baseapp.BaseApp), +) cosmoscmd.App { + + appCodec := encodingConfig.Marshaler + legacyAmino := encodingConfig.Amino + interfaceRegistry := encodingConfig.InterfaceRegistry + + bApp := baseapp.NewBaseApp(AppName, logger, db, encodingConfig.TxConfig.TxDecoder(), baseAppOptions...) 
+ bApp.SetCommitMultiStoreTracer(traceStore) + bApp.SetVersion(version.Version) + bApp.SetInterfaceRegistry(interfaceRegistry) + + keys := sdk.NewKVStoreKeys( + authtypes.StoreKey, banktypes.StoreKey, ccvstakingtypes.StoreKey, + ccvminttypes.StoreKey, ccvdistrtypes.StoreKey, slashingtypes.StoreKey, + ccvgovtypes.StoreKey, paramstypes.StoreKey, ibchost.StoreKey, upgradetypes.StoreKey, feegrant.StoreKey, + evidencetypes.StoreKey, ibctransfertypes.StoreKey, + capabilitytypes.StoreKey, authzkeeper.StoreKey, + ibcconsumertypes.StoreKey, + ) + tkeys := sdk.NewTransientStoreKeys(paramstypes.TStoreKey) + memKeys := sdk.NewMemoryStoreKeys(capabilitytypes.MemStoreKey) + + app := &App{ + BaseApp: bApp, + legacyAmino: legacyAmino, + appCodec: appCodec, + interfaceRegistry: interfaceRegistry, + invCheckPeriod: invCheckPeriod, + keys: keys, + tkeys: tkeys, + memKeys: memKeys, + } + + app.ParamsKeeper = initParamsKeeper( + appCodec, + legacyAmino, + keys[paramstypes.StoreKey], + tkeys[paramstypes.TStoreKey], + ) + + // set the BaseApp's parameter store + bApp.SetParamStore( + app.ParamsKeeper.Subspace(baseapp.Paramspace).WithKeyTable( + paramskeeper.ConsensusParamsKeyTable()), + ) + + // add capability keeper and ScopeToModule for ibc module + app.CapabilityKeeper = capabilitykeeper.NewKeeper( + appCodec, + keys[capabilitytypes.StoreKey], + memKeys[capabilitytypes.MemStoreKey], + ) + scopedIBCKeeper := app.CapabilityKeeper.ScopeToModule(ibchost.ModuleName) + scopedTransferKeeper := app.CapabilityKeeper.ScopeToModule(ibctransfertypes.ModuleName) + scopedIBCConsumerKeeper := app.CapabilityKeeper.ScopeToModule(ibcconsumertypes.ModuleName) + app.CapabilityKeeper.Seal() + + // add keepers + app.AccountKeeper = authkeeper.NewAccountKeeper( + appCodec, + keys[authtypes.StoreKey], + app.GetSubspace(authtypes.ModuleName), + authtypes.ProtoBaseAccount, + maccPerms, + ) + + // Remove the fee-pool from the group of blocked recipient addresses in bank + // this is required for the provider chain to be able to receive tokens from + // the consumer chain + bankBlockedAddrs := app.ModuleAccountAddrs() + delete(bankBlockedAddrs, authtypes.NewModuleAddress( + authtypes.FeeCollectorName).String()) + + app.BankKeeper = bankkeeper.NewBaseKeeper( + appCodec, + keys[banktypes.StoreKey], + app.AccountKeeper, + app.GetSubspace(banktypes.ModuleName), + bankBlockedAddrs, + ) + app.AuthzKeeper = authzkeeper.NewKeeper( + keys[authzkeeper.StoreKey], + appCodec, + app.BaseApp.MsgServiceRouter(), + ) + app.FeeGrantKeeper = feegrantkeeper.NewKeeper( + appCodec, + keys[feegrant.StoreKey], + app.AccountKeeper, + ) + + ccvstakingKeeper := ccvstakingkeeper.NewKeeper( + appCodec, + keys[ccvstakingtypes.StoreKey], + app.AccountKeeper, + app.BankKeeper, + app.GetSubspace(ccvstakingtypes.ModuleName), + ) + + app.MintKeeper = ccvmintkeeper.NewKeeper( + appCodec, keys[ccvminttypes.StoreKey], app.GetSubspace(ccvminttypes.ModuleName), &ccvstakingKeeper, + app.AccountKeeper, app.BankKeeper, authtypes.FeeCollectorName, + ) + + app.SlashingKeeper = slashingkeeper.NewKeeper( + appCodec, + keys[slashingtypes.StoreKey], + &app.ConsumerKeeper, + app.GetSubspace(slashingtypes.ModuleName), + ) + app.DistrKeeper = ccvdistrkeeper.NewKeeper( + appCodec, + keys[ccvdistrtypes.StoreKey], + app.GetSubspace(ccvdistrtypes.ModuleName), + app.AccountKeeper, + app.BankKeeper, + &ccvstakingKeeper, + ibcconsumertypes.ConsumerRedistributeName, + app.ModuleAccountAddrs(), + ) + app.CrisisKeeper = crisiskeeper.NewKeeper( + app.GetSubspace(crisistypes.ModuleName), + 
invCheckPeriod, + app.BankKeeper, + authtypes.FeeCollectorName, + ) + app.UpgradeKeeper = upgradekeeper.NewKeeper( + skipUpgradeHeights, + keys[upgradetypes.StoreKey], + appCodec, + homePath, + app.BaseApp, + ) + + // register the staking hooks + // NOTE: stakingKeeper above is passed by reference, so that it will contain these hooks + // NOTE: slashing hook was removed since it's only relevant for consumerKeeper + app.StakingKeeper = *ccvstakingKeeper.SetHooks( + ccvstakingtypes.NewMultiStakingHooks(app.DistrKeeper.Hooks()), + ) + + // register the proposal types + ccvgovRouter := ccvgovtypes.NewRouter() + ccvgovRouter.AddRoute(ccvgovtypes.RouterKey, ccvgovtypes.ProposalHandler). + AddRoute(paramproposal.RouterKey, params.NewParamChangeProposalHandler(app.ParamsKeeper)). + AddRoute(ccvdistrtypes.RouterKey, distr.NewCommunityPoolSpendProposalHandler(app.DistrKeeper)). + // TODO: remove upgrade handler from gov once admin module or decision for only signaling proposal is made. + AddRoute(upgradetypes.RouterKey, upgrade.NewSoftwareUpgradeProposalHandler(app.UpgradeKeeper)) + govKeeper := ccvgovkeeper.NewKeeper( + appCodec, keys[ccvgovtypes.StoreKey], app.GetSubspace(ccvgovtypes.ModuleName), app.AccountKeeper, app.BankKeeper, + &ccvstakingKeeper, ccvgovRouter, + ) + + app.GovKeeper = *govKeeper.SetHooks( + ccvgovtypes.NewMultiGovHooks( + // register the governance hooks + ), + ) + + app.IBCKeeper = ibckeeper.NewKeeper( + appCodec, + keys[ibchost.StoreKey], + app.GetSubspace(ibchost.ModuleName), + &app.ConsumerKeeper, + app.UpgradeKeeper, + scopedIBCKeeper, + ) + + // Create CCV consumer and modules + app.ConsumerKeeper = ibcconsumerkeeper.NewKeeper( + appCodec, + keys[ibcconsumertypes.StoreKey], + app.GetSubspace(ibcconsumertypes.ModuleName), + scopedIBCConsumerKeeper, + app.IBCKeeper.ChannelKeeper, + &app.IBCKeeper.PortKeeper, + app.IBCKeeper.ConnectionKeeper, + app.IBCKeeper.ClientKeeper, + app.SlashingKeeper, + app.BankKeeper, + app.AccountKeeper, + &app.TransferKeeper, + app.IBCKeeper, + authtypes.FeeCollectorName, + ) + + // consumer keeper satisfies the staking keeper interface + // of the slashing module + app.SlashingKeeper = slashingkeeper.NewKeeper( + appCodec, + keys[slashingtypes.StoreKey], + &app.ConsumerKeeper, + app.GetSubspace(slashingtypes.ModuleName), + ) + + // register slashing module StakingHooks to the consumer keeper + app.ConsumerKeeper = *app.ConsumerKeeper.SetHooks(app.SlashingKeeper.Hooks()) + consumerModule := ibcconsumer.NewAppModule(app.ConsumerKeeper) + + app.TransferKeeper = ibctransferkeeper.NewKeeper( + appCodec, + keys[ibctransfertypes.StoreKey], + app.GetSubspace(ibctransfertypes.ModuleName), + app.IBCKeeper.ChannelKeeper, + app.IBCKeeper.ChannelKeeper, + &app.IBCKeeper.PortKeeper, + app.AccountKeeper, + app.BankKeeper, + scopedTransferKeeper, + ) + transferModule := transfer.NewAppModule(app.TransferKeeper) + ibcmodule := transfer.NewIBCModule(app.TransferKeeper) + + // create static IBC router, add transfer route, then set and seal it + ibcRouter := porttypes.NewRouter() + ibcRouter.AddRoute(ibctransfertypes.ModuleName, ibcmodule) + ibcRouter.AddRoute(ibcconsumertypes.ModuleName, consumerModule) + app.IBCKeeper.SetRouter(ibcRouter) + + // create evidence keeper with router + evidenceKeeper := evidencekeeper.NewKeeper( + appCodec, + keys[evidencetypes.StoreKey], + &app.ConsumerKeeper, + app.SlashingKeeper, + ) + + app.EvidenceKeeper = *evidenceKeeper + + skipGenesisInvariants := cast.ToBool(appOpts.Get(crisis.FlagSkipGenesisInvariants)) + + // NOTE: Any 
module instantiated in the module manager that is later modified + // must be passed by reference here. + app.MM = module.NewManager( + auth.NewAppModule(appCodec, app.AccountKeeper, nil), + vesting.NewAppModule(app.AccountKeeper, app.BankKeeper), + bank.NewAppModule(appCodec, app.BankKeeper, app.AccountKeeper), + capability.NewAppModule(appCodec, *app.CapabilityKeeper), + crisis.NewAppModule(&app.CrisisKeeper, skipGenesisInvariants), + feegrantmodule.NewAppModule(appCodec, app.AccountKeeper, app.BankKeeper, app.FeeGrantKeeper, app.interfaceRegistry), + ccvgov.NewAppModule(appCodec, app.GovKeeper, app.AccountKeeper, app.BankKeeper), + ccvmint.NewAppModule(appCodec, app.MintKeeper, app.AccountKeeper), + slashing.NewAppModule(appCodec, app.SlashingKeeper, app.AccountKeeper, app.BankKeeper, app.ConsumerKeeper), + ccvdistr.NewAppModule(appCodec, app.DistrKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper, authtypes.FeeCollectorName), + ccvstaking.NewAppModule(appCodec, app.StakingKeeper, app.AccountKeeper, app.BankKeeper), + upgrade.NewAppModule(app.UpgradeKeeper), + evidence.NewAppModule(app.EvidenceKeeper), + params.NewAppModule(app.ParamsKeeper), + authzmodule.NewAppModule(appCodec, app.AuthzKeeper, app.AccountKeeper, app.BankKeeper, app.interfaceRegistry), + ibc.NewAppModule(app.IBCKeeper), + transferModule, + consumerModule, + ) + + // During begin block slashing happens after distr.BeginBlocker so that + // there is nothing left over in the validator fee pool, so as to keep the + // CanWithdrawInvariant invariant. + // NOTE: staking module is required if HistoricalEntries param > 0 + // NOTE: capability module's beginblocker must come before any modules using capabilities (e.g. IBC) + app.MM.SetOrderBeginBlockers( + // upgrades should be run first + upgradetypes.ModuleName, + capabilitytypes.ModuleName, + ccvminttypes.ModuleName, + ccvdistrtypes.ModuleName, + slashingtypes.ModuleName, + evidencetypes.ModuleName, + ccvstakingtypes.ModuleName, + authtypes.ModuleName, + banktypes.ModuleName, + ccvgovtypes.ModuleName, + crisistypes.ModuleName, + authz.ModuleName, + feegrant.ModuleName, + paramstypes.ModuleName, + vestingtypes.ModuleName, + ibctransfertypes.ModuleName, + ibchost.ModuleName, + ibcconsumertypes.ModuleName, + ) + app.MM.SetOrderEndBlockers( + crisistypes.ModuleName, + ccvgovtypes.ModuleName, + ccvstakingtypes.ModuleName, + capabilitytypes.ModuleName, + authtypes.ModuleName, + banktypes.ModuleName, + ccvdistrtypes.ModuleName, + slashingtypes.ModuleName, + ccvminttypes.ModuleName, + evidencetypes.ModuleName, + authz.ModuleName, + feegrant.ModuleName, + paramstypes.ModuleName, + upgradetypes.ModuleName, + vestingtypes.ModuleName, + ibctransfertypes.ModuleName, + ibchost.ModuleName, + ibcconsumertypes.ModuleName, + ) + + // NOTE: The genutils module must occur after staking so that pools are + // properly initialized with tokens from genesis accounts. + // NOTE: The genutils module must also occur after auth so that it can access the params from auth. + // NOTE: Capability module must occur first so that it can initialize any capabilities + // so that other modules that want to create or claim capabilities afterwards in InitChain + // can do so safely. 
+ app.MM.SetOrderInitGenesis( + capabilitytypes.ModuleName, + authtypes.ModuleName, + banktypes.ModuleName, + ccvdistrtypes.ModuleName, + ccvstakingtypes.ModuleName, + slashingtypes.ModuleName, + ccvgovtypes.ModuleName, + ccvminttypes.ModuleName, + crisistypes.ModuleName, + evidencetypes.ModuleName, + authz.ModuleName, + feegrant.ModuleName, + paramstypes.ModuleName, + upgradetypes.ModuleName, + vestingtypes.ModuleName, + ibchost.ModuleName, + ibctransfertypes.ModuleName, + ibcconsumertypes.ModuleName, + ) + + app.MM.RegisterInvariants(&app.CrisisKeeper) + app.MM.RegisterRoutes(app.Router(), app.QueryRouter(), encodingConfig.Amino) + + app.configurator = module.NewConfigurator(app.appCodec, app.MsgServiceRouter(), app.GRPCQueryRouter()) + app.MM.RegisterServices(app.configurator) + + // create the simulation manager and define the order of the modules for deterministic simulations + // + // NOTE: this is not required apps that don't use the simulator for fuzz testing + // transactions + app.sm = module.NewSimulationManager( + auth.NewAppModule(appCodec, app.AccountKeeper, authsims.RandomGenesisAccounts), + bank.NewAppModule(appCodec, app.BankKeeper, app.AccountKeeper), + capability.NewAppModule(appCodec, *app.CapabilityKeeper), + feegrantmodule.NewAppModule(appCodec, app.AccountKeeper, app.BankKeeper, app.FeeGrantKeeper, app.interfaceRegistry), + ccvgov.NewAppModule(appCodec, app.GovKeeper, app.AccountKeeper, app.BankKeeper), + ccvmint.NewAppModule(appCodec, app.MintKeeper, app.AccountKeeper), + ccvstaking.NewAppModule(appCodec, app.StakingKeeper, app.AccountKeeper, app.BankKeeper), + ccvdistr.NewAppModule(appCodec, app.DistrKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper, authtypes.FeeCollectorName), + slashing.NewAppModule(appCodec, app.SlashingKeeper, app.AccountKeeper, app.BankKeeper, app.StakingKeeper), + params.NewAppModule(app.ParamsKeeper), + evidence.NewAppModule(app.EvidenceKeeper), ibc.NewAppModule(app.IBCKeeper), + authzmodule.NewAppModule(appCodec, app.AuthzKeeper, app.AccountKeeper, app.BankKeeper, app.interfaceRegistry), + transferModule, + ) + + app.sm.RegisterStoreDecoders() + + // initialize stores + app.MountKVStores(keys) + app.MountTransientStores(tkeys) + app.MountMemoryStores(memKeys) + + anteHandler, err := NewAnteHandler( + HandlerOptions{ + HandlerOptions: ante.HandlerOptions{ + AccountKeeper: app.AccountKeeper, + BankKeeper: app.BankKeeper, + FeegrantKeeper: app.FeeGrantKeeper, + SignModeHandler: encodingConfig.TxConfig.SignModeHandler(), + SigGasConsumer: ante.DefaultSigVerificationGasConsumer, + }, + IBCKeeper: app.IBCKeeper, + ConsumerKeeper: app.ConsumerKeeper, + }, + ) + if err != nil { + panic(fmt.Errorf("failed to create AnteHandler: %s", err)) + } + app.SetAnteHandler(anteHandler) + + app.SetInitChainer(app.InitChainer) + app.SetBeginBlocker(app.BeginBlocker) + app.SetEndBlocker(app.EndBlocker) + + app.UpgradeKeeper.SetUpgradeHandler( + upgradeName, + func(ctx sdk.Context, _ upgradetypes.Plan, _ module.VersionMap) (module.VersionMap, error) { + app.IBCKeeper.ConnectionKeeper.SetParams(ctx, ibcconnectiontypes.DefaultParams()) + + fromVM := make(map[string]uint64) + + for moduleName, eachModule := range app.MM.Modules { + fromVM[moduleName] = eachModule.ConsensusVersion() + } + + ctx.Logger().Info("start to run module migrations...") + + return app.MM.RunMigrations(ctx, app.configurator, fromVM) + }, + ) + + upgradeInfo, err := app.UpgradeKeeper.ReadUpgradeInfoFromDisk() + if err != nil { + panic(fmt.Sprintf("failed to read upgrade info from 
disk %s", err)) + } + + if upgradeInfo.Name == upgradeName && !app.UpgradeKeeper.IsSkipHeight(upgradeInfo.Height) { + storeUpgrades := store.StoreUpgrades{} + + // configure store loader that checks if version == upgradeHeight and applies store upgrades + app.SetStoreLoader(upgradetypes.UpgradeStoreLoader(upgradeInfo.Height, &storeUpgrades)) + } + + if loadLatest { + if err := app.LoadLatestVersion(); err != nil { + tmos.Exit(fmt.Sprintf("failed to load latest version: %s", err)) + } + } + + app.ScopedIBCKeeper = scopedIBCKeeper + app.ScopedTransferKeeper = scopedTransferKeeper + app.ScopedIBCConsumerKeeper = scopedIBCConsumerKeeper + + return app +} + +// Name returns the name of the App +func (app *App) Name() string { return app.BaseApp.Name() } + +// BeginBlocker application updates every begin block +func (app *App) BeginBlocker(ctx sdk.Context, req abci.RequestBeginBlock) abci.ResponseBeginBlock { + return app.MM.BeginBlock(ctx, req) +} + +// EndBlocker application updates every end block +func (app *App) EndBlocker(ctx sdk.Context, req abci.RequestEndBlock) abci.ResponseEndBlock { + return app.MM.EndBlock(ctx, req) +} + +// InitChainer application update at chain initialization +func (app *App) InitChainer(ctx sdk.Context, req abci.RequestInitChain) abci.ResponseInitChain { + var genesisState GenesisState + if err := tmjson.Unmarshal(req.AppStateBytes, &genesisState); err != nil { + panic(err) + } + + app.UpgradeKeeper.SetModuleVersionMap(ctx, app.MM.GetVersionMap()) + return app.MM.InitGenesis(ctx, app.appCodec, genesisState) +} + +// LoadHeight loads a particular height +func (app *App) LoadHeight(height int64) error { + return app.LoadVersion(height) +} + +// ModuleAccountAddrs returns all the app's module account addresses. +func (app *App) ModuleAccountAddrs() map[string]bool { + modAccAddrs := make(map[string]bool) + for acc := range maccPerms { + modAccAddrs[authtypes.NewModuleAddress(acc).String()] = true + } + + return modAccAddrs +} + +// LegacyAmino returns App's amino codec. +// +// NOTE: This is solely to be used for testing purposes as it may be desirable +// for modules to register their own custom testing types. +func (app *App) LegacyAmino() *codec.LegacyAmino { + return app.legacyAmino +} + +// AppCodec returns the app codec. +// +// NOTE: This is solely to be used for testing purposes as it may be desirable +// for modules to register their own custom testing types. +func (app *App) AppCodec() codec.Codec { + return app.appCodec +} + +// InterfaceRegistry returns the InterfaceRegistry +func (app *App) InterfaceRegistry() types.InterfaceRegistry { + return app.interfaceRegistry +} + +// GetKey returns the KVStoreKey for the provided store key. +// +// NOTE: This is solely to be used for testing purposes. +func (app *App) GetKey(storeKey string) *sdk.KVStoreKey { + return app.keys[storeKey] +} + +// GetTKey returns the TransientStoreKey for the provided store key. +// +// NOTE: This is solely to be used for testing purposes. +func (app *App) GetTKey(storeKey string) *sdk.TransientStoreKey { + return app.tkeys[storeKey] +} + +// GetMemKey returns the MemStoreKey for the provided mem key. +// +// NOTE: This is solely used for testing purposes. +func (app *App) GetMemKey(storeKey string) *sdk.MemoryStoreKey { + return app.memKeys[storeKey] +} + +// GetSubspace returns a param subspace for a given module name. +// +// NOTE: This is solely to be used for testing purposes. 
+func (app *App) GetSubspace(moduleName string) paramstypes.Subspace { + subspace, _ := app.ParamsKeeper.GetSubspace(moduleName) + return subspace +} + +// SimulationManager implements the SimulationApp interface +func (app *App) SimulationManager() *module.SimulationManager { + return app.sm +} + +// TestingApp functions + +// GetBaseApp implements the TestingApp interface. +func (app *App) GetBaseApp() *baseapp.BaseApp { + return app.BaseApp +} + +// GetStakingKeeper implements the TestingApp interface. +func (app *App) GetStakingKeeper() ibcclienttypes.StakingKeeper { + return app.ConsumerKeeper +} + +// GetIBCKeeper implements the TestingApp interface. +func (app *App) GetIBCKeeper() *ibckeeper.Keeper { + return app.IBCKeeper +} + +// GetScopedIBCKeeper implements the TestingApp interface. +func (app *App) GetScopedIBCKeeper() capabilitykeeper.ScopedKeeper { + return app.ScopedIBCKeeper +} + +// GetTxConfig implements the TestingApp interface. +func (app *App) GetTxConfig() client.TxConfig { + return cosmoscmd.MakeEncodingConfig(ModuleBasics).TxConfig +} + +// RegisterAPIRoutes registers all application module routes with the provided +// API server. +func (app *App) RegisterAPIRoutes(apiSvr *api.Server, apiConfig config.APIConfig) { + clientCtx := apiSvr.ClientCtx + rpc.RegisterRoutes(clientCtx, apiSvr.Router) + // Register legacy tx routes. + authrest.RegisterTxRoutes(clientCtx, apiSvr.Router) + // Register new tx routes from grpc-gateway. + authtx.RegisterGRPCGatewayRoutes(clientCtx, apiSvr.GRPCGatewayRouter) + // Register new tendermint queries routes from grpc-gateway. + tmservice.RegisterGRPCGatewayRoutes(clientCtx, apiSvr.GRPCGatewayRouter) + + // Register legacy and grpc-gateway routes for all modules. + ModuleBasics.RegisterRESTRoutes(clientCtx, apiSvr.Router) + ModuleBasics.RegisterGRPCGatewayRoutes(clientCtx, apiSvr.GRPCGatewayRouter) + + // register swagger API from root so that other applications can override easily + if apiConfig.Swagger { + RegisterSwaggerAPI(apiSvr.Router) + } +} + +// RegisterTxService implements the Application.RegisterTxService method. +func (app *App) RegisterTxService(clientCtx client.Context) { + authtx.RegisterTxService(app.BaseApp.GRPCQueryRouter(), clientCtx, app.BaseApp.Simulate, app.interfaceRegistry) +} + +// RegisterTendermintService implements the Application.RegisterTendermintService method. 
+func (app *App) RegisterTendermintService(clientCtx client.Context) { + tmservice.RegisterTendermintService(app.BaseApp.GRPCQueryRouter(), clientCtx, app.interfaceRegistry) +} + +// RegisterSwaggerAPI registers swagger route with API Server +func RegisterSwaggerAPI(rtr *mux.Router) { + statikFS, err := fs.New() + if err != nil { + panic(err) + } + + staticServer := http.FileServer(statikFS) + rtr.PathPrefix("/swagger/").Handler(http.StripPrefix("/swagger/", staticServer)) +} + +// GetMaccPerms returns a copy of the module account permissions +func GetMaccPerms() map[string][]string { + dupMaccPerms := make(map[string][]string) + for k, v := range maccPerms { + dupMaccPerms[k] = v + } + return dupMaccPerms +} + +// initParamsKeeper init params keeper and its subspaces +func initParamsKeeper(appCodec codec.BinaryCodec, legacyAmino *codec.LegacyAmino, key, tkey sdk.StoreKey) paramskeeper.Keeper { + paramsKeeper := paramskeeper.NewKeeper(appCodec, legacyAmino, key, tkey) + + paramsKeeper.Subspace(authtypes.ModuleName) + paramsKeeper.Subspace(banktypes.ModuleName) + paramsKeeper.Subspace(ccvstakingtypes.ModuleName) + paramsKeeper.Subspace(ccvminttypes.ModuleName) + paramsKeeper.Subspace(ccvdistrtypes.ModuleName) + paramsKeeper.Subspace(slashingtypes.ModuleName) + paramsKeeper.Subspace(ccvgovtypes.ModuleName).WithKeyTable(ccvgovtypes.ParamKeyTable()) + paramsKeeper.Subspace(crisistypes.ModuleName) + paramsKeeper.Subspace(ibctransfertypes.ModuleName) + paramsKeeper.Subspace(ibchost.ModuleName) + paramsKeeper.Subspace(ibcconsumertypes.ModuleName) + + return paramsKeeper +} diff --git a/app/consumer-democracy/export.go b/app/consumer-democracy/export.go new file mode 100644 index 0000000000..c146ab7ad2 --- /dev/null +++ b/app/consumer-democracy/export.go @@ -0,0 +1,198 @@ +package app + +import ( + "encoding/json" + "fmt" + + tmproto "github.com/tendermint/tendermint/proto/tendermint/types" + + servertypes "github.com/cosmos/cosmos-sdk/server/types" + sdk "github.com/cosmos/cosmos-sdk/types" + slashingtypes "github.com/cosmos/cosmos-sdk/x/slashing/types" + tmtypes "github.com/tendermint/tendermint/types" +) + +// ExportAppStateAndValidators exports the state of the application for a genesis +// file. +func (app *App) ExportAppStateAndValidators( + forZeroHeight bool, jailAllowedAddrs []string, +) (servertypes.ExportedApp, error) { + + // as if they could withdraw from the start of the next block + ctx := app.NewContext(true, tmproto.Header{Height: app.LastBlockHeight()}) + + // We export at last height + 1, because that's the height at which + // Tendermint will start InitChain. 
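+	// Illustrative example: if the last committed block is height 100, the
+	// exported genesis carries Height 101; if forZeroHeight is set, the height
+	// is forced to 0 below.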
+ height := app.LastBlockHeight() + 1 + if forZeroHeight { + height = 0 + app.prepForZeroHeightGenesis(ctx, jailAllowedAddrs) + } + + genState := app.MM.ExportGenesis(ctx, app.appCodec) + appState, err := json.MarshalIndent(genState, "", " ") + if err != nil { + return servertypes.ExportedApp{}, err + } + + validators, err := app.GetValidatorSet(ctx) + if err != nil { + return servertypes.ExportedApp{}, err + } + return servertypes.ExportedApp{ + AppState: appState, + Validators: validators, + Height: height, + ConsensusParams: app.BaseApp.GetConsensusParams(ctx), + }, nil +} + +// prepare for fresh start at zero height +// NOTE zero height genesis is a temporary feature which will be deprecated +// in favour of export at a block height +func (app *App) prepForZeroHeightGenesis(ctx sdk.Context, jailAllowedAddrs []string) { + // applyAllowedAddrs := false + + // check if there is a allowed address list + // if len(jailAllowedAddrs) > 0 { + // applyAllowedAddrs = true + // } + + // allowedAddrsMap := make(map[string]bool) + + // for _, addr := range jailAllowedAddrs { + // _, err := sdk.ValAddressFromBech32(addr) + // if err != nil { + // log.Fatal(err) + // } + // allowedAddrsMap[addr] = true + // } + + /* Just to be safe, assert the invariants on current state. */ + app.CrisisKeeper.AssertInvariants(ctx) + + /* Handle fee distribution state. */ + + // withdraw all validator commission + // app.StakingKeeper.IterateValidators(ctx, func(_ int64, val stakingtypes.ValidatorI) (stop bool) { + // _, err := app.DistrKeeper.WithdrawValidatorCommission(ctx, val.GetOperator()) + // if err != nil { + // panic(err) + // } + // return false + // }) + + // withdraw all delegator rewards + // dels := app.StakingKeeper.GetAllDelegations(ctx) + // for _, delegation := range dels { + // _, err := app.DistrKeeper.WithdrawDelegationRewards(ctx, delegation.GetDelegatorAddr(), delegation.GetValidatorAddr()) + // if err != nil { + // panic(err) + // } + // } + + // clear validator slash events + // app.DistrKeeper.DeleteAllValidatorSlashEvents(ctx) + + // clear validator historical rewards + // app.DistrKeeper.DeleteAllValidatorHistoricalRewards(ctx) + + // set context height to zero + height := ctx.BlockHeight() + ctx = ctx.WithBlockHeight(0) + + // reinitialize all validators + // app.StakingKeeper.IterateValidators(ctx, func(_ int64, val stakingtypes.ValidatorI) (stop bool) { + // // donate any unwithdrawn outstanding reward fraction tokens to the community pool + // scraps := app.DistrKeeper.GetValidatorOutstandingRewardsCoins(ctx, val.GetOperator()) + // feePool := app.DistrKeeper.GetFeePool(ctx) + // feePool.CommunityPool = feePool.CommunityPool.Add(scraps...) + // app.DistrKeeper.SetFeePool(ctx, feePool) + + // app.DistrKeeper.Hooks().AfterValidatorCreated(ctx, val.GetOperator()) + // return false + // }) + + // reinitialize all delegations + // for _, del := range dels { + // app.DistrKeeper.Hooks().BeforeDelegationCreated(ctx, del.GetDelegatorAddr(), del.GetValidatorAddr()) + // app.DistrKeeper.Hooks().AfterDelegationModified(ctx, del.GetDelegatorAddr(), del.GetValidatorAddr()) + // } + + // reset context height + ctx = ctx.WithBlockHeight(height) + + /* Handle staking state. 
*/ + + // iterate through redelegations, reset creation height + // app.StakingKeeper.IterateRedelegations(ctx, func(_ int64, red stakingtypes.Redelegation) (stop bool) { + // for i := range red.Entries { + // red.Entries[i].CreationHeight = 0 + // } + // app.StakingKeeper.SetRedelegation(ctx, red) + // return false + // }) + + // iterate through unbonding delegations, reset creation height + // app.StakingKeeper.IterateUnbondingDelegations(ctx, func(_ int64, ubd stakingtypes.UnbondingDelegation) (stop bool) { + // for i := range ubd.Entries { + // ubd.Entries[i].CreationHeight = 0 + // } + // app.StakingKeeper.SetUnbondingDelegation(ctx, ubd) + // return false + // }) + + // Iterate through validators by power descending, reset bond heights, and + // update bond intra-tx counters. + // store := ctx.KVStore(app.keys[stakingtypes.StoreKey]) + // iter := sdk.KVStoreReversePrefixIterator(store, stakingtypes.ValidatorsKey) + // counter := int16(0) + + // for ; iter.Valid(); iter.Next() { + // addr := sdk.ValAddress(iter.Key()[1:]) + // validator, found := app.StakingKeeper.GetValidator(ctx, addr) + // if !found { + // panic("expected validator, not found") + // } + + // validator.UnbondingHeight = 0 + // if applyAllowedAddrs && !allowedAddrsMap[addr.String()] { + // validator.Jailed = true + // } + + // app.StakingKeeper.SetValidator(ctx, validator) + // counter++ + // } + + // iter.Close() + + // if _, err := app.StakingKeeper.ApplyAndReturnValidatorSetUpdates(ctx); err != nil { + // panic(err) + // } + + /* Handle slashing state. */ + + // reset start height on signing infos + app.SlashingKeeper.IterateValidatorSigningInfos( + ctx, + func(addr sdk.ConsAddress, info slashingtypes.ValidatorSigningInfo) (stop bool) { + info.StartHeight = 0 + app.SlashingKeeper.SetValidatorSigningInfo(ctx, addr, info) + return false + }, + ) +} + +// GetValidatorSet returns a slice of bonded validators. +func (app *App) GetValidatorSet(ctx sdk.Context) ([]tmtypes.GenesisValidator, error) { + cVals := app.ConsumerKeeper.GetAllCCValidator(ctx) + if len(cVals) == 0 { + return nil, fmt.Errorf("empty validator set") + } + + vals := []tmtypes.GenesisValidator{} + for _, v := range cVals { + vals = append(vals, tmtypes.GenesisValidator{Address: v.Address, Power: v.Power}) + } + return vals, nil +} diff --git a/app/consumer-democracy/genesis.go b/app/consumer-democracy/genesis.go new file mode 100644 index 0000000000..5bf0c1da80 --- /dev/null +++ b/app/consumer-democracy/genesis.go @@ -0,0 +1,21 @@ +package app + +import ( + "encoding/json" + + "github.com/cosmos/cosmos-sdk/codec" +) + +// The genesis state of the blockchain is represented here as a map of raw json +// messages key'd by a identifier string. +// The identifier is used to determine which module genesis information belongs +// to so it may be appropriately routed during init chain. +// Within this application default genesis information is retrieved from +// the ModuleBasicManager which populates json from each BasicModule +// object provided to it during init. +type GenesisState map[string]json.RawMessage + +// NewDefaultGenesisState generates the default state for the application. 
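+// The result is a map keyed by module name (e.g. "auth", "bank", and the CCV
+// consumer module), with each value produced by that module's DefaultGenesis.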
+func NewDefaultGenesisState(cdc codec.JSONCodec) GenesisState { + return ModuleBasics.DefaultGenesis(cdc) +} diff --git a/app/consumer/app.go b/app/consumer/app.go index 7f60c0dc47..7bf07e3851 100644 --- a/app/consumer/app.go +++ b/app/consumer/app.go @@ -347,7 +347,7 @@ func New( app.GetSubspace(slashingtypes.ModuleName), ) - // register slashing module StakingHooks to the consumer keeper + // register slashing module Slashing hooks to the consumer keeper app.ConsumerKeeper = *app.ConsumerKeeper.SetHooks(app.SlashingKeeper.Hooks()) consumerModule := ibcconsumer.NewAppModule(app.ConsumerKeeper) diff --git a/app/consumer/export.go b/app/consumer/export.go index 77ba45dad8..297072cf1c 100644 --- a/app/consumer/export.go +++ b/app/consumer/export.go @@ -12,8 +12,8 @@ import ( tmtypes "github.com/tendermint/tendermint/types" ) -// ExportAppStateAndValidators exports the state of the application for a genesis -// file. +// ExportAppStateAndValidators implements the simapp app interface +// by exporting the state of the application func (app *App) ExportAppStateAndValidators( forZeroHeight bool, jailAllowedAddrs []string, ) (servertypes.ExportedApp, error) { @@ -39,17 +39,19 @@ func (app *App) ExportAppStateAndValidators( if err != nil { return servertypes.ExportedApp{}, err } + return servertypes.ExportedApp{ AppState: appState, - Validators: validators, Height: height, + Validators: validators, ConsensusParams: app.BaseApp.GetConsensusParams(ctx), }, nil } // prepare for fresh start at zero height // NOTE zero height genesis is a temporary feature which will be deprecated -// in favour of export at a block height +// +// in favour of export at a block height func (app *App) prepForZeroHeightGenesis(ctx sdk.Context, jailAllowedAddrs []string) { /* Just to be safe, assert the invariants on current state. 
*/ diff --git a/cmd/interchain-security-cdd/main.go b/cmd/interchain-security-cdd/main.go new file mode 100644 index 0000000000..6b9bbffe60 --- /dev/null +++ b/cmd/interchain-security-cdd/main.go @@ -0,0 +1,32 @@ +package main + +import ( + "os" + + "github.com/cosmos/cosmos-sdk/server" + svrcmd "github.com/cosmos/cosmos-sdk/server/cmd" + app "github.com/cosmos/interchain-security/app/consumer-democracy" + "github.com/tendermint/spm/cosmoscmd" +) + +func main() { + rootCmd, _ := cosmoscmd.NewRootCmd( + app.AppName, + app.AccountAddressPrefix, + app.DefaultNodeHome, + app.AppName, + app.ModuleBasics, + app.New, + // this line is used by starport scaffolding # root/arguments + ) + + if err := svrcmd.Execute(rootCmd, app.DefaultNodeHome); err != nil { + switch e := err.(type) { + case server.ErrorCode: + os.Exit(e.Code) + + default: + os.Exit(1) + } + } +} diff --git a/docs/interchain-security/proto-docs.md b/docs/interchain-security/proto-docs.md index 41782ce80b..1c974e7106 100644 --- a/docs/interchain-security/proto-docs.md +++ b/docs/interchain-security/proto-docs.md @@ -22,7 +22,7 @@ - [UnbondingSequence](#interchain_security.ccv.consumer.v1.UnbondingSequence) - [interchain_security/ccv/provider/v1/provider.proto](#interchain_security/ccv/provider/v1/provider.proto) - - [CreateConsumerChainProposal](#interchain_security.ccv.provider.v1.CreateConsumerChainProposal) + - [ConsumerAdditionProposal](#interchain_security.ccv.provider.v1.ConsumerAdditionProposal) - [HandshakeMetadata](#interchain_security.ccv.provider.v1.HandshakeMetadata) - [Params](#interchain_security.ccv.provider.v1.Params) @@ -278,10 +278,10 @@ UnbondingSequence defines the genesis information for each unbonding packet sequ - + -### CreateConsumerChainProposal -CreateConsumerChainProposal is a governance proposal on the provider chain to spawn a new consumer chain. +### ConsumerAdditionProposal +ConsumerAdditionProposal is a governance proposal on the provider chain to spawn a new consumer chain. If it passes, then all validators on the provider chain are expected to validate the consumer chain at spawn time or get slashed. It is recommended that spawn time occurs after the proposal end time. diff --git a/docs/quality_assurance.md b/docs/quality_assurance.md index 0a95c47aad..aeef7c1af9 100644 --- a/docs/quality_assurance.md +++ b/docs/quality_assurance.md @@ -27,9 +27,9 @@ For an overview of the Interchain Security workflow, have a look at [the diagram | ID | Concern | Code Review | Automatic Tools | Unit Testing | | -- | ------- | ----------- | --------------- | ------------ | | 1.01 | Unexpected panics | `Scheduled` | `??` | `??` | -| 1.02 | Handling errors | `Scheduled` | `gosec` | `??` | +| 1.02 | Handling errors | `Scheduled` | `gosec` | `Partial coverage` | | 1.03 | Accessing store (setters, getters, iterators) | `Scheduled` | `??` | `Partial coverage` | -| 1.04 | Serialization / deserialization | `Scheduled` | `??` | `??` | +| 1.04 | Serialization / deserialization | `Scheduled` | `??` | `Partial coverage` | | 1.05 | Storage leaks | `Scheduled` | `NA` | `??` | ### Integration with IBC @@ -41,28 +41,28 @@ IBC packets: - MaturedVSCPacket - SlashPacketData -| ID | Concern | Code Review | Unit Testing | Diff. 
testing | Testnet | -| -- | ------- | ----------- | ------------ | ------------- | ------- | -| 2.01 | Create IBC clients | `Scheduled` (ibc-go team) | `Done` | `Future work` | `Scheduled` | -| 2.02 | Getting consumer `UnbondingPeriod` from IBC client | `Scheduled` (ibc-go team) | `??` | `NA` | `NA` | -| 2.03 | Create CCV channel (handshake) | `Scheduled` (ibc-go team) | `Done` | `Future work` | `Scheduled` | -| 2.04 | Sending IBC packets
- see `x/ccv/utils/utils.go:SendIBCPacket()` | `Scheduled` (ibc-go team) | `Done` | `Done` | `Scheduled` | -| 2.05 | Handling acknowledgments | `Scheduled` (ibc-go team) | `Partial coverage` | `Scheduled` | `Scheduled` | -| 2.06 | Handling timeouts | `Scheduled` (ibc-go team) | `Partial coverage` | `Future work` | `Scheduled` | -| 2.07 | **Handling IBC client expiration** | `Scheduled` (ibc-go team)
high priority | `??` | `Future work` | `Scheduled` |
-| 2.08 | ICS-20 channel creation | `Scheduled` (ibc-go team) | `??` | `Future work` | `Scheduled` |
-| 2.09 | ICS-20 transfer | `Scheduled` (ibc-go team) | `??` | `NA` | `Scheduled` |
-| 2.10 | Changes in IBC-GO testing suite | `Scheduled` (ibc-go team) | `NA` | `Partial coverage` | `NA` |
+| ID | Concern | Code Review | Unit Testing | E2E Testing | Diff. Testing | Testnet |
+| -- | ------- | ----------- | ------------ | ----------- | ------------- | ------- |
+| 2.01 | Create IBC clients | `Scheduled` (ibc-go team) | `Done` | `??` | `Future work` | `Scheduled` |
+| 2.02 | Getting consumer `UnbondingPeriod` from IBC client | `Scheduled` (ibc-go team) | `Done`, see `TestUnbondingTime` | `??` | `NA` | `NA` |
+| 2.03 | Create CCV channel (handshake) | `Scheduled` (ibc-go team) | `Done` | `NA` | `Future work` | `Scheduled` |
+| 2.04 | Sending IBC packets
- see `x/ccv/utils/utils.go:SendIBCPacket()` | `Scheduled` (ibc-go team) | `??` | `Done` | `Done` | `Scheduled` |
+| 2.05 | Handling acknowledgments | `Scheduled` (ibc-go team) | `Partial coverage` | `Partial coverage` | `Scheduled` | `Scheduled` |
+| 2.06 | Handling timeouts | `Scheduled` (ibc-go team) | `??` | `??` | `Future work` | `Scheduled` |
+| 2.07 | **Handling IBC client expiration** | `Scheduled` (ibc-go team)
high priority | `??` | `??` | `Future work` | `Scheduled` | +| 2.08 | ICS-20 channel creation | `Scheduled` (ibc-go team) | `??` | `??` |`Future work` | `Scheduled` | +| 2.09 | ICS-20 transfer | `Scheduled` (ibc-go team) | `??` | `??` | `NA` | `Scheduled` | +| 2.10 | Changes in IBC-GO testing suite | `Scheduled` (ibc-go team) | `NA` | `??` | `Partial coverage` | `NA` | ### Integration with Cosmos SDK A prerequisite of the code review is to open a PR with all the [SDK changes](https://github.com/cosmos/cosmos-sdk/tree/interchain-security-rebase) needed by Interchain Security. -| ID | Concern | Code Review | Unit Testing | Diff. testing | Testnet | -| -- | ------- | ----------- | ------------ | ------------- | ------- | -| 3.01 | Changes to staking module | `Scheduled` (sdk team) | `Partial coverage`
see [unbonding_test.go](../x/ccv/provider/unbonding_test.go)
redelegation and validator unbonding missing | `Partial coverage` | `Scheduled` | -| 3.02 | Changes to slashing module | `Scheduled` (sdk team) | `Done`
see [TestValidatorDowntime](../x/ccv/consumer/keeper/keeper_test.go#L345)
| `NA` | `Scheduled` | -| 3.03 | Changes to evidence module | `Scheduled` (sdk team) | `Done`
see [TestValidatorDoubleSigning](../x/ccv/consumer/keeper/keeper_test.go#L427)
| `NA` | `Scheduled` | +| ID | Concern | Code Review | Unit Testing | E2E Testing | Diff. Testing | Testnet | +| -- | ------- | ----------- | ------------ | ----------- | ------------- | ------- | +| 3.01 | Changes to staking module | `Scheduled` (sdk team) | `??` | `Partial coverage`
see [unbonding_test.go](../tests/e2e/unbonding_test.go)
redelegation could be expanded, validator unbonding missing | `Partial coverage` | `Scheduled` | +| 3.02 | Changes to slashing module | `Scheduled` (sdk team) | `??` | `Done`
see [TestValidatorDowntime](../tests/e2e/slashing_test.go#L502)
| `NA` | `Scheduled` | +| 3.03 | Changes to evidence module | `Scheduled` (sdk team) | `??` | `Done`
see [TestValidatorDoubleSigning](../tests/e2e/slashing_test.go#L584)
| `NA` | `Scheduled` | ### Provider Chain Correctness @@ -71,21 +71,21 @@ The main concern addressed in this section is the correctness of the provider ch - one single consumer chain; - multiple consumer chains. -| ID | Concern | Code Review | Unit Testing | Diff. testing | Testnet | Protocol audit | -| -- | ------- | ----------- | ------------ | ------------- | ------- | ----- | -| 4.01 | Liveness of undelegations
- unbonding delegation entries are eventually removed from `UnbondingDelegation` | `Scheduled` | `Done`
see [unbonding_test.go](../x/ccv/provider/unbonding_test.go) | `Done` | `Scheduled` | `Scheduled` | -| 4.02 | Liveness of redelegations
- redelegations entries are eventually removed from `Redelegations` | `Scheduled` | `Scheduled` | `Scheduled` | `Scheduled` | `Scheduled` | -| 4.03 | Liveness of validator unbondings
- unbonding validators with no delegations are eventually removed from `Validators` | `Scheduled` | `Scheduled` | `Done` | `Scheduled` | `Scheduled` | -| 4.04 | Unbonding operations (undelegations, redelegations, validator unbondings) should eventually complete even if the CCV channel is never established (due to error)
- expected outcome: the pending VSC packets eventually timeout, which leads to the consumer chain removal | `Scheduled` | `??` | `Future work` | `Scheduled` | `Scheduled`
high priority | -| 4.05 | Unbonding operations (undelegations, redelegations, validator unbondings) should eventually complete even if one of the clients expire
- expected outcome: the pending VSC packets eventually timeout, which leads to the consumer chain removal | `Scheduled` | `??` | `Future work` | `Scheduled` | `Scheduled`
high priority | -| 4.06 | A validator cannot get slashed more than once for double signing, regardless of how many times it double signs on different chains (consumers or provider) | `Scheduled` | `Done`
see [TestHandleSlashPacketErrors](../x/provider/keeper_test.go#L329) | `Done` | `Scheduled` | `NA` | -| 4.07 | A validator cannot get slashed multiple times for downtime on the same consumer chain without requesting to `Unjail` itself on the provider chain in between | `Scheduled` | `Done`
see [TestSendSlashPacket](../x/consumer/keeper_test.go#489)| `Partial coverage` | `Scheduled` | `NA` | -| 4.08 | A validator can be slashed multiple times for downtime on different chains | `Scheduled` | `Future work` | `NA` | `Scheduled` | `NA` | -| 4.09 | The provider chain can easily be restarted with IS enabled
- `ExportGenesis` & `InitGenesis` | `Scheduled` | `Future work` | `Future work` | `Scheduled` | `NA` | -| 4.10 | The provider chain's correctness is not affected by a consumer chain shutting down | `Scheduled` | `Future work` | `Future work` | `Scheduled` | `NA` | -| 4.11 | The provider chain can graciously handle a CCV packet timing out (without shuting down)
- expected outcome: consumer chain shuts down and its state in provider CCV module is removed | `Scheduled` | `Future work` | `Future work` | `Scheduled` | `NA` | -| 4.12 | The provider chain can graciously handle a `StopConsumerChainProposal`
- expected outcome: consumer chain shuts down and its state in provider CCV module is removed | `Scheduled` | `Done`
see [stop_consumer_test.go](../x/ccv/provider/stop_consumer_test.go) | `Future work` | `Scheduled` | `NA` | -| 4.13 | The provider chain can graciously handle a `SpawnConsumerChainProposal`
- expected outcome: a consumer chain is registered and a client is created | `Scheduled` |`Done`
see [TestCreateConsumerChainProposal](../x/ccv/provider/keeper/proposal_test.go#L44) | `Future work` | `Scheduled` | `NA` | +| ID | Concern | Code Review | Unit | E2e | Diff. Testing | Testnet | Protocol audit | +| -- | ------- | ----------- | ---- | --- | ------------- | ------- | -------------- | +| 4.01 | Liveness of undelegations
- unbonding delegation entries are eventually removed from `UnbondingDelegation` | `Scheduled` | `NA` | `Done`
see [here](../tests/e2e/unbonding_test.go) | `Done` | `Scheduled` | `Scheduled` | +| 4.02 | Liveness of redelegations
- redelegation entries are eventually removed from `Redelegations` | `NA` | `Scheduled` | `Scheduled` | `Scheduled` | `Scheduled` | `Scheduled` |
+| 4.03 | Liveness of validator unbondings
- unbonding validators with no delegations are eventually removed from `Validators` | `NA` | `Scheduled` | `Scheduled` | `Done` | `Scheduled` | `Scheduled` | +| 4.04 | Unbonding operations (undelegations, redelegations, validator unbondings) should eventually complete even if the CCV channel is never established (due to error)
- expected outcome: the pending VSC packets eventually time out, which leads to the consumer chain removal | `Scheduled` | `NA` | `??` | `Future work` | `Scheduled` | `Scheduled`
high priority | +| 4.05 | Unbonding operations (undelegations, redelegations, validator unbondings) should eventually complete even if one of the clients expire
- expected outcome: the pending VSC packets eventually time out, which leads to the consumer chain removal | `Scheduled` | `??` | `??` | `Future work` | `Scheduled` | `Scheduled`
high priority |
+| 4.06 | A validator cannot get slashed more than once for double signing, regardless of how many times it double signs on different chains (consumers or provider) | `Scheduled` | `NA` | `Done`
see [here](../tests/e2e/slashing_test.go#L317) | `Done` | `Scheduled` | `NA` | +| 4.07 | A validator cannot get slashed multiple times for downtime on the same consumer chain without requesting to `Unjail` itself on the provider chain in between | `Scheduled` | `NA` | `Done`
see [here](../tests/e2e/slashing_test.go#L642) | `Partial coverage` | `Scheduled` | `NA` |
+| 4.08 | A validator can be slashed multiple times for downtime on different chains | `Scheduled` | `NA` | `Future work` | `NA` | `Scheduled` | `NA` |
+| 4.09 | The provider chain can easily be restarted with IS enabled
- `ExportGenesis` & `InitGenesis` | `Scheduled` | `??` | `Future work` | `Future work` | `Scheduled` | `NA` |
+| 4.10 | The provider chain's correctness is not affected by a consumer chain shutting down | `Scheduled` | `NA` | `Future work` | `Future work` | `Scheduled` | `NA` |
+| 4.11 | The provider chain can graciously handle a CCV packet timing out (without shutting down)
- expected outcome: consumer chain shuts down and its state in provider CCV module is removed | `Scheduled` | `??` | `Future work` | `Future work` | `Scheduled` | `NA` | +| 4.12 | The provider chain can graciously handle a `ConsumerRemovalProposal`
- expected outcome: consumer chain shuts down and its state in provider CCV module is removed | `Scheduled` | `Done`
see [here](../x/ccv/provider/keeper/proposal_test.go#L313) | `NA` | `Future work` | `Scheduled` | `NA` | +| 4.13 | The provider chain can graciously handle a `ConsumerAdditionProposal`
- expected outcome: a consumer chain is registered and a client is created | `Scheduled` | `Done`
see [here](../x/ccv/provider/keeper/proposal_test.go#L31) | `NA` | `Future work` | `Scheduled` | `NA` | ### Interchain Security Protocol Correctness @@ -101,41 +101,41 @@ In addition, the implementation MUST guarantee the following [system properties] --- -| ID | Concern re. *Channel Uniqueness* | Code Review | Unit Testing | Diff. testing | Testnet | Protocol audit | -| -- | ------- | ----------- | ------------ | ------------- | ------- | ----- | -| 5.01 | `SpawnConsumerChainProposal(chainId)` should fail if a consumer with `chainId` is already registered | `Scheduled` | `??` | `NA` | `Scheduled` | `Scheduled`
high priority | -| 5.02 | The channel handshake for a consumer with `chainId` should fail if there is already an established CCV channel for `chainId` | `Scheduled` | `??` | `NA` | `Scheduled` | `Scheduled`
high priority | -| 5.03 | *Channel Uniqueness* should hold even if a consumer chain restarts | `Scheduled` | `??` | `NA` | `Scheduled` | `NA` | -| 5.04 | *Channel Uniqueness* should hold even when a client expires | `Scheduled` | `??` | `NA` | `Scheduled` | `NA` | +| ID | Concern re. *Channel Uniqueness* | Code Review | Unit Testing | E2e Testing | Diff. Testing | Testnet | Protocol audit | +| -- | -------------------------------- | ----------- | ------------ | ----------- | ------------- | ------- | -------------- | +| 5.01 | `HandleConsumerAdditionProposal()` should fail if a consumer with `chainId` is already registered | `Scheduled` | `DONE` see [here](../x/ccv/provider/keeper/proposal_test.go#L138) | `??` | `NA` | `Scheduled` | `Scheduled`
high priority | +| 5.02 | The channel handshake for a consumer with `chainId` should fail if there is already an established CCV channel for `chainId` | `Scheduled` | `DONE` see [here](../x/ccv/provider/ibc_module_test.go#L103) and [here](../x/ccv/consumer/ibc_module_test.go#L59) | `??` | `NA` | `Scheduled` | `Scheduled`
high priority | +| 5.03 | *Channel Uniqueness* should hold even if a consumer chain restarts | `Scheduled` | `NA` | `??` | `NA` | `Scheduled` | `NA` | +| 5.04 | *Channel Uniqueness* should hold even when a client expires | `Scheduled` | `??` | `NA` | `NA` | `Scheduled` | `NA` | --- -| ID | Concern re. *Validator Set Replication* | Code Review | Unit Testing | Diff. testing | Testnet | Protocol audit | -| -- | ------- | ----------- | ------------ | ------------- | ------- | ----- | -| 6.01 | Every validator set on any consumer chain MUST either be or have been a validator set on the provider chain. | `Scheduled` | `NA` | `Done` | `Scheduled` | `Scheduled` | -| 6.02 | Any update in the power of a validator `val` on the provider, as a result of
- (increase) `Delegate()` / `Redelegate()` to `val`
- (increase) `val` joining the provider validator set
- (decrease) `Undelegate()` / `Redelegate()` from `val`
- (decrease) `Slash(val)`
- (decrease) `val` leaving the provider validator set
MUST be present in a `ValidatorSetChangePacket` that is sent to all registered consumer chains | `Scheduled` | `NA` | `Done` | `Scheduled` | `Scheduled` | -| 6.03 | Every consumer chain receives the same sequence of `ValidatorSetChangePacket`s in the same order. | `Scheduled` | `NA` | `NA` | `Scheduled` | `Scheduled`
high priority | +| ID | Concern re. *Validator Set Replication* | Code Review | Unit Testing | E2e Testing | Diff. testing | Testnet | Protocol audit | +| -- | --------------------------------------- | ----------- | ------------ | ----------- | ------------- | ------- | -------------- | +| 6.01 | Every validator set on any consumer chain MUST either be or have been a validator set on the provider chain. | `Scheduled` | `NA` | `NA` | `Done` | `Scheduled` | `Scheduled` | +| 6.02 | Any update in the power of a validator `val` on the provider, as a result of
- (increase) `Delegate()` / `Redelegate()` to `val`
- (increase) `val` joining the provider validator set
- (decrease) `Undelegate()` / `Redelegate()` from `val`
- (decrease) `Slash(val)`
- (decrease) `val` leaving the provider validator set
MUST be present in a `ValidatorSetChangePacket` that is sent to all registered consumer chains | `Scheduled` | `NA` | `NA` | `Done` | `Scheduled` | `Scheduled` | +| 6.03 | Every consumer chain receives the same sequence of `ValidatorSetChangePacket`s in the same order. | `Scheduled` | `NA` | `NA` | `NA` | `Scheduled` | `Scheduled`
high priority | --- -| ID | Concern re. *Bond-Based Consumer Voting Power* | Code Review | Unit Testing | Diff. testing | Testnet | Protocol audit | -| -- | ------- | ----------- | ------------ | ------------- | ------- | ----- | -| 7.01 | For every `ValidatorSetChangePacket` received by a consumer chain at time `t`, a `MaturedVSCPacket` is sent back to the provider in the first block with a timestamp `>= t + UnbondingPeriod` | `Scheduled` | `Scheduled` | `Done` | `Scheduled` | `Scheduled` | -| 7.02 | If an unbonding operation resulted in a `ValidatorSetChangePacket` sent to all registered consumer chains, then it cannot complete before receiving matching `MaturedVSCPacket`s from these consumer chains (unless some of these consumer chains are removed) | `Scheduled` | `Scheduled` | `Done` | `Scheduled` | `Scheduled` | +| ID | Concern re. *Bond-Based Consumer Voting Power* | Code Review | Unit Testing | E2e Testing | Diff. Testing | Testnet | Protocol audit | +| -- | ---------------------------------------------- | ----------- | ------------ | ----------- | ------------- | ------- | -------------- | +| 7.01 | For every `ValidatorSetChangePacket` received by a consumer chain at time `t`, a `MaturedVSCPacket` is sent back to the provider in the first block with a timestamp `>= t + UnbondingPeriod` | `Scheduled` | `??` | `Scheduled` | `Done` | `Scheduled` | `Scheduled` | +| 7.02 | If an unbonding operation resulted in a `ValidatorSetChangePacket` sent to all registered consumer chains, then it cannot complete before receiving matching `MaturedVSCPacket`s from these consumer chains (unless some of these consumer chains are removed) | `Scheduled` | `??` | `Scheduled` | `Done` | `Scheduled` | `Scheduled` | --- -| ID | Concern re. *Slashable Consumer Misbehavior* | Code Review | Unit Testing | Diff. testing | Testnet | Protocol audit | -| -- | ------- | ----------- | ------------ | ------------- | ------- | ----- | -| 8.01 | Multiple downtime infractions committed by the same validator `val` on the same consumer chain without `val` requesting to `Unjail` itself result in a single `SlashPacket` | `Scheduled` | `??` | `Done` | `Scheduled` | `Scheduled` | -| 8.02 | If evidence of misbehavior is submitted on a consumer chain within the unbonding period targeting an amount `x` of staked tokens, the amount `x` cannot be unlocked on the provider before the corresponding `SlashPacket` is received
- `SlashPacket` will not arrive after the corresponding `MaturedVSCPacket`s | `Scheduled` | `??` | `Done` | `Scheduled` | `Scheduled` |
+| ID | Concern re. *Slashable Consumer Misbehavior* | Code Review | Unit Testing | E2e Testing | Diff. testing | Testnet | Protocol audit |
+| -- | -------------------------------------------- | ----------- | ------------ | ----------- | ------------- | ------- | -------------- |
+| 8.01 | Multiple downtime infractions committed by the same validator `val` on the same consumer chain without `val` requesting to `Unjail` itself result in a single `SlashPacket` | `Scheduled` | `??` | `??` | `Done` | `Scheduled` | `Scheduled` |
+| 8.02 | If evidence of misbehavior is submitted on a consumer chain within the unbonding period targeting an amount `x` of staked tokens, the amount `x` cannot be unlocked on the provider before the corresponding `SlashPacket` is received
- `SlashPacket` will not arrive after the corresponding `MaturedVSCPacket`s | `Scheduled` | `??` | `??` | `??` | `Done` | `Scheduled` | `Scheduled` | --- -| ID | Concern re. *Consumer Rewards Distribution* | Code Review | Unit Testing | Diff. testing | Testnet | Protocol audit | -| -- | ------- | ----------- | ------------ | ------------- | ------- | ----- | -| 9.01 | Validators on the provider chain receive rewards for participating in IS | `Scheduled` | `Scheduled` | `NA` | `Scheduled` | `NA` | -| 9.02 | The rewards sent to the provider chain are escrowed on the consumer chains (no double spend) | `Scheduled` | `Scheduled` | `NA` | `Scheduled` | `NA` | +| ID | Concern re. *Consumer Rewards Distribution* | Code Review | Unit Testing | E2e Testing | Diff. testing | Testnet | Protocol audit | +| -- | ------------------------------------------- | ----------- | ------------ | ----------- | ------------- | ------- | -------------- | +| 9.01 | Validators on the provider chain receive rewards for participating in IS | `Scheduled` | `NA` | `Scheduled` | `NA` | `Scheduled` | `NA` | +| 9.02 | The rewards sent to the provider chain are escrowed on the consumer chains (no double spend) | `Scheduled` | `NA` | `Scheduled` | `NA` | `Scheduled` | `NA` | --- @@ -146,14 +146,14 @@ The main concern addressed in this section is the correctness of the consumer ch - governance-enabled consumer chain ([gov-cc](https://github.com/cosmos/interchain-security/issues/141)), with the modified staking and distribution modules (see `x/ccv/staking` and `x/ccv/distribution`); also, must look at the [atom-gov module](https://github.com/cosmos/interchain-security/issues/162) - CosmWasm-enabled consumer chain ([wasm-cc](https://github.com/cosmos/interchain-security/issues/143)), with the CosmWasm module enabled -| ID | Concern | Code Review | Unit Testing | Diff. testing | Testnet | Protocol audit | -| -- | ------- | ----------- | ------------ | ------------- | ------- | ----- | -| 10.01 | Consumer chain liveness (blocks are being produced) | `Scheduled` | `NA` | `??` | `Scheduled` | `NA` | -| 10.02 | A chain has the ability to restart as a consumer chain with no more than 24 hours downtime | `Scheduled` | `NA` | `??` | `Scheduled` | `NA` | -| 10.03 | A consumer chain has the ability to restart as a normal chain after shutting down, either controlled (via `StopConsumerChainProposal`) or due to timing out | `Scheduled` | `??` | `??` | `Scheduled` | `NA` | -| 10.04 | A consumer chain has the ability to restart as a consumer chain with the same `chainId` after shutting down, either controlled (via `StopConsumerChainProposal`) or due to timing out | `Scheduled` | `??` | `??` | `Scheduled` | `NA` | -| 10.05 | Governance on `gov-cc` | `Scheduled` | `??` | `??` | `Scheduled` | `NA` | -| 10.06 | CosmWasm on `wasm-cc` | `Scheduled` | `??` | `??` | `Scheduled` | `NA` | +| ID | Concern | Code Review | Unit Testing | E2e Testing | Diff. 
testing | Testnet | Protocol audit | +| -- | ------- | ----------- | ------------ | ----------- | ------------- | ------- | -------------- | +| 10.01 | Consumer chain liveness (blocks are being produced) | `Scheduled` | `NA` | `NA` | `??` | `Scheduled` | `NA` | +| 10.02 | A chain has the ability to restart as a consumer chain with no more than 24 hours downtime | `Scheduled` | `NA` | `NA` | `??` | `Scheduled` | `NA` | +| 10.03 | A consumer chain has the ability to restart as a normal chain after shutting down, either controlled (via `ConsumerRemovalProposal`) or due to timing out | `Scheduled` | `??` | `??` | `??` | `Scheduled` | `NA` | +| 10.04 | A consumer chain has the ability to restart as a consumer chain with the same `chainId` after shutting down, either controlled (via `ConsumerRemovalProposal`) or due to timing out | `Scheduled` | `??` | `??` | `??` | `Scheduled` | `NA` | +| 10.05 | Governance on `gov-cc` | `Scheduled` | `??` | `??` | `??` | `Scheduled` | `NA` | +| 10.06 | CosmWasm on `wasm-cc` | `Scheduled` | `??` | `??` | `??` | `Scheduled` | `NA` | | TBA ... > TODO create clear concerns for `gov-cc` and `wasm-cc` once the implementations are done diff --git a/go.mod b/go.mod index 51b04f0d32..20df7e5aab 100644 --- a/go.mod +++ b/go.mod @@ -31,6 +31,7 @@ require ( require ( github.com/golang/mock v1.6.0 + github.com/oxyno-zeta/gomock-extra-matcher v1.1.0 github.com/regen-network/cosmos-proto v0.3.1 ) diff --git a/go.sum b/go.sum index 9f5bba4985..8c90862b34 100644 --- a/go.sum +++ b/go.sum @@ -232,8 +232,6 @@ github.com/cosmos/btcutil v1.0.4/go.mod h1:Ffqc8Hn6TJUdDgHBwIZLtrLQC1KdJ9jGJl/Tv github.com/cosmos/cosmos-sdk v0.44.2/go.mod h1:fwQJdw+aECatpTvQTo1tSfHEsxACdZYU80QCZUPnHr4= github.com/cosmos/cosmos-sdk v0.44.3/go.mod h1:bA3+VenaR/l/vDiYzaiwbWvRPWHMBX2jG0ygiFtiBp0= github.com/cosmos/cosmos-sdk v0.45.0/go.mod h1:XXS/asyCqWNWkx2rW6pSuen+EVcpAFxq6khrhnZgHaQ= -github.com/cosmos/cosmos-sdk v0.45.2-0.20220811130336-846d0158765e h1:aKKTrqI9mNCQpLkul4S6BHdWYrdNrFNhiHfy2Oh2yhM= -github.com/cosmos/cosmos-sdk v0.45.2-0.20220811130336-846d0158765e/go.mod h1:XXS/asyCqWNWkx2rW6pSuen+EVcpAFxq6khrhnZgHaQ= github.com/cosmos/cosmos-sdk v0.45.2-0.20220901181011-06d4a64bf808 h1:PW5p0/qt5iJZS7f4bDjo/OYhMhzlmCewh8PZrpBluxo= github.com/cosmos/cosmos-sdk v0.45.2-0.20220901181011-06d4a64bf808/go.mod h1:XXS/asyCqWNWkx2rW6pSuen+EVcpAFxq6khrhnZgHaQ= github.com/cosmos/go-bip39 v0.0.0-20180819234021-555e2067c45d/go.mod h1:tSxLoYXyBmiFeKpvmq4dzayMdCjCnu8uqmCysIGBT2Y= @@ -865,6 +863,8 @@ github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJ github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs= github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo= github.com/otiai10/mint v1.3.2/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc= +github.com/oxyno-zeta/gomock-extra-matcher v1.1.0 h1:Yyk5ov0ZPKBXtVEeIWtc4J2XVrHuNoIK+0F2BUJgtsc= +github.com/oxyno-zeta/gomock-extra-matcher v1.1.0/go.mod h1:UMGTHYEmJ1dRq8LDZ7VTAYO4nqM3GD1UGC3RJEUxEz0= github.com/pact-foundation/pact-go v1.0.4/go.mod h1:uExwJY4kCzNPcHRj+hCR/HBbOOIwwtUjcrb0b5/5kLM= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY= diff --git a/proto/buf.lock b/proto/buf.lock index 755e2cd76b..68c709a8d9 100644 --- a/proto/buf.lock +++ b/proto/buf.lock @@ -4,14 +4,8 @@ deps: - remote: buf.build owner: cosmos repository: 
gogo-proto - branch: main commit: bee5511075b7499da6178d9e4aaa628b - digest: b1-rrBIustouD-S80cVoZ_rM0qJsmei9AgbXy9GPQu6vxg= - create_time: 2021-12-02T20:01:17.069307Z - remote: buf.build owner: googleapis repository: googleapis - branch: main - commit: 40f07f5b563941f2b20b991a7aedd53d - digest: b1-Iv8fTR4AKXwNW80Ey6K5tY8cP053y_95sB5fro9IWZo= - create_time: 2021-12-02T15:07:41.896892Z + commit: 62f35d8aed1149c291d606d958a7ce32 diff --git a/proto/interchain_security/ccv/consumer/v1/consumer.proto b/proto/interchain_security/ccv/consumer/v1/consumer.proto index 8f095cfcfc..57d0f0fbd3 100644 --- a/proto/interchain_security/ccv/consumer/v1/consumer.proto +++ b/proto/interchain_security/ccv/consumer/v1/consumer.proto @@ -10,9 +10,10 @@ import "cosmos/staking/v1beta1/staking.proto"; import "gogoproto/gogo.proto"; import "cosmos_proto/cosmos.proto"; - // Params defines the parameters for CCV consumer module message Params { + // TODO: Remove enabled flag and find a better way to setup e2e tests + // See: https://github.com/cosmos/interchain-security/issues/339 bool enabled = 1; /////////////////////// @@ -27,22 +28,22 @@ message Params { // transfers over. These parameters is auto-set during the consumer <-> // provider handshake procedure. string distribution_transmission_channel = 3; - string provider_fee_pool_addr_str = 4; + string provider_fee_pool_addr_str = 4; } // LastTransmissionBlockHeight is the last time validator holding // pools were transmitted to the provider chain -message LastTransmissionBlockHeight { - int64 height = 1; -} +message LastTransmissionBlockHeight { int64 height = 1; } // CrossChainValidator defines the validators for CCV consumer module message CrossChainValidator { bytes address = 1; - int64 power = 2; + int64 power = 2; // pubkey is the consensus public key of the validator, as a Protobuf Any. - google.protobuf.Any pubkey = 3 - [(cosmos_proto.accepts_interface) = "cosmos.crypto.PubKey", (gogoproto.moretags) = "yaml:\"consensus_pubkey\""]; + google.protobuf.Any pubkey = 3 [ + (cosmos_proto.accepts_interface) = "cosmos.crypto.PubKey", + (gogoproto.moretags) = "yaml:\"consensus_pubkey\"" + ]; } // SlashRequest defines a slashing request for CCV consumer module @@ -50,3 +51,9 @@ message SlashRequest { interchain_security.ccv.v1.SlashPacketData packet = 1; cosmos.staking.v1beta1.InfractionType infraction = 2; } + +// SlashRequests is a list of slash requests for CCV consumer module +message SlashRequests { + repeated SlashRequest requests = 1 + [ (gogoproto.nullable) = false ]; +} diff --git a/proto/interchain_security/ccv/consumer/v1/genesis.proto b/proto/interchain_security/ccv/consumer/v1/genesis.proto index 171c145f71..c71aecd64a 100644 --- a/proto/interchain_security/ccv/consumer/v1/genesis.proto +++ b/proto/interchain_security/ccv/consumer/v1/genesis.proto @@ -12,20 +12,45 @@ import "tendermint/abci/types.proto"; // GenesisState defines the CCV consumer chain genesis state message GenesisState { - Params params = 1 [(gogoproto.nullable) = false]; - string provider_client_id = 2; // empty for a completely new chain - string provider_channel_id = 3; // empty for a completely new chain - bool new_chain = 4; // true for new chain GenesisState, false for chain restart. + Params params = 1 [ (gogoproto.nullable) = false ]; + string provider_client_id = 2; // empty for a completely new chain + string provider_channel_id = 3; // empty for a completely new chain + bool new_chain = 4; // true for new chain GenesisState, false for chain restart. 
// ProviderClientState filled in on new chain, nil on restart. ibc.lightclients.tendermint.v1.ClientState provider_client_state = 5; // ProviderConsensusState filled in on new chain, nil on restart. ibc.lightclients.tendermint.v1.ConsensusState provider_consensus_state = 6; - repeated MaturingVSCPacket maturing_packets = 7 [(gogoproto.nullable) = false]; - repeated .tendermint.abci.ValidatorUpdate initial_val_set = 8 [(gogoproto.nullable) = false]; + // MaturingPackets nil on new chain, filled on restart. + repeated MaturingVSCPacket maturing_packets = 7 + [ (gogoproto.nullable) = false ]; + // InitialValset filled in on new chain and on restart. + repeated .tendermint.abci.ValidatorUpdate initial_val_set = 8 + [ (gogoproto.nullable) = false ]; + // HeightToValsetUpdateId nil on new chain, filled on restart. + repeated HeightToValsetUpdateID height_to_valset_update_id = 9 + [ (gogoproto.nullable) = false ]; + // OutstandingDowntimes nil on new chain, filled on restart. + repeated OutstandingDowntime outstanding_downtime_slashing = 10 + [ (gogoproto.nullable) = false ]; + // PendingSlashRequests filled in on new chain, nil on restart. + interchain_security.ccv.consumer.v1.SlashRequests pending_slash_requests = 11 + [ (gogoproto.nullable) = false ]; } -// UnbondingSequence defines the genesis information for each unbonding packet sequence. +// MaturingVSCPacket defines the genesis information for the +// unbonding VSC packet message MaturingVSCPacket { - uint64 vscId = 1; - uint64 maturity_time = 2; + uint64 vscId = 1; + uint64 maturity_time = 2; } + +// HeightValsetUpdateID defines the genesis information for the mapping +// of each block height to a valset update id +message HeightToValsetUpdateID { + uint64 height = 1; + uint64 valset_update_id = 2; +} + +// OutstandingDowntime defines the genesis information for each validator +// flagged with an outstanding downtime slashing. 
+message OutstandingDowntime { string validator_consensus_address = 1; } \ No newline at end of file diff --git a/proto/interchain_security/ccv/provider/v1/genesis.proto b/proto/interchain_security/ccv/provider/v1/genesis.proto index e26c5d4a48..dedfc72120 100644 --- a/proto/interchain_security/ccv/provider/v1/genesis.proto +++ b/proto/interchain_security/ccv/provider/v1/genesis.proto @@ -7,15 +7,72 @@ option go_package = "github.com/cosmos/interchain-security/x/ccv/provider/types" import "gogoproto/gogo.proto"; import "interchain_security/ccv/v1/ccv.proto"; import "interchain_security/ccv/provider/v1/provider.proto"; +import "interchain_security/ccv/consumer/v1/consumer.proto"; +import "interchain_security/ccv/consumer/v1/genesis.proto"; + // GenesisState defines the CCV provider chain genesis state message GenesisState { - repeated ConsumerState consumer_states = 1 [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"consumer_states\""]; - Params params = 2 [(gogoproto.nullable) = false]; + // empty for a new chain + uint64 valset_update_id = 1; + // empty for a new chain + repeated ConsumerState consumer_states = 2 [ + (gogoproto.nullable) = false, + (gogoproto.moretags) = "yaml:\"consumer_states\"" + ]; + // empty for a new chain + repeated interchain_security.ccv.v1.UnbondingOp unbonding_ops = 3 + [ (gogoproto.nullable) = false ]; + // empty for a new chain + interchain_security.ccv.v1.MaturedUnbondingOps mature_unbonding_ops = 4; + // empty for a new chain + repeated ValsetUpdateIdToHeight valset_update_id_to_height = 5 + [ (gogoproto.nullable) = false ]; + // empty for a new chain + repeated ConsumerAdditionProposal consumer_addition_proposals = 6 + [ (gogoproto.nullable) = false ]; + // empty for a new chain + repeated ConsumerRemovalProposal consumer_removal_proposals = 7 + [ (gogoproto.nullable) = false ]; + Params params = 8 + [ (gogoproto.nullable) = false ]; } -// ConsumerState defines the state that the provider chain stores for each consumer chain +// consumer chain message ConsumerState { - string chain_id = 1; - string channel_id = 2; + // ChannelID defines the chain ID for the consumer chain + string chain_id = 1; + // ChannelID defines the IBC channel ID for the consumer chain + string channel_id = 2; + // ClientID defines the IBC client ID for the consumer chain + string client_id = 3; + // InitalHeight defines the initial block height for the consumer chain + uint64 initial_height = 4; + // LockUnbondingOnTimeout defines whether the unbonding funds should be released for this + // chain in case of a IBC channel timeout + bool lock_unbonding_on_timeout = 5; + // ConsumerGenesis defines the initial consumer chain genesis states + interchain_security.ccv.consumer.v1.GenesisState consumer_genesis = 6 + [ (gogoproto.nullable) = false ]; + // PendingValsetChanges defines the pending validator set changes for the consumer chain + repeated interchain_security.ccv.v1.ValidatorSetChangePacketData pending_valset_changes = 7 + [ (gogoproto.nullable) = false ]; + repeated string slash_downtime_ack = 8; + // UnbondingOpsIndex defines the unbonding operations on the consumer chain + repeated UnbondingOpIndex unbonding_ops_index = 9 + [ (gogoproto.nullable) = false ]; +} + +// UnbondingOpIndex defines the genesis information for each unbonding operations index +// referenced by chain id and valset udpate id +message UnbondingOpIndex { + uint64 valset_update_id = 1; + repeated uint64 unbonding_op_index = 2; +} + +// ValsetUpdateIdToHeight defines the genesis information for 
the mapping +// of each valset udpate id to a block height +message ValsetUpdateIdToHeight { + uint64 valset_update_id = 1; + uint64 height = 2; } diff --git a/proto/interchain_security/ccv/provider/v1/provider.proto b/proto/interchain_security/ccv/provider/v1/provider.proto index 0d21a6a10b..0c0beb2bb3 100644 --- a/proto/interchain_security/ccv/provider/v1/provider.proto +++ b/proto/interchain_security/ccv/provider/v1/provider.proto @@ -9,10 +9,10 @@ import "google/protobuf/timestamp.proto"; import "ibc/core/client/v1/client.proto"; import "ibc/lightclients/tendermint/v1/tendermint.proto"; -// CreateConsumerChainProposal is a governance proposal on the provider chain to spawn a new consumer chain. +// ConsumerAdditionProposal is a governance proposal on the provider chain to spawn a new consumer chain. // If it passes, then all validators on the provider chain are expected to validate the consumer chain at spawn time // or get slashed. It is recommended that spawn time occurs after the proposal end time. -message CreateConsumerChainProposal { +message ConsumerAdditionProposal { option (gogoproto.goproto_getters) = false; option (gogoproto.goproto_stringer) = false; @@ -40,10 +40,10 @@ message CreateConsumerChainProposal { bool lock_unbonding_on_timeout = 8; } -// StopConsumerProposal is a governance proposal on the provider chain to stop a consumer chain. +// ConsumerRemovalProposal is a governance proposal on the provider chain to remove (and stop) a consumer chain. // If it passes, all the consumer chain's state is removed from the provider chain. The outstanding unbonding // operation funds are released if the LockUnbondingOnTimeout parameter is set to false for the consumer chain ID. - message StopConsumerChainProposal { + message ConsumerRemovalProposal { // the title of the proposal string title = 1; // the description of the proposal @@ -62,5 +62,11 @@ message Params { message HandshakeMetadata { string provider_fee_pool_addr = 1; - string version = 2; + string version = 2; } + +// SlashAcks contains addesses of consumer chain validators +// successfully slashed on the provider chain +message SlashAcks { + repeated string addresses = 1; +} \ No newline at end of file diff --git a/proto/interchain_security/ccv/provider/v1/query.proto b/proto/interchain_security/ccv/provider/v1/query.proto index 559049c5ce..70f36e2967 100644 --- a/proto/interchain_security/ccv/provider/v1/query.proto +++ b/proto/interchain_security/ccv/provider/v1/query.proto @@ -8,17 +8,18 @@ import "gogoproto/gogo.proto"; import "interchain_security/ccv/consumer/v1/genesis.proto"; service Query { - // ConsumerGenesis queries the genesis state needed to start a consumer chain whose proposal - // has been accepted - rpc ConsumerGenesis(QueryConsumerGenesisRequest) returns (QueryConsumerGenesisResponse) { - option (google.api.http).get = "/interchain_security/ccv/provider/consumer_genesis/{chain_id}"; + // ConsumerGenesis queries the genesis state needed to start a consumer chain + // whose proposal has been accepted + rpc ConsumerGenesis(QueryConsumerGenesisRequest) + returns (QueryConsumerGenesisResponse) { + option (google.api.http).get = + "/interchain_security/ccv/provider/consumer_genesis/{chain_id}"; } } -message QueryConsumerGenesisRequest { - string chain_id = 1; -} +message QueryConsumerGenesisRequest { string chain_id = 1; } message QueryConsumerGenesisResponse { - interchain_security.ccv.consumer.v1.GenesisState genesis_state = 1 [(gogoproto.nullable) = false]; + 
interchain_security.ccv.consumer.v1.GenesisState genesis_state = 1 + [ (gogoproto.nullable) = false ]; } diff --git a/proto/interchain_security/ccv/v1/ccv.proto b/proto/interchain_security/ccv/v1/ccv.proto index fd7c64f96d..23729db928 100644 --- a/proto/interchain_security/ccv/v1/ccv.proto +++ b/proto/interchain_security/ccv/v1/ccv.proto @@ -9,13 +9,16 @@ import "cosmos/staking/v1beta1/staking.proto"; import "gogoproto/gogo.proto"; import "tendermint/abci/types.proto"; -// This packet is sent from provider chain to consumer chain if the validator set for consumer chain -// changes (due to new bonding/unbonding messages or slashing events) -// A VSCMatured packet from consumer chain will be sent asynchronously once unbonding period is over, -// and this will function as `UnbondingOver` message for this packet. +// This packet is sent from provider chain to consumer chain if the validator +// set for consumer chain changes (due to new bonding/unbonding messages or +// slashing events) A VSCMatured packet from consumer chain will be sent +// asynchronously once unbonding period is over, and this will function as +// `UnbondingOver` message for this packet. message ValidatorSetChangePacketData { - repeated .tendermint.abci.ValidatorUpdate validator_updates = 1 - [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"validator_updates\""]; + repeated .tendermint.abci.ValidatorUpdate validator_updates = 1 [ + (gogoproto.nullable) = false, + (gogoproto.moretags) = "yaml:\"validator_updates\"" + ]; uint64 valset_update_id = 2; // consensus address of consumer chain validators // successfully slashed on the provider chain @@ -31,18 +34,30 @@ message UnbondingOp { // This packet is sent from the consumer chain to the provider chain // to notify that a VSC packet reached maturity on the consumer chain. message VSCMaturedPacketData { - // the id of the VSC packet that reached maturity + // the id of the VSC packet that reached maturity uint64 valset_update_id = 1; } -// This packet is sent from the consumer chain to the provider chain -// to request the slashing of a validator as a result of an infraction -// committed on the consumer chain. +// This packet is sent from the consumer chain to the provider chain +// to request the slashing of a validator as a result of an infraction +// committed on the consumer chain. message SlashPacketData { - tendermint.abci.Validator validator = 1 - [(gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"validator\""]; + tendermint.abci.Validator validator = 1 [ + (gogoproto.nullable) = false, + (gogoproto.moretags) = "yaml:\"validator\"" + ]; // map to the infraction block height on the provider uint64 valset_update_id = 2; // tell if the slashing is for a downtime or a double-signing infraction cosmos.staking.v1beta1.InfractionType infraction = 3; } + +// UnbondingOpsIndex defines a list of unbonding operation ids. +message UnbondingOpsIndex { + repeated uint64 ids = 1; +} + +// MaturedUnbondingOps defines a list of ids corresponding to ids of matured unbonding operations. +message MaturedUnbondingOps { + repeated uint64 ids = 1; +} diff --git a/scripts/protocgen.sh b/scripts/protocgen.sh index 7840db031a..1f77331450 100644 --- a/scripts/protocgen.sh +++ b/scripts/protocgen.sh @@ -19,4 +19,4 @@ cd .. 
cp -r github.com/cosmos/interchain-security/* ./ rm -rf github.com -go mod tidy -compat=1.17 +go mod tidy -compat=1.18 diff --git a/tests/difference/core/driver/setup.go b/tests/difference/core/driver/setup.go index 0e14d1adea..3228c3e802 100644 --- a/tests/difference/core/driver/setup.go +++ b/tests/difference/core/driver/setup.go @@ -477,7 +477,7 @@ func (b *Builder) createConsumerGenesis(tmConfig *ibctesting.TendermintConfig) * "", // ignore distribution "", // ignore distribution ) - return consumertypes.NewInitialGenesisState(providerClient, providerConsState, valUpdates, params) + return consumertypes.NewInitialGenesisState(providerClient, providerConsState, valUpdates, consumertypes.SlashRequests{}, params) } func (b *Builder) createLink() { diff --git a/tests/e2e/channel_init_test.go b/tests/e2e/channel_init_test.go index 46f62b0e49..c319fda16f 100644 --- a/tests/e2e/channel_init_test.go +++ b/tests/e2e/channel_init_test.go @@ -6,42 +6,19 @@ import ( app "github.com/cosmos/interchain-security/app/consumer" - "fmt" - - ibctypes "github.com/cosmos/ibc-go/v3/modules/core/03-connection/types" - clienttmtypes "github.com/cosmos/ibc-go/v3/modules/light-clients/07-tendermint/types" - - consumertypes "github.com/cosmos/interchain-security/x/ccv/consumer/types" - providerkeeper "github.com/cosmos/interchain-security/x/ccv/provider/keeper" - providertypes "github.com/cosmos/interchain-security/x/ccv/provider/types" tmtypes "github.com/tendermint/tendermint/types" - "github.com/cosmos/interchain-security/x/ccv/utils" - channeltypes "github.com/cosmos/ibc-go/v3/modules/core/04-channel/types" - host "github.com/cosmos/ibc-go/v3/modules/core/24-host" - - "encoding/json" - "time" appConsumer "github.com/cosmos/interchain-security/app/consumer" - "github.com/cosmos/interchain-security/x/ccv/consumer" ccv "github.com/cosmos/interchain-security/x/ccv/types" abci "github.com/tendermint/tendermint/abci/types" - crypto "github.com/tendermint/tendermint/proto/tendermint/crypto" - sdk "github.com/cosmos/cosmos-sdk/types" - distributiontypes "github.com/cosmos/cosmos-sdk/x/distribution/types" - govtypes "github.com/cosmos/cosmos-sdk/x/gov/types" clienttypes "github.com/cosmos/ibc-go/v3/modules/core/02-client/types" - ibctesting "github.com/cosmos/ibc-go/v3/testing" - appProvider "github.com/cosmos/interchain-security/app/provider" - "github.com/cosmos/interchain-security/x/ccv/provider" - "github.com/cosmos/interchain-security/x/ccv/provider/types" ) -func (suite *ConsumerKeeperTestSuite) TestConsumerGenesis() { +func (suite *CCVTestSuite) TestConsumerGenesis() { genesis := suite.consumerChain.App.(*app.App).ConsumerKeeper.ExportGenesis(suite.consumerChain.GetContext()) suite.Require().Equal(suite.providerClient, genesis.ProviderClientState) @@ -90,19 +67,16 @@ func (suite *ConsumerKeeperTestSuite) TestConsumerGenesis() { ccv.ConsumerPortID, suite.path.EndpointA.ChannelID, clienttypes.NewHeight(1, 0), 0) suite.consumerChain.App.(*app.App).ConsumerKeeper.OnRecvVSCPacket(suite.consumerChain.GetContext(), packet, pd) - - // mocking the fact that consumer chain validators should be provider chain validators - // TODO: Fix testing suite so we can initialize both chains with the same validator set valUpdates := tmtypes.TM2PB.ValidatorUpdates(suite.providerChain.Vals) restartGenesis := suite.consumerChain.App.(*app.App).ConsumerKeeper.ExportGenesis(suite.consumerChain.GetContext()) - restartGenesis.InitialValSet = valUpdates + suite.Require().Equal(valUpdates, restartGenesis.InitialValSet) // ensure reset 
genesis is set correctly providerChannel := suite.path.EndpointA.ChannelID suite.Require().Equal(providerChannel, restartGenesis.ProviderChannelId) maturityTime := suite.consumerChain.App.(*app.App).ConsumerKeeper.GetPacketMaturityTime(suite.consumerChain.GetContext(), 1) - unbondingPeriod, found := suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper.GetUnbondingTime(suite.ctx) + unbondingPeriod, found := suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper.GetUnbondingTime(suite.consumerCtx()) suite.Require().True(found) suite.Require().Equal(uint64(origTime.Add(unbondingPeriod).UnixNano()), maturityTime, "maturity time is not set correctly in genesis") @@ -110,747 +84,12 @@ func (suite *ConsumerKeeperTestSuite) TestConsumerGenesis() { suite.consumerChain.App.(*app.App).ConsumerKeeper.InitGenesis(suite.consumerChain.GetContext(), restartGenesis) }) } -func (suite *ConsumerTestSuite) TestOnChanOpenInit() { - var ( - channel *channeltypes.Channel - ) - - testCases := []struct { - name string - malleate func() - expPass bool - }{ - - { - "success", func() {}, true, - }, - { - "invalid: provider channel already established", func() { - suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper.SetProviderChannel(suite.ctx, "channel-2") - }, false, - }, - { - "invalid: UNORDERED channel", func() { - channel.Ordering = channeltypes.UNORDERED - }, false, - }, - { - "invalid port ID", func() { - suite.path.EndpointA.ChannelConfig.PortID = ibctesting.MockPort - }, false, - }, - { - "invalid version", func() { - channel.Version = "version" - }, false, - }, - { - "invalid counter party port ID", func() { - channel.Counterparty.PortId = ibctesting.MockPort - }, false, - }, - { - "invalid: verify provider chain failed", func() { - // setup a new path with provider client on consumer chain being different from genesis client - path := ibctesting.NewPath(suite.consumerChain, suite.providerChain) - // - channel config - path.EndpointA.ChannelConfig.PortID = ccv.ConsumerPortID - path.EndpointB.ChannelConfig.PortID = ccv.ProviderPortID - path.EndpointA.ChannelConfig.Version = ccv.Version - path.EndpointB.ChannelConfig.Version = ccv.Version - path.EndpointA.ChannelConfig.Order = channeltypes.ORDERED - path.EndpointB.ChannelConfig.Order = channeltypes.ORDERED - - // create consumer client on provider chain, and provider client on consumer chain - providerUnbondingPeriod := suite.providerChain.App.(*appProvider.App).GetStakingKeeper().UnbondingTime(suite.providerChain.GetContext()) - consumerUnbondingPeriod := utils.ComputeConsumerUnbondingPeriod(providerUnbondingPeriod) - err := suite.createCustomClient(path.EndpointB, consumerUnbondingPeriod) - suite.Require().NoError(err) - err = suite.createCustomClient(path.EndpointA, providerUnbondingPeriod) - suite.Require().NoError(err) - - suite.coordinator.CreateConnections(path) - suite.path = path - channel.ConnectionHops = []string{suite.path.EndpointA.ConnectionID} - }, false, - }, - } - - for _, tc := range testCases { - tc := tc - - suite.Run(tc.name, func() { - suite.SetupTest() // reset - - suite.path.EndpointA.ChannelID = ibctesting.FirstChannelID - - counterparty := channeltypes.NewCounterparty(suite.path.EndpointB.ChannelConfig.PortID, "") - channel = &channeltypes.Channel{ - State: channeltypes.INIT, - Ordering: channeltypes.ORDERED, - Counterparty: counterparty, - ConnectionHops: []string{suite.path.EndpointA.ConnectionID}, - Version: ccv.Version, - } - - consumerModule := 
consumer.NewAppModule(suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper) - chanCap, err := suite.consumerChain.App.GetScopedIBCKeeper().NewCapability( - suite.ctx, - host.ChannelCapabilityPath( - ccv.ConsumerPortID, - suite.path.EndpointA.ChannelID, - ), - ) - suite.Require().NoError(err) - - tc.malleate() // explicitly change fields in channel and testChannel - - err = consumerModule.OnChanOpenInit( - suite.ctx, - channel.Ordering, - channel.GetConnectionHops(), - suite.path.EndpointA.ChannelConfig.PortID, - suite.path.EndpointA.ChannelID, - chanCap, - channel.Counterparty, - channel.GetVersion(), - ) - - if tc.expPass { - suite.Require().NoError(err) - } else { - suite.Require().Error(err) - } - - }) - } -} - -func (suite *ConsumerTestSuite) TestOnChanOpenTry() { - // OnOpenTry must error even with correct arguments - consumerModule := consumer.NewAppModule(suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper) - _, err := consumerModule.OnChanOpenTry( - suite.ctx, - channeltypes.ORDERED, - []string{"connection-1"}, - ccv.ConsumerPortID, - "channel-1", - nil, - channeltypes.NewCounterparty(ccv.ProviderPortID, "channel-1"), - ccv.Version, - ) - suite.Require().Error(err, "OnChanOpenTry callback must error on consumer chain") -} - -// TestOnChanOpenAck tests the consumer module's OnChanOpenAck implementation against the spec: -// https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-ccf-coack1 -func (suite *ConsumerTestSuite) TestOnChanOpenAck() { - - var ( - portID string - channelID string - metadataBz []byte - metadata providertypes.HandshakeMetadata - err error - ) - testCases := []struct { - name string - malleate func() - expPass bool - }{ - { - "success", func() {}, true, - }, - { - "invalid: provider channel already established", - func() { - suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper.SetProviderChannel(suite.ctx, "channel-2") - }, false, - }, - { - "invalid: cannot unmarshal ack metadata ", - func() { - metadataBz = []byte{78, 89, 20} - }, false, - }, - { - "invalid: mismatched versions", - func() { - // Set counter party version to an invalid value, passed as marshaled metadata - metadata.Version = "invalidVersion" - metadataBz, err = (&metadata).Marshal() - suite.Require().NoError(err) - }, false, - }, - // See ConsumerKeeper.GetConnectionHops as to why portID and channelID must be correct - { - "invalid: portID ", - func() { - portID = "invalidPort" - }, false, - }, - { - "invalid: channelID ", - func() { - channelID = "invalidChan" - }, false, - }, - } - - for _, tc := range testCases { - tc := tc - suite.Run(fmt.Sprintf("Case: %s", tc.name), func() { - suite.SetupTest() // reset - portID = ccv.ConsumerPortID - channelID = "channel-1" - counterChannelID := "channel-2" // per spec this is not required by onChanOpenAck() - suite.path.EndpointA.ChannelID = channelID - - // Set INIT channel on consumer chain - suite.consumerChain.App.GetIBCKeeper().ChannelKeeper.SetChannel( - suite.ctx, - ccv.ConsumerPortID, - channelID, - channeltypes.NewChannel( - channeltypes.INIT, - channeltypes.ORDERED, - channeltypes.NewCounterparty(ccv.ProviderPortID, ""), - []string{suite.path.EndpointA.ConnectionID}, - suite.path.EndpointA.ChannelConfig.Version, - ), - ) - - consumerModule := consumer.NewAppModule( - suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper) - - metadata := providertypes.HandshakeMetadata{ - ProviderFeePoolAddr: "", // dummy address used - Version: suite.path.EndpointB.ChannelConfig.Version, - } - - 
metadataBz, err = (&metadata).Marshal() - suite.Require().NoError(err) - - tc.malleate() // Explicitly change fields already defined - - err = consumerModule.OnChanOpenAck( - suite.ctx, - portID, - channelID, - counterChannelID, - string(metadataBz), - ) - - if tc.expPass { - suite.Require().NoError(err) - } else { - suite.Require().Error(err) - } - }) - } -} - -func (suite *ConsumerTestSuite) TestOnChanOpenConfirm() { - consumerModule := consumer.NewAppModule(suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper) - err := consumerModule.OnChanOpenConfirm(suite.ctx, ccv.ConsumerPortID, "channel-1") - suite.Require().Error(err, "OnChanOpenConfirm callback must error on consumer chain") -} - -func (suite *ConsumerTestSuite) TestOnChanCloseInit() { - channelID := "channel-1" - testCases := []struct { - name string - setup func(suite *ConsumerTestSuite) - expError bool - }{ - { - name: "can close duplicate in-progress channel once provider channel is established", - setup: func(suite *ConsumerTestSuite) { - // Set INIT channel on consumer chain - suite.consumerChain.App.GetIBCKeeper().ChannelKeeper.SetChannel(suite.ctx, ccv.ConsumerPortID, channelID, - channeltypes.NewChannel( - channeltypes.INIT, channeltypes.ORDERED, channeltypes.NewCounterparty(ccv.ProviderPortID, ""), - []string{suite.path.EndpointA.ConnectionID}, suite.path.EndpointA.ChannelConfig.Version), - ) - suite.path.EndpointA.ChannelID = channelID - suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper.SetProviderChannel(suite.ctx, "different-channel") - }, - expError: false, - }, - { - name: "can close duplicate open channel once provider channel is established", - setup: func(suite *ConsumerTestSuite) { - // create open channel - suite.coordinator.CreateChannels(suite.path) - suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper.SetProviderChannel(suite.ctx, "different-channel") - }, - expError: false, - }, - { - name: "cannot close in-progress channel, no established channel yet", - setup: func(suite *ConsumerTestSuite) { - // Set INIT channel on consumer chain - suite.consumerChain.App.GetIBCKeeper().ChannelKeeper.SetChannel(suite.ctx, ccv.ConsumerPortID, channelID, - channeltypes.NewChannel( - channeltypes.INIT, channeltypes.ORDERED, channeltypes.NewCounterparty(ccv.ProviderPortID, ""), - []string{suite.path.EndpointA.ConnectionID}, suite.path.EndpointA.ChannelConfig.Version), - ) - suite.path.EndpointA.ChannelID = channelID - }, - expError: true, - }, - { - name: "cannot close provider channel", - setup: func(suite *ConsumerTestSuite) { - // create open channel - suite.coordinator.CreateChannels(suite.path) - suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper.SetProviderChannel(suite.ctx, suite.path.EndpointA.ChannelID) - }, - expError: true, - }, - } - - for _, tc := range testCases { - tc := tc - suite.Run(fmt.Sprintf("Case: %s", tc.name), func() { - suite.SetupTest() // reset suite - tc.setup(suite) - - consumerModule := consumer.NewAppModule(suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper) - - err := consumerModule.OnChanCloseInit(suite.ctx, ccv.ConsumerPortID, suite.path.EndpointA.ChannelID) - - if tc.expError { - suite.Require().Error(err) - } else { - suite.Require().NoError(err) - } - }) - } -} // TestProviderClientMatches tests that the provider client managed by the consumer keeper matches the client keeper's client state -func (suite *ConsumerKeeperTestSuite) TestProviderClientMatches() { - providerClientID, ok := 
suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper.GetProviderClientID(suite.ctx) +func (suite *CCVTestSuite) TestProviderClientMatches() { + providerClientID, ok := suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper.GetProviderClientID(suite.consumerCtx()) suite.Require().True(ok) - clientState, _ := suite.consumerChain.App.GetIBCKeeper().ClientKeeper.GetClientState(suite.ctx, providerClientID) + clientState, _ := suite.consumerChain.App.GetIBCKeeper().ClientKeeper.GetClientState(suite.consumerCtx(), providerClientID) suite.Require().Equal(suite.providerClient, clientState, "stored client state does not match genesis provider client") } - -// TestVerifyProviderChain tests the VerifyProviderChain method for the consumer keeper -func (suite *ConsumerKeeperTestSuite) TestVerifyProviderChain() { - var connectionHops []string - channelID := "channel-0" - testCases := []struct { - name string - setup func(suite *ConsumerKeeperTestSuite) - connectionHops []string - expError bool - }{ - { - name: "success", - setup: func(suite *ConsumerKeeperTestSuite) { - // create consumer client on provider chain - providerUnbondingPeriod := suite.providerChain.App.(*appProvider.App).GetStakingKeeper().UnbondingTime(suite.providerChain.GetContext()) - consumerUnbondingPeriod := utils.ComputeConsumerUnbondingPeriod(providerUnbondingPeriod) - suite.CreateCustomClient(suite.path.EndpointB, consumerUnbondingPeriod) - err := suite.path.EndpointB.CreateClient() - suite.Require().NoError(err) - - suite.coordinator.CreateConnections(suite.path) - - // set connection hops to be connection hop from path endpoint - connectionHops = []string{suite.path.EndpointA.ConnectionID} - }, - connectionHops: []string{suite.path.EndpointA.ConnectionID}, - expError: false, - }, - { - name: "connection hops is not length 1", - setup: func(suite *ConsumerKeeperTestSuite) { - // create consumer client on provider chain - providerUnbondingPeriod := suite.providerChain.App.(*appProvider.App).GetStakingKeeper().UnbondingTime(suite.providerChain.GetContext()) - consumerUnbondingPeriod := utils.ComputeConsumerUnbondingPeriod(providerUnbondingPeriod) - suite.CreateCustomClient(suite.path.EndpointB, consumerUnbondingPeriod) - - suite.coordinator.CreateConnections(suite.path) - - // set connection hops to be connection hop from path endpoint - connectionHops = []string{suite.path.EndpointA.ConnectionID, "connection-2"} - }, - expError: true, - }, - { - name: "connection does not exist", - setup: func(suite *ConsumerKeeperTestSuite) { - // set connection hops to be connection hop from path endpoint - connectionHops = []string{"connection-dne"} - }, - expError: true, - }, - { - name: "clientID does not match", - setup: func(suite *ConsumerKeeperTestSuite) { - // create consumer client on provider chain - providerUnbondingPeriod := suite.providerChain.App.(*appProvider.App).GetStakingKeeper().UnbondingTime(suite.providerChain.GetContext()) - consumerUnbondingPeriod := utils.ComputeConsumerUnbondingPeriod(providerUnbondingPeriod) - suite.CreateCustomClient(suite.path.EndpointB, consumerUnbondingPeriod) - - // create a new provider client on consumer chain that is different from the one in genesis - suite.CreateCustomClient(suite.path.EndpointA, providerUnbondingPeriod) - - suite.coordinator.CreateConnections(suite.path) - - // set connection hops to be connection hop from path endpoint - connectionHops = []string{suite.path.EndpointA.ConnectionID} - }, - expError: true, - }, - } - - for _, tc := range testCases { - tc := tc - 
suite.Run(fmt.Sprintf("Case: %s", tc.name), func() { - suite.SetupTest() // reset suite - - tc.setup(suite) - - // Verify ProviderChain on consumer chain using path returned by setup - err := suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper.VerifyProviderChain(suite.ctx, channelID, connectionHops) - - if tc.expError { - suite.Require().Error(err, "invalid case did not return error") - } else { - suite.Require().NoError(err, "valid case returned error") - } - }) - } -} - -// TestOnChanOpenTry validates the provider's OnChanOpenTry implementation against the spec: -// https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-cotry1 -func (suite *ProviderTestSuite) TestOnChanOpenTry() { - var ( - channel *channeltypes.Channel - counterpartyVersion string - providerKeeper *providerkeeper.Keeper - ) - - testCases := []struct { - name string - malleate func() - expPass bool - }{ - { - "success", func() {}, true, - }, - { - "invalid order", func() { - channel.Ordering = channeltypes.UNORDERED - }, false, - }, - { - "invalid port ID", func() { - suite.path.EndpointA.ChannelConfig.PortID = ibctesting.MockPort - }, false, - }, - { - "invalid counter party port ID", func() { - channel.Counterparty.PortId = ibctesting.MockPort - }, false, - }, - { - "invalid counter party version", func() { - counterpartyVersion = "invalidVersion" - }, false, - }, - { - "unexpected client ID mapped to chain ID", func() { - providerKeeper.SetConsumerClientId( - suite.providerCtx(), - suite.path.EndpointA.Chain.ChainID, - "invalidClientID", - ) - }, false, - }, - { - "other CCV channel exists for this consumer chain", func() { - providerKeeper.SetChainToChannel( - suite.providerCtx(), - suite.path.EndpointA.Chain.ChainID, - "some existing channel ID", - ) - }, false, - }, - } - - for _, tc := range testCases { - tc := tc - - suite.Run(tc.name, func() { - suite.SetupTest() // reset - - suite.path.EndpointA.ChannelConfig.PortID = ccv.ProviderPortID - suite.path.EndpointA.ChannelID = "providerChanID" - suite.path.EndpointB.ChannelConfig.PortID = ccv.ConsumerPortID - suite.path.EndpointB.ChannelID = "consumerChanID" - suite.path.EndpointA.ConnectionID = "ConnID" - suite.path.EndpointA.ClientID = "ClientID" - suite.path.EndpointA.Chain.ChainID = "ChainID" - - counterparty := channeltypes.NewCounterparty( - suite.path.EndpointB.ChannelConfig.PortID, - suite.path.EndpointA.ChannelID, - ) - counterpartyVersion = ccv.Version - - channel = &channeltypes.Channel{ - State: channeltypes.INIT, - Ordering: channeltypes.ORDERED, - Counterparty: counterparty, - ConnectionHops: []string{suite.path.EndpointA.ConnectionID}, - Version: counterpartyVersion, - } - - providerKeeper = &suite.providerChain.App.(*appProvider.App).ProviderKeeper - providerModule := provider.NewAppModule(providerKeeper) - chanCap, err := suite.providerChain.App.GetScopedIBCKeeper().NewCapability( - suite.providerCtx(), - host.ChannelCapabilityPath( - suite.path.EndpointA.ChannelConfig.PortID, - suite.path.EndpointA.ChannelID, - ), - ) - suite.Require().NoError(err) - - // Manual keeper setup - connKeeper := suite.providerChain.App.GetIBCKeeper().ConnectionKeeper - connKeeper.SetConnection( - suite.providerCtx(), - suite.path.EndpointA.ConnectionID, - ibctypes.ConnectionEnd{ - ClientId: suite.path.EndpointA.ClientID, - }, - ) - clientKeeper := suite.providerChain.App.GetIBCKeeper().ClientKeeper - clientKeeper.SetClientState( - suite.providerCtx(), - suite.path.EndpointA.ClientID, - &clienttmtypes.ClientState{ - 
ChainId: suite.path.EndpointA.Chain.ChainID, - }, - ) - providerKeeper.SetConsumerClientId( - suite.providerCtx(), - suite.path.EndpointA.Chain.ChainID, - suite.path.EndpointA.ClientID, - ) - - tc.malleate() // explicitly change fields - - metadata, err := providerModule.OnChanOpenTry( - suite.providerCtx(), - channel.Ordering, - channel.GetConnectionHops(), - suite.path.EndpointA.ChannelConfig.PortID, - suite.path.EndpointA.ChannelID, - chanCap, - channel.Counterparty, - counterpartyVersion, - ) - - if tc.expPass { - suite.Require().NoError(err) - md := &providertypes.HandshakeMetadata{} - err = md.Unmarshal([]byte(metadata)) - suite.Require().NoError(err) - } else { - suite.Require().Error(err) - } - }) - } -} - -func (suite *ProviderTestSuite) TestOnChanOpenInit() { - // OnChanOpenInit must error for provider even with correct arguments - providerModule := provider.NewAppModule(&suite.providerChain.App.(*appProvider.App).ProviderKeeper) - - err := providerModule.OnChanOpenInit( - suite.providerCtx(), - channeltypes.ORDERED, - []string{"connection-1"}, - ccv.ProviderPortID, - "channel-1", - nil, - channeltypes.NewCounterparty(ccv.ConsumerPortID, "channel-1"), - ccv.Version, - ) - suite.Require().Error(err, "OnChanOpenInit must error on provider chain") -} - -// TestConsumerChainProposalHandler tests the handler for consumer chain proposals -// for both CreateConsumerChainProposal and StopConsumerChainProposal -// -// TODO: Determine if it's possible to make this a unit test -func (suite *ProviderTestSuite) TestConsumerChainProposalHandler() { - var ( - ctx sdk.Context - content govtypes.Content - err error - ) - - testCases := []struct { - name string - malleate func(*ProviderTestSuite) - expPass bool - }{ - { - "valid create consumerchain proposal", func(suite *ProviderTestSuite) { - initialHeight := clienttypes.NewHeight(2, 3) - // ctx blocktime is after proposal's spawn time - ctx = suite.providerChain.GetContext().WithBlockTime(time.Now().Add(time.Hour)) - content = types.NewCreateConsumerChainProposal("title", "description", "chainID", initialHeight, []byte("gen_hash"), []byte("bin_hash"), time.Now()) - }, true, - }, - { - "valid stop consumerchain proposal", func(suite *ProviderTestSuite) { - ctx = suite.providerChain.GetContext().WithBlockTime(time.Now().Add(time.Hour)) - content, err = types.NewStopConsumerChainProposal("title", "description", "chainID", time.Now()) - suite.Require().NoError(err) - }, true, - }, - { - "nil proposal", func(suite *ProviderTestSuite) { - ctx = suite.providerChain.GetContext() - content = nil - }, false, - }, - { - "unsupported proposal type", func(suite *ProviderTestSuite) { - ctx = suite.providerChain.GetContext() - content = distributiontypes.NewCommunityPoolSpendProposal(ibctesting.Title, ibctesting.Description, suite.providerChain.SenderAccount.GetAddress(), sdk.NewCoins(sdk.NewCoin("communityfunds", sdk.NewInt(10)))) - }, false, - }, - } - - for _, tc := range testCases { - tc := tc - - suite.Run(tc.name, func() { - suite.SetupTest() // reset - - tc.malleate(suite) - - proposalHandler := provider.NewConsumerChainProposalHandler(suite.providerChain.App.(*appProvider.App).ProviderKeeper) - - err = proposalHandler(ctx, content) - - if tc.expPass { - suite.Require().NoError(err) - } else { - suite.Require().Error(err) - } - }) - } -} - -func (suite *ProviderKeeperTestSuite) TestMakeConsumerGenesis() { - suite.SetupTest() - - actualGenesis, err := 
suite.providerChain.App.(*appProvider.App).ProviderKeeper.MakeConsumerGenesis(suite.providerChain.GetContext()) - suite.Require().NoError(err) - - jsonString := `{"params":{"enabled":true, "blocks_per_distribution_transmission":1000, "lock_unbonding_on_timeout": false},"new_chain":true,"provider_client_state":{"chain_id":"testchain1","trust_level":{"numerator":1,"denominator":3},"trusting_period":907200000000000,"unbonding_period":1814400000000000,"max_clock_drift":10000000000,"frozen_height":{},"latest_height":{"revision_height":5},"proof_specs":[{"leaf_spec":{"hash":1,"prehash_value":1,"length":1,"prefix":"AA=="},"inner_spec":{"child_order":[0,1],"child_size":33,"min_prefix_length":4,"max_prefix_length":12,"hash":1}},{"leaf_spec":{"hash":1,"prehash_value":1,"length":1,"prefix":"AA=="},"inner_spec":{"child_order":[0,1],"child_size":32,"min_prefix_length":1,"max_prefix_length":1,"hash":1}}],"upgrade_path":["upgrade","upgradedIBCState"],"allow_update_after_expiry":true,"allow_update_after_misbehaviour":true},"provider_consensus_state":{"timestamp":"2020-01-02T00:00:10Z","root":{"hash":"LpGpeyQVLUo9HpdsgJr12NP2eCICspcULiWa5u9udOA="},"next_validators_hash":"E30CE736441FB9101FADDAF7E578ABBE6DFDB67207112350A9A904D554E1F5BE"},"unbonding_sequences":null,"initial_val_set":[{"pub_key":{"type":"tendermint/PubKeyEd25519","value":"dcASx5/LIKZqagJWN0frOlFtcvz91frYmj/zmoZRWro="},"power":1}]}` - - var expectedGenesis consumertypes.GenesisState - err = json.Unmarshal([]byte(jsonString), &expectedGenesis) - suite.Require().NoError(err) - - // Zero out differing fields- TODO: figure out how to get the test suite to - // keep these deterministic - actualGenesis.ProviderConsensusState.NextValidatorsHash = []byte{} - expectedGenesis.ProviderConsensusState.NextValidatorsHash = []byte{} - - // set valset to one empty validator because SetupTest() creates 4 validators per chain - actualGenesis.InitialValSet = []abci.ValidatorUpdate{{PubKey: crypto.PublicKey{}, Power: actualGenesis.InitialValSet[0].Power}} - expectedGenesis.InitialValSet[0].PubKey = crypto.PublicKey{} - - actualGenesis.ProviderConsensusState.Root.Hash = []byte{} - expectedGenesis.ProviderConsensusState.Root.Hash = []byte{} - - suite.Require().Equal(actualGenesis, expectedGenesis, "consumer chain genesis created incorrectly") -} - -func (suite *ProviderKeeperTestSuite) TestCreateConsumerChainProposal() { - var ( - ctx sdk.Context - proposal *types.CreateConsumerChainProposal - ok bool - ) - - chainID := "chainID" - initialHeight := clienttypes.NewHeight(2, 3) - lockUbdOnTimeout := false - - testCases := []struct { - name string - malleate func(*ProviderKeeperTestSuite) - expPass bool - spawnReached bool - }{ - { - "valid create consumer chain proposal: spawn time reached", func(suite *ProviderKeeperTestSuite) { - // ctx blocktime is after proposal's spawn time - ctx = suite.providerChain.GetContext().WithBlockTime(time.Now().Add(time.Hour)) - content := types.NewCreateConsumerChainProposal("title", "description", chainID, initialHeight, []byte("gen_hash"), []byte("bin_hash"), time.Now()) - proposal, ok = content.(*types.CreateConsumerChainProposal) - suite.Require().True(ok) - proposal.LockUnbondingOnTimeout = lockUbdOnTimeout - }, true, true, - }, - { - "valid proposal: spawn time has not yet been reached", func(suite *ProviderKeeperTestSuite) { - // ctx blocktime is before proposal's spawn time - ctx = suite.providerChain.GetContext().WithBlockTime(time.Now()) - content := types.NewCreateConsumerChainProposal("title", "description", chainID, 
initialHeight, []byte("gen_hash"), []byte("bin_hash"), time.Now().Add(time.Hour)) - proposal, ok = content.(*types.CreateConsumerChainProposal) - suite.Require().True(ok) - proposal.LockUnbondingOnTimeout = lockUbdOnTimeout - }, true, false, - }, - } - - for _, tc := range testCases { - tc := tc - - suite.Run(tc.name, func() { - suite.SetupTest() - - tc.malleate(suite) - - err := suite.providerChain.App.(*appProvider.App).ProviderKeeper.CreateConsumerChainProposal(ctx, proposal) - if tc.expPass { - suite.Require().NoError(err, "error returned on valid case") - if tc.spawnReached { - clientId, found := suite.providerChain.App.(*appProvider.App).ProviderKeeper.GetConsumerClientId(ctx, chainID) - suite.Require().True(found, "consumer client not found") - consumerGenesis, ok := suite.providerChain.App.(*appProvider.App).ProviderKeeper.GetConsumerGenesis(ctx, chainID) - suite.Require().True(ok) - - expectedGenesis, err := suite.providerChain.App.(*appProvider.App).ProviderKeeper.MakeConsumerGenesis(ctx) - suite.Require().NoError(err) - - suite.Require().Equal(expectedGenesis, consumerGenesis) - suite.Require().NotEqual("", clientId, "consumer client was not created after spawn time reached") - } else { - gotProposal := suite.providerChain.App.(*appProvider.App).ProviderKeeper.GetPendingCreateProposal(ctx, proposal.SpawnTime, chainID) - suite.Require().Equal(initialHeight, gotProposal.InitialHeight, "unexpected pending proposal (InitialHeight)") - suite.Require().Equal(lockUbdOnTimeout, gotProposal.LockUnbondingOnTimeout, "unexpected pending proposal (LockUnbondingOnTimeout)") - } - } else { - suite.Require().Error(err, "did not return error on invalid case") - } - }) - } -} diff --git a/tests/e2e/common_test.go b/tests/e2e/common_test.go index 97a0c8fdd0..396aa02ee3 100644 --- a/tests/e2e/common_test.go +++ b/tests/e2e/common_test.go @@ -32,19 +32,19 @@ const ( Consumer ) -func (s *ProviderTestSuite) providerCtx() sdk.Context { +func (s *CCVTestSuite) providerCtx() sdk.Context { return s.providerChain.GetContext() } -func (s *ProviderTestSuite) consumerCtx() sdk.Context { +func (s *CCVTestSuite) consumerCtx() sdk.Context { return s.consumerChain.GetContext() } -func (s *ProviderTestSuite) providerBondDenom() string { +func (s *CCVTestSuite) providerBondDenom() string { return s.providerChain.App.(*appProvider.App).StakingKeeper.BondDenom(s.providerCtx()) } -func (s *ProviderTestSuite) getVal(index int) (validator stakingtypes.Validator, valAddr sdk.ValAddress) { +func (s *CCVTestSuite) getVal(index int) (validator stakingtypes.Validator, valAddr sdk.ValAddress) { // Choose a validator, and get its address and data structure into the correct types tmValidator := s.providerChain.Vals.Validators[index] valAddr, err := sdk.ValAddressFromHex(tmValidator.Address.String()) @@ -55,13 +55,13 @@ func (s *ProviderTestSuite) getVal(index int) (validator stakingtypes.Validator, return validator, valAddr } -func getBalance(s *ProviderTestSuite, providerCtx sdk.Context, delAddr sdk.AccAddress) sdk.Int { +func getBalance(s *CCVTestSuite, providerCtx sdk.Context, delAddr sdk.AccAddress) sdk.Int { return s.providerChain.App.(*appProvider.App).BankKeeper.GetBalance(providerCtx, delAddr, s.providerBondDenom()).Amount } // delegateAndUndelegate delegates bondAmt from delAddr to the first validator // and then immediately undelegates 1/shareDiv of that delegation -func delegateAndUndelegate(s *ProviderTestSuite, delAddr sdk.AccAddress, bondAmt sdk.Int, shareDiv int64) (initBalance sdk.Int, valsetUpdateId uint64) { 
+func delegateAndUndelegate(s *CCVTestSuite, delAddr sdk.AccAddress, bondAmt sdk.Int, shareDiv int64) (initBalance sdk.Int, valsetUpdateId uint64) { // delegate initBalance, shares, valAddr := delegate(s, delAddr, bondAmt) @@ -82,7 +82,7 @@ func delegateAndUndelegate(s *ProviderTestSuite, delAddr sdk.AccAddress, bondAmt // // Note: This function advances blocks in-between operations, where validator powers are // not checked, since they are checked in integration tests. -func delegateAndRedelegate(s *ProviderTestSuite, delAddr sdk.AccAddress, +func delegateAndRedelegate(s *CCVTestSuite, delAddr sdk.AccAddress, srcValAddr sdk.ValAddress, dstValAddr sdk.ValAddress, amount sdk.Int) { stakingKeeper := s.providerChain.App.(*appProvider.App).StakingKeeper @@ -115,7 +115,7 @@ func delegateAndRedelegate(s *ProviderTestSuite, delAddr sdk.AccAddress, } // delegate delegates bondAmt to the first validator -func delegate(s *ProviderTestSuite, delAddr sdk.AccAddress, bondAmt sdk.Int) (initBalance sdk.Int, shares sdk.Dec, valAddr sdk.ValAddress) { +func delegate(s *CCVTestSuite, delAddr sdk.AccAddress, bondAmt sdk.Int) (initBalance sdk.Int, shares sdk.Dec, valAddr sdk.ValAddress) { initBalance = getBalance(s, s.providerCtx(), delAddr) // choose a validator validator, valAddr := s.getVal(0) @@ -135,7 +135,7 @@ func delegate(s *ProviderTestSuite, delAddr sdk.AccAddress, bondAmt sdk.Int) (in } // undelegate unbonds an amount of delegator shares from a given validator -func undelegate(s *ProviderTestSuite, delAddr sdk.AccAddress, valAddr sdk.ValAddress, sharesAmount sdk.Dec) (valsetUpdateId uint64) { +func undelegate(s *CCVTestSuite, delAddr sdk.AccAddress, valAddr sdk.ValAddress, sharesAmount sdk.Dec) (valsetUpdateId uint64) { _, err := s.providerChain.App.(*appProvider.App).StakingKeeper.Undelegate(s.providerCtx(), delAddr, valAddr, sharesAmount) s.Require().NoError(err) @@ -147,7 +147,7 @@ func undelegate(s *ProviderTestSuite, delAddr sdk.AccAddress, valAddr sdk.ValAdd // Executes a BeginRedelegation (unbonding and redelegation) operation // on the provider chain using delegated funds from delAddr -func redelegate(s *ProviderTestSuite, delAddr sdk.AccAddress, valSrcAddr sdk.ValAddress, +func redelegate(s *CCVTestSuite, delAddr sdk.AccAddress, valSrcAddr sdk.ValAddress, ValDstAddr sdk.ValAddress, sharesAmount sdk.Dec) { ctx := s.providerCtx() @@ -179,7 +179,7 @@ func redelegate(s *ProviderTestSuite, delAddr sdk.AccAddress, valSrcAddr sdk.Val // relayAllCommittedPackets relays all committed packets from `srcChain` on `path` func relayAllCommittedPackets( - s *ProviderTestSuite, + s *CCVTestSuite, srcChain *ibctesting.TestChain, path *ibctesting.Path, portID string, @@ -212,7 +212,7 @@ func relayAllCommittedPackets( // // Note that it is expected for the provider unbonding period // to be one day larger than the consumer unbonding period. 
-func incrementTimeByUnbondingPeriod(s *ProviderTestSuite, chainType ChainType) { +func incrementTimeByUnbondingPeriod(s *CCVTestSuite, chainType ChainType) { // Get unboding period from staking keeper providerUnbondingPeriod := s.providerChain.App.GetStakingKeeper().UnbondingTime(s.providerCtx()) consumerUnbondingPeriod, found := s.consumerChain.App.(*appConsumer.App).ConsumerKeeper.GetUnbondingTime(s.consumerCtx()) @@ -239,13 +239,13 @@ func incrementTimeByUnbondingPeriod(s *ProviderTestSuite, chainType ChainType) { } } -func checkStakingUnbondingOps(s *ProviderTestSuite, id uint64, found bool, onHold bool) { +func checkStakingUnbondingOps(s *CCVTestSuite, id uint64, found bool, onHold bool) { stakingUnbondingOp, wasFound := getStakingUnbondingDelegationEntry(s.providerCtx(), s.providerChain.App.(*appProvider.App).StakingKeeper, id) s.Require().True(found == wasFound) s.Require().True(onHold == (0 < stakingUnbondingOp.UnbondingOnHoldRefCount)) } -func checkCCVUnbondingOp(s *ProviderTestSuite, providerCtx sdk.Context, chainID string, valUpdateID uint64, found bool) { +func checkCCVUnbondingOp(s *CCVTestSuite, providerCtx sdk.Context, chainID string, valUpdateID uint64, found bool) { entries, wasFound := s.providerChain.App.(*appProvider.App).ProviderKeeper.GetUnbondingOpsFromIndex(providerCtx, chainID, valUpdateID) s.Require().True(found == wasFound) if found { @@ -257,7 +257,7 @@ func checkCCVUnbondingOp(s *ProviderTestSuite, providerCtx sdk.Context, chainID // Checks that an expected amount of redelegations exist for a delegator // via the staking keeper, then returns those redelegations. -func checkRedelegations(s *ProviderTestSuite, delAddr sdk.AccAddress, +func checkRedelegations(s *CCVTestSuite, delAddr sdk.AccAddress, expect uint16) []stakingtypes.Redelegation { redelegations := s.providerChain.App.(*appProvider.App).StakingKeeper. @@ -269,7 +269,7 @@ func checkRedelegations(s *ProviderTestSuite, delAddr sdk.AccAddress, // Checks that a redelegation entry has a completion time equal to an expected time func checkRedelegationEntryCompletionTime( - s *ProviderTestSuite, entry stakingtypes.RedelegationEntry, expectedCompletion time.Time) { + s *CCVTestSuite, entry stakingtypes.RedelegationEntry, expectedCompletion time.Time) { s.Require().Equal(expectedCompletion, entry.CompletionTime) } @@ -289,7 +289,7 @@ func getStakingUnbondingDelegationEntry(ctx sdk.Context, k stakingkeeper.Keeper, // SendEmptyVSCPacket sends a VSC packet without any changes // to ensure that the channel gets established -func (suite *ConsumerKeeperTestSuite) SendEmptyVSCPacket() { +func (suite *CCVTestSuite) SendEmptyVSCPacket() { providerKeeper := suite.providerChain.App.(*appProvider.App).ProviderKeeper oldBlockTime := suite.providerChain.GetContext().BlockTime() @@ -318,7 +318,7 @@ func (suite *ConsumerKeeperTestSuite) SendEmptyVSCPacket() { // commitSlashPacket returns a commit hash for the given slash packet data // Note that it must be called before sending the embedding IBC packet. 
-func (suite *ConsumerKeeperTestSuite) commitSlashPacket(ctx sdk.Context, packetData ccv.SlashPacketData) []byte { +func (suite *CCVTestSuite) commitSlashPacket(ctx sdk.Context, packetData ccv.SlashPacketData) []byte { oldBlockTime := ctx.BlockTime() timeout := uint64(ccv.GetTimeoutTimestamp(oldBlockTime).UnixNano()) @@ -329,7 +329,7 @@ func (suite *ConsumerKeeperTestSuite) commitSlashPacket(ctx sdk.Context, packetD } // incrementTimeBy increments the overall time by jumpPeriod -func incrementTimeBy(s *ConsumerKeeperTestSuite, jumpPeriod time.Duration) { +func incrementTimeBy(s *CCVTestSuite, jumpPeriod time.Duration) { // Get unboding period from staking keeper consumerUnbondingPeriod, found := s.consumerChain.App.(*appConsumer.App).ConsumerKeeper.GetUnbondingTime(s.consumerChain.GetContext()) s.Require().True(found) @@ -356,7 +356,7 @@ func incrementTimeBy(s *ConsumerKeeperTestSuite, jumpPeriod time.Duration) { // using the given unbonding period. // It will update the clientID for the endpoint if the message // is successfully executed. -func (suite *ConsumerKeeperTestSuite) CreateCustomClient(endpoint *ibctesting.Endpoint, unbondingPeriod time.Duration) { +func (suite *CCVTestSuite) CreateCustomClient(endpoint *ibctesting.Endpoint, unbondingPeriod time.Duration) { // ensure counterparty has committed state endpoint.Chain.Coordinator.CommitBlock(endpoint.Counterparty.Chain) @@ -386,42 +386,3 @@ func (suite *ConsumerKeeperTestSuite) CreateCustomClient(endpoint *ibctesting.En endpoint.ClientID, err = ibctesting.ParseClientIDFromEvents(res.GetEvents()) require.NoError(endpoint.Chain.T, err) } - -// createCustomClient creates an IBC client on the endpoint -// using the given unbonding period. -// It will update the clientID for the endpoint if the message -// is successfully executed. 
-func (suite *ConsumerTestSuite) createCustomClient(endpoint *ibctesting.Endpoint, unbondingPeriod time.Duration) (err error) { - // ensure counterparty has committed state - endpoint.Chain.Coordinator.CommitBlock(endpoint.Counterparty.Chain) - - suite.Require().Equal(exported.Tendermint, endpoint.ClientConfig.GetClientType(), "only Tendermint client supported") - - tmConfig, ok := endpoint.ClientConfig.(*ibctesting.TendermintConfig) - require.True(endpoint.Chain.T, ok) - tmConfig.UnbondingPeriod = unbondingPeriod - tmConfig.TrustingPeriod = unbondingPeriod / utils.TrustingPeriodFraction - - height := endpoint.Counterparty.Chain.LastHeader.GetHeight().(clienttypes.Height) - UpgradePath := []string{"upgrade", "upgradedIBCState"} - clientState := ibctmtypes.NewClientState( - endpoint.Counterparty.Chain.ChainID, tmConfig.TrustLevel, tmConfig.TrustingPeriod, tmConfig.UnbondingPeriod, tmConfig.MaxClockDrift, - height, commitmenttypes.GetSDKSpecs(), UpgradePath, tmConfig.AllowUpdateAfterExpiry, tmConfig.AllowUpdateAfterMisbehaviour, - ) - consensusState := endpoint.Counterparty.Chain.LastHeader.ConsensusState() - - msg, err := clienttypes.NewMsgCreateClient( - clientState, consensusState, endpoint.Chain.SenderAccount.GetAddress().String(), - ) - require.NoError(endpoint.Chain.T, err) - - res, err := endpoint.Chain.SendMsgs(msg) - if err != nil { - return err - } - - endpoint.ClientID, err = ibctesting.ParseClientIDFromEvents(res.GetEvents()) - require.NoError(endpoint.Chain.T, err) - - return nil -} diff --git a/tests/e2e/democracy_test.go b/tests/e2e/democracy_test.go new file mode 100644 index 0000000000..f399e04627 --- /dev/null +++ b/tests/e2e/democracy_test.go @@ -0,0 +1,262 @@ +package e2e_test + +import ( + "bytes" + "testing" + + sdk "github.com/cosmos/cosmos-sdk/types" + transfertypes "github.com/cosmos/ibc-go/v3/modules/apps/transfer/types" + + clienttypes "github.com/cosmos/ibc-go/v3/modules/core/02-client/types" + channeltypes "github.com/cosmos/ibc-go/v3/modules/core/04-channel/types" + ibctesting "github.com/cosmos/ibc-go/v3/testing" + + appConsumer "github.com/cosmos/interchain-security/app/consumer-democracy" + appProvider "github.com/cosmos/interchain-security/app/provider" + "github.com/cosmos/interchain-security/testutil/simapp" + consumerkeeper "github.com/cosmos/interchain-security/x/ccv/consumer/keeper" + consumertypes "github.com/cosmos/interchain-security/x/ccv/consumer/types" + "github.com/cosmos/interchain-security/x/ccv/types" + "github.com/cosmos/interchain-security/x/ccv/utils" + + tmtypes "github.com/tendermint/tendermint/types" + + "github.com/stretchr/testify/suite" +) + +var consumerFraction, _ = sdk.NewDecFromStr(consumerkeeper.ConsumerRedistributeFrac) + +type ConsumerDemocracyTestSuite struct { + suite.Suite + + coordinator *ibctesting.Coordinator + + // testing chains + providerChain *ibctesting.TestChain + consumerChain *ibctesting.TestChain + + path *ibctesting.Path + transferPath *ibctesting.Path +} + +func (s *ConsumerDemocracyTestSuite) SetupTest() { + s.coordinator, s.providerChain, s.consumerChain = simapp.NewProviderConsumerDemocracyCoordinator(s.T()) + + // valsets must match + providerValUpdates := tmtypes.TM2PB.ValidatorUpdates(s.providerChain.Vals) + consumerValUpdates := tmtypes.TM2PB.ValidatorUpdates(s.consumerChain.Vals) + s.Require().True(len(providerValUpdates) == len(consumerValUpdates), "initial valset not matching") + for i := 0; i < len(providerValUpdates); i++ { + addr1 := utils.GetChangePubKeyAddress(providerValUpdates[i]) + addr2 := 
utils.GetChangePubKeyAddress(consumerValUpdates[i]) + s.Require().True(bytes.Equal(addr1, addr2), "validator mismatch") + } + + // move both chains to the next block + s.providerChain.NextBlock() + s.consumerChain.NextBlock() + + // create consumer client on provider chain and set as consumer client for consumer chainID in provider keeper. + err := s.providerChain.App.(*appProvider.App).ProviderKeeper.CreateConsumerClient( + s.providerCtx(), + s.consumerChain.ChainID, + s.consumerChain.LastHeader.GetHeight().(clienttypes.Height), + false, + ) + s.Require().NoError(err) + + // move provider to next block to commit the state + s.providerChain.NextBlock() + + // initialize the consumer chain with the genesis state stored on the provider + consumerGenesis, found := s.providerChain.App.(*appProvider.App).ProviderKeeper.GetConsumerGenesis( + s.providerCtx(), + s.consumerChain.ChainID, + ) + s.Require().True(found, "consumer genesis not found") + s.consumerChain.App.(*appConsumer.App).ConsumerKeeper.InitGenesis(s.consumerChain.GetContext(), &consumerGenesis) + + // create path for the CCV channel + s.path = ibctesting.NewPath(s.consumerChain, s.providerChain) + + // update CCV path with correct info + // - set provider endpoint's clientID + consumerClient, found := s.providerChain.App.(*appProvider.App).ProviderKeeper.GetConsumerClientId( + s.providerCtx(), + s.consumerChain.ChainID, + ) + s.Require().True(found, "consumer client not found") + s.path.EndpointB.ClientID = consumerClient + // - set consumer endpoint's clientID + providerClient, found := s.consumerChain.App.(*appConsumer.App).ConsumerKeeper.GetProviderClientID(s.consumerChain.GetContext()) + s.Require().True(found, "provider client not found") + s.path.EndpointA.ClientID = providerClient + // - client config + providerUnbondingPeriod := s.providerChain.App.(*appProvider.App).GetStakingKeeper().UnbondingTime(s.providerCtx()) + s.path.EndpointB.ClientConfig.(*ibctesting.TendermintConfig).UnbondingPeriod = providerUnbondingPeriod + s.path.EndpointB.ClientConfig.(*ibctesting.TendermintConfig).TrustingPeriod = providerUnbondingPeriod / utils.TrustingPeriodFraction + consumerUnbondingPeriod := utils.ComputeConsumerUnbondingPeriod(providerUnbondingPeriod) + s.path.EndpointA.ClientConfig.(*ibctesting.TendermintConfig).UnbondingPeriod = consumerUnbondingPeriod + s.path.EndpointA.ClientConfig.(*ibctesting.TendermintConfig).TrustingPeriod = consumerUnbondingPeriod / utils.TrustingPeriodFraction + // - channel config + s.path.EndpointA.ChannelConfig.PortID = types.ConsumerPortID + s.path.EndpointB.ChannelConfig.PortID = types.ProviderPortID + s.path.EndpointA.ChannelConfig.Version = types.Version + s.path.EndpointB.ChannelConfig.Version = types.Version + s.path.EndpointA.ChannelConfig.Order = channeltypes.ORDERED + s.path.EndpointB.ChannelConfig.Order = channeltypes.ORDERED + + // set chains sender account number + // TODO: to be fixed in #151 + err = s.path.EndpointB.Chain.SenderAccount.SetAccountNumber(6) + s.Require().NoError(err) + err = s.path.EndpointA.Chain.SenderAccount.SetAccountNumber(0) + s.Require().NoError(err) + + // create path for the transfer channel + s.transferPath = ibctesting.NewPath(s.consumerChain, s.providerChain) + s.transferPath.EndpointA.ChannelConfig.PortID = transfertypes.PortID + s.transferPath.EndpointB.ChannelConfig.PortID = transfertypes.PortID + s.transferPath.EndpointA.ChannelConfig.Version = transfertypes.Version + s.transferPath.EndpointB.ChannelConfig.Version = transfertypes.Version +} + +func (s 
*ConsumerDemocracyTestSuite) SetupCCVChannel() { + s.StartSetupCCVChannel() + s.CompleteSetupCCVChannel() + s.SetupTransferChannel() +} + +func (s *ConsumerDemocracyTestSuite) StartSetupCCVChannel() { + s.coordinator.CreateConnections(s.path) + + err := s.path.EndpointA.ChanOpenInit() + s.Require().NoError(err) + + err = s.path.EndpointB.ChanOpenTry() + s.Require().NoError(err) +} + +func (s *ConsumerDemocracyTestSuite) CompleteSetupCCVChannel() { + err := s.path.EndpointA.ChanOpenAck() + s.Require().NoError(err) + + err = s.path.EndpointB.ChanOpenConfirm() + s.Require().NoError(err) + + // ensure counterparty is up to date + err = s.path.EndpointA.UpdateClient() + s.Require().NoError(err) +} + +func (s *ConsumerDemocracyTestSuite) SetupTransferChannel() { + // transfer path will use the same connection as ccv path + + s.transferPath.EndpointA.ClientID = s.path.EndpointA.ClientID + s.transferPath.EndpointA.ConnectionID = s.path.EndpointA.ConnectionID + s.transferPath.EndpointB.ClientID = s.path.EndpointB.ClientID + s.transferPath.EndpointB.ConnectionID = s.path.EndpointB.ConnectionID + + // CCV channel handshake will automatically initiate transfer channel handshake on ACK + // so transfer channel will be on stage INIT when CompleteSetupCCVChannel returns. + s.transferPath.EndpointA.ChannelID = s.consumerChain.App.(*appConsumer.App). + ConsumerKeeper.GetDistributionTransmissionChannel(s.consumerChain.GetContext()) + + // Complete TRY, ACK, CONFIRM for transfer path + err := s.transferPath.EndpointB.ChanOpenTry() + s.Require().NoError(err) + + err = s.transferPath.EndpointA.ChanOpenAck() + s.Require().NoError(err) + + err = s.transferPath.EndpointB.ChanOpenConfirm() + s.Require().NoError(err) + + // ensure counterparty is up to date + err = s.transferPath.EndpointA.UpdateClient() + s.Require().NoError(err) +} + +func TestConsumerDemocracyTestSuite(t *testing.T) { + suite.Run(t, new(ConsumerDemocracyTestSuite)) +} + +func (s *ConsumerDemocracyTestSuite) TestDemocracyRewarsDistribution() { + + s.consumerChain.NextBlock() + stakingKeeper := s.consumerChain.App.(*appConsumer.App).StakingKeeper + authKeeper := s.consumerChain.App.(*appConsumer.App).AccountKeeper + distrKeeper := s.consumerChain.App.(*appConsumer.App).DistrKeeper + bankKeeper := s.consumerChain.App.(*appConsumer.App).BankKeeper + bondDenom := stakingKeeper.BondDenom(s.consumerCtx()) + + currentRepresentativesRewards := map[string]sdk.Dec{} + nextRepresentativesRewards := map[string]sdk.Dec{} + representativesTokens := map[string]sdk.Int{} + + for _, representative := range stakingKeeper.GetAllValidators(s.consumerCtx()) { + currentRepresentativesRewards[representative.OperatorAddress] = sdk.NewDec(0) + nextRepresentativesRewards[representative.OperatorAddress] = sdk.NewDec(0) + representativesTokens[representative.OperatorAddress] = representative.GetTokens() + } + + distrModuleAccount := distrKeeper.GetDistributionAccount(s.consumerCtx()) + providerRedistributeAccount := authKeeper.GetModuleAccount(s.consumerCtx(), consumertypes.ConsumerToSendToProviderName) + //balance of consumer redistribute address will always be 0 when checked between 2 NextBlock() calls + + currentDistrModuleAccountBalance := sdk.NewDecFromInt(bankKeeper.GetBalance(s.consumerCtx(), distrModuleAccount.GetAddress(), bondDenom).Amount) + currentProviderFeeAccountBalance := sdk.NewDecFromInt(bankKeeper.GetBalance(s.consumerCtx(), providerRedistributeAccount.GetAddress(), bondDenom).Amount) + currentCommunityPoolBalance := 
distrKeeper.GetFeePoolCommunityCoins(s.consumerCtx()).AmountOf(bondDenom) + for key := range currentRepresentativesRewards { + representativeAddr, _ := sdk.ValAddressFromBech32(key) + representativeReward := distrKeeper.GetValidatorOutstandingRewards(s.consumerCtx(), representativeAddr).Rewards.AmountOf(bondDenom) + currentRepresentativesRewards[key] = representativeReward + } + + s.consumerChain.NextBlock() + + nextDistrModuleAccountBalance := sdk.NewDecFromInt(bankKeeper.GetBalance(s.consumerCtx(), distrModuleAccount.GetAddress(), bondDenom).Amount) + nextProviderFeeAccountBalance := sdk.NewDecFromInt(bankKeeper.GetBalance(s.consumerCtx(), providerRedistributeAccount.GetAddress(), bondDenom).Amount) + nextCommunityPoolBalance := distrKeeper.GetFeePoolCommunityCoins(s.consumerCtx()).AmountOf(bondDenom) + for key := range nextRepresentativesRewards { + representativeAddr, _ := sdk.ValAddressFromBech32(key) + representativeReward := distrKeeper.GetValidatorOutstandingRewards(s.consumerCtx(), representativeAddr).Rewards.AmountOf(bondDenom) + nextRepresentativesRewards[key] = representativeReward + } + + distrModuleDifference := nextDistrModuleAccountBalance.Sub(currentDistrModuleAccountBalance) + providerDifference := nextProviderFeeAccountBalance.Sub(currentProviderFeeAccountBalance) + communityPoolDifference := nextCommunityPoolBalance.Sub(currentCommunityPoolBalance) + representativeDifference := map[string]sdk.Dec{} + consumerRedistributeDifference := communityPoolDifference + + for key, currentReward := range currentRepresentativesRewards { + representativeDifference[key] = nextRepresentativesRewards[key].Sub(currentReward) + consumerRedistributeDifference = consumerRedistributeDifference.Add(representativeDifference[key]) + } + + //confirm that the total amount given to the community pool plus all representatives is equal to the total amount taken out of distribution + s.Require().Equal(distrModuleDifference, consumerRedistributeDifference) + //confirm that the percentage given to the community pool is equal to the configured community tax percentage. + s.Require().Equal(communityPoolDifference.Quo(consumerRedistributeDifference), distrKeeper.GetCommunityTax(s.consumerCtx())) + //check that the fraction actually kept by the consumer is the correct fraction. using InEpsilon because the math code uses truncations + s.Require().InEpsilon(distrModuleDifference.Quo(providerDifference.Add(distrModuleDifference)).MustFloat64(), consumerFraction.MustFloat64(), float64(0.0001)) + //check that the fraction actually kept by the provider is the correct fraction. 
using InEpsilon because the math code uses truncations + s.Require().InEpsilon(providerDifference.Quo(providerDifference.Add(distrModuleDifference)).MustFloat64(), sdk.NewDec(1).Sub(consumerFraction).MustFloat64(), float64(0.0001)) + + totalRepresentativePower := stakingKeeper.GetValidatorSet().TotalBondedTokens(s.consumerCtx()) + + //check that each representative has gotten the correct amount of rewards + for key, representativeTokens := range representativesTokens { + powerFraction := sdk.NewDecFromInt(representativeTokens).QuoTruncate(sdk.NewDecFromInt(totalRepresentativePower)) + s.Require().Equal(powerFraction, representativeDifference[key].Quo(consumerRedistributeDifference.Sub(communityPoolDifference))) + } +} + +func (s *ConsumerDemocracyTestSuite) providerCtx() sdk.Context { + return s.providerChain.GetContext() +} + +func (s *ConsumerDemocracyTestSuite) consumerCtx() sdk.Context { + return s.consumerChain.GetContext() +} diff --git a/tests/e2e/distribution_test.go b/tests/e2e/distribution_test.go index 32ccb9824b..5eb7b12b77 100644 --- a/tests/e2e/distribution_test.go +++ b/tests/e2e/distribution_test.go @@ -14,10 +14,11 @@ import ( ) //This test is valid for minimal viable consumer chain -func (s *ProviderTestSuite) TestRewardsDistribution() { +func (s *CCVTestSuite) TestRewardsDistribution() { //set up channel and delegate some tokens in order for validator set update to be sent to the consumer chain s.SetupCCVChannel() + s.SetupTransferChannel() bondAmt := sdk.NewInt(10000000) delAddr := s.providerChain.SenderAccount.GetAddress() delegate(s, delAddr, bondAmt) diff --git a/tests/e2e/normal_operations_test.go b/tests/e2e/normal_operations_test.go index 4acd881563..1c90efaf50 100644 --- a/tests/e2e/normal_operations_test.go +++ b/tests/e2e/normal_operations_test.go @@ -8,7 +8,7 @@ import ( ) // Tests the tracking of historical info in the context of new blocks being committed -func (k ConsumerKeeperTestSuite) TestTrackHistoricalInfo() { +func (k CCVTestSuite) TestTrackHistoricalInfo() { consumerKeeper := k.consumerChain.App.(*appConsumer.App).ConsumerKeeper cCtx := k.consumerChain.GetContext @@ -19,7 +19,7 @@ func (k ConsumerKeeperTestSuite) TestTrackHistoricalInfo() { // define an utility function that creates a new cross-chain validator // and then call track historical info in the next block - createVal := func(k ConsumerKeeperTestSuite) { + createVal := func(k CCVTestSuite) { // add new validator to consumer states pk := ed25519.GenPrivKey().PubKey() cVal, err := types.NewCCValidator(pk.Address(), int64(1), pk) @@ -35,10 +35,10 @@ func (k ConsumerKeeperTestSuite) TestTrackHistoricalInfo() { // increased by HistoricalEntries in order to prune the historical info less or equal to the current block height // Note that historical info containing the created validators are stored during the next block BeginBlocker // and thus are indexed with the respective block heights InitHeight+1 and InitHeight+2 - testSetup := []func(ConsumerKeeperTestSuite){ + testSetup := []func(CCVTestSuite){ createVal, createVal, - func(k ConsumerKeeperTestSuite) { + func(k CCVTestSuite) { newHeight := k.consumerChain.GetContext().BlockHeight() + int64(types.HistoricalEntries) header := tmproto.Header{ ChainID: "HelloChain", diff --git a/tests/e2e/setup_test.go b/tests/e2e/setup_test.go index 5e06f0cd8a..3fd0522729 100644 --- a/tests/e2e/setup_test.go +++ b/tests/e2e/setup_test.go @@ -6,7 +6,6 @@ import ( "bytes" "testing" - sdk "github.com/cosmos/cosmos-sdk/types" ccv 
"github.com/cosmos/interchain-security/x/ccv/types" "github.com/cosmos/interchain-security/x/ccv/utils" @@ -24,24 +23,22 @@ import ( "github.com/stretchr/testify/suite" ) -type ProviderTestSuite struct { +type CCVTestSuite struct { suite.Suite - - coordinator *ibctesting.Coordinator - - // testing chains - providerChain *ibctesting.TestChain - consumerChain *ibctesting.TestChain - - path *ibctesting.Path - transferPath *ibctesting.Path + coordinator *ibctesting.Coordinator + providerChain *ibctesting.TestChain + consumerChain *ibctesting.TestChain + providerClient *ibctmtypes.ClientState + providerConsState *ibctmtypes.ConsensusState + path *ibctesting.Path + transferPath *ibctesting.Path } -func TestProviderTestSuite(t *testing.T) { - suite.Run(t, new(ProviderTestSuite)) +func TestCCVTestSuite(t *testing.T) { + suite.Run(t, new(CCVTestSuite)) } -func (suite *ProviderTestSuite) SetupTest() { +func (suite *CCVTestSuite) SetupTest() { suite.coordinator, suite.providerChain, suite.consumerChain = simapp.NewProviderConsumerCoordinator(suite.T()) // valsets must match @@ -76,6 +73,8 @@ func (suite *ProviderTestSuite) SetupTest() { ) suite.Require().True(found, "consumer genesis not found") suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper.InitGenesis(suite.consumerChain.GetContext(), &consumerGenesis) + suite.providerClient = consumerGenesis.ProviderClientState + suite.providerConsState = consumerGenesis.ProviderConsensusState // create path for the CCV channel suite.path = ibctesting.NewPath(suite.consumerChain, suite.providerChain) @@ -86,6 +85,7 @@ func (suite *ProviderTestSuite) SetupTest() { suite.providerCtx(), suite.consumerChain.ChainID, ) + suite.Require().True(found, "consumer client not found") suite.path.EndpointB.ClientID = consumerClient // - set consumer endpoint's clientID @@ -122,13 +122,12 @@ func (suite *ProviderTestSuite) SetupTest() { suite.transferPath.EndpointB.ChannelConfig.Version = transfertypes.Version } -func (suite *ProviderTestSuite) SetupCCVChannel() { +func (suite *CCVTestSuite) SetupCCVChannel() { suite.StartSetupCCVChannel() suite.CompleteSetupCCVChannel() - suite.SetupTransferChannel() } -func (suite *ProviderTestSuite) StartSetupCCVChannel() { +func (suite *CCVTestSuite) StartSetupCCVChannel() { suite.coordinator.CreateConnections(suite.path) err := suite.path.EndpointA.ChanOpenInit() @@ -138,7 +137,7 @@ func (suite *ProviderTestSuite) StartSetupCCVChannel() { suite.Require().NoError(err) } -func (suite *ProviderTestSuite) CompleteSetupCCVChannel() { +func (suite *CCVTestSuite) CompleteSetupCCVChannel() { err := suite.path.EndpointA.ChanOpenAck() suite.Require().NoError(err) @@ -150,7 +149,7 @@ func (suite *ProviderTestSuite) CompleteSetupCCVChannel() { suite.Require().NoError(err) } -func (suite *ProviderTestSuite) SetupTransferChannel() { +func (suite *CCVTestSuite) SetupTransferChannel() { // transfer path will use the same connection as ccv path suite.transferPath.EndpointA.ClientID = suite.path.EndpointA.ClientID @@ -177,296 +176,3 @@ func (suite *ProviderTestSuite) SetupTransferChannel() { err = suite.transferPath.EndpointA.UpdateClient() suite.Require().NoError(err) } - -// TODO: Can this be consolidated with ProviderTestSuite above? 
-type ProviderKeeperTestSuite struct { - suite.Suite - coordinator *ibctesting.Coordinator - - // testing chains - providerChain *ibctesting.TestChain - consumerChain *ibctesting.TestChain - path *ibctesting.Path - ctx sdk.Context -} - -func TestProviderKeeperTestSuite(t *testing.T) { - suite.Run(t, new(ProviderKeeperTestSuite)) -} - -func (suite *ProviderKeeperTestSuite) SetupTest() { - suite.coordinator, suite.providerChain, suite.consumerChain = simapp.NewProviderConsumerCoordinator(suite.T()) - - // valsets must match - providerValUpdates := tmtypes.TM2PB.ValidatorUpdates(suite.providerChain.Vals) - consumerValUpdates := tmtypes.TM2PB.ValidatorUpdates(suite.consumerChain.Vals) - suite.Require().True(len(providerValUpdates) == len(consumerValUpdates), "initial valset not matching") - for i := 0; i < len(providerValUpdates); i++ { - addr1 := utils.GetChangePubKeyAddress(providerValUpdates[i]) - addr2 := utils.GetChangePubKeyAddress(consumerValUpdates[i]) - suite.Require().True(bytes.Equal(addr1, addr2), "validator mismatch") - } - - // move both chains to the next block - suite.providerChain.NextBlock() - suite.consumerChain.NextBlock() - - // create consumer client on provider chain and set as consumer client for consumer chainID in provider keeper. - err := suite.providerChain.App.(*appProvider.App).ProviderKeeper.CreateConsumerClient( - suite.providerChain.GetContext(), - suite.consumerChain.ChainID, - suite.consumerChain.LastHeader.GetHeight().(clienttypes.Height), - false, - ) - suite.Require().NoError(err) - // move provider to next block to commit the state - suite.providerChain.NextBlock() - - // initialize the consumer chain with the genesis state stored on the provider - consumerGenesis, found := suite.providerChain.App.(*appProvider.App).ProviderKeeper.GetConsumerGenesis( - suite.providerChain.GetContext(), - suite.consumerChain.ChainID, - ) - suite.Require().True(found, "consumer genesis not found") - suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper.InitGenesis(suite.consumerChain.GetContext(), &consumerGenesis) - - // create path for the CCV channel - suite.path = ibctesting.NewPath(suite.consumerChain, suite.providerChain) - - // update CCV path with correct info - // - set provider endpoint's clientID - consumerClient, found := suite.providerChain.App.(*appProvider.App).ProviderKeeper.GetConsumerClientId( - suite.providerChain.GetContext(), - suite.consumerChain.ChainID, - ) - suite.Require().True(found, "consumer client not found") - suite.path.EndpointB.ClientID = consumerClient - // - set consumer endpoint's clientID - providerClient, found := suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper.GetProviderClientID(suite.consumerChain.GetContext()) - suite.Require().True(found, "provider client not found") - suite.path.EndpointA.ClientID = providerClient - // - client config - providerUnbondingPeriod := suite.providerChain.App.(*appProvider.App).GetStakingKeeper().UnbondingTime(suite.providerChain.GetContext()) - suite.path.EndpointB.ClientConfig.(*ibctesting.TendermintConfig).UnbondingPeriod = providerUnbondingPeriod - suite.path.EndpointB.ClientConfig.(*ibctesting.TendermintConfig).TrustingPeriod = providerUnbondingPeriod / utils.TrustingPeriodFraction - consumerUnbondingPeriod := utils.ComputeConsumerUnbondingPeriod(providerUnbondingPeriod) - suite.path.EndpointA.ClientConfig.(*ibctesting.TendermintConfig).UnbondingPeriod = consumerUnbondingPeriod - suite.path.EndpointA.ClientConfig.(*ibctesting.TendermintConfig).TrustingPeriod = consumerUnbondingPeriod 
/ utils.TrustingPeriodFraction - // - channel config - suite.path.EndpointA.ChannelConfig.PortID = ccv.ConsumerPortID - suite.path.EndpointB.ChannelConfig.PortID = ccv.ProviderPortID - suite.path.EndpointA.ChannelConfig.Version = ccv.Version - suite.path.EndpointB.ChannelConfig.Version = ccv.Version - suite.path.EndpointA.ChannelConfig.Order = channeltypes.ORDERED - suite.path.EndpointB.ChannelConfig.Order = channeltypes.ORDERED - - // set chains sender account number - // TODO: to be fixed in #151 - err = suite.path.EndpointB.Chain.SenderAccount.SetAccountNumber(6) - suite.Require().NoError(err) - err = suite.path.EndpointA.Chain.SenderAccount.SetAccountNumber(1) - suite.Require().NoError(err) - - suite.ctx = suite.providerChain.GetContext() -} - -type ConsumerTestSuite struct { - suite.Suite - - coordinator *ibctesting.Coordinator - - // testing chains - providerChain *ibctesting.TestChain - consumerChain *ibctesting.TestChain - - path *ibctesting.Path - - ctx sdk.Context -} - -func TestConsumerTestSuite(t *testing.T) { - suite.Run(t, new(ConsumerTestSuite)) -} - -func (suite *ConsumerTestSuite) SetupTest() { - suite.coordinator, suite.providerChain, suite.consumerChain = simapp.NewProviderConsumerCoordinator(suite.T()) - - // valsets must match - providerValUpdates := tmtypes.TM2PB.ValidatorUpdates(suite.providerChain.Vals) - consumerValUpdates := tmtypes.TM2PB.ValidatorUpdates(suite.consumerChain.Vals) - suite.Require().True(len(providerValUpdates) == len(consumerValUpdates), "initial valset not matching") - for i := 0; i < len(providerValUpdates); i++ { - addr1 := utils.GetChangePubKeyAddress(providerValUpdates[i]) - addr2 := utils.GetChangePubKeyAddress(consumerValUpdates[i]) - suite.Require().True(bytes.Equal(addr1, addr2), "validator mismatch") - } - - // move both chains to the next block - suite.providerChain.NextBlock() - suite.consumerChain.NextBlock() - - // create consumer client on provider chain and set as consumer client for consumer chainID in provider keeper. 
- err := suite.providerChain.App.(*appProvider.App).ProviderKeeper.CreateConsumerClient( - suite.providerChain.GetContext(), - suite.consumerChain.ChainID, - suite.consumerChain.LastHeader.GetHeight().(clienttypes.Height), - false, - ) - suite.Require().NoError(err) - // move provider to next block to commit the state - suite.providerChain.NextBlock() - - // initialize the consumer chain with the genesis state stored on the provider - consumerGenesis, found := suite.providerChain.App.(*appProvider.App).ProviderKeeper.GetConsumerGenesis( - suite.providerChain.GetContext(), - suite.consumerChain.ChainID, - ) - suite.Require().True(found, "consumer genesis not found") - suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper.InitGenesis(suite.consumerChain.GetContext(), &consumerGenesis) - - // create path for the CCV channel - suite.path = ibctesting.NewPath(suite.consumerChain, suite.providerChain) - - // update CCV path with correct info - // - set provider endpoint's clientID - consumerClient, found := suite.providerChain.App.(*appProvider.App).ProviderKeeper.GetConsumerClientId( - suite.providerChain.GetContext(), - suite.consumerChain.ChainID, - ) - suite.Require().True(found, "consumer client not found") - suite.path.EndpointB.ClientID = consumerClient - // - set consumer endpoint's clientID - providerClient, found := suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper.GetProviderClientID(suite.consumerChain.GetContext()) - suite.Require().True(found, "provider client not found") - suite.path.EndpointA.ClientID = providerClient - // - client config - providerUnbondingPeriod := suite.providerChain.App.(*appProvider.App).GetStakingKeeper().UnbondingTime(suite.providerChain.GetContext()) - suite.path.EndpointB.ClientConfig.(*ibctesting.TendermintConfig).UnbondingPeriod = providerUnbondingPeriod - suite.path.EndpointB.ClientConfig.(*ibctesting.TendermintConfig).TrustingPeriod = providerUnbondingPeriod / utils.TrustingPeriodFraction - consumerUnbondingPeriod := utils.ComputeConsumerUnbondingPeriod(providerUnbondingPeriod) - suite.path.EndpointA.ClientConfig.(*ibctesting.TendermintConfig).UnbondingPeriod = consumerUnbondingPeriod - suite.path.EndpointA.ClientConfig.(*ibctesting.TendermintConfig).TrustingPeriod = consumerUnbondingPeriod / utils.TrustingPeriodFraction - // - channel config - suite.path.EndpointA.ChannelConfig.PortID = ccv.ConsumerPortID - suite.path.EndpointB.ChannelConfig.PortID = ccv.ProviderPortID - suite.path.EndpointA.ChannelConfig.Version = ccv.Version - suite.path.EndpointB.ChannelConfig.Version = ccv.Version - suite.path.EndpointA.ChannelConfig.Order = channeltypes.ORDERED - suite.path.EndpointB.ChannelConfig.Order = channeltypes.ORDERED - - // set chains sender account number - // TODO: to be fixed in #151 - err = suite.path.EndpointB.Chain.SenderAccount.SetAccountNumber(6) - suite.Require().NoError(err) - err = suite.path.EndpointA.Chain.SenderAccount.SetAccountNumber(1) - suite.Require().NoError(err) - - suite.ctx = suite.consumerChain.GetContext() - - suite.coordinator.CreateConnections(suite.path) -} - -// TODO: Can this be consolidated with ConsumerTestSuite above? 
-type ConsumerKeeperTestSuite struct { - suite.Suite - - coordinator *ibctesting.Coordinator - - // testing chains - providerChain *ibctesting.TestChain - consumerChain *ibctesting.TestChain - - providerClient *ibctmtypes.ClientState - providerConsState *ibctmtypes.ConsensusState - - path *ibctesting.Path - - ctx sdk.Context -} - -func TestConsumerKeeperTestSuite(t *testing.T) { - suite.Run(t, new(ConsumerKeeperTestSuite)) -} - -func (suite *ConsumerKeeperTestSuite) SetupTest() { - suite.coordinator, suite.providerChain, suite.consumerChain = simapp.NewProviderConsumerCoordinator(suite.T()) - - // valsets must match - providerValUpdates := tmtypes.TM2PB.ValidatorUpdates(suite.providerChain.Vals) - consumerValUpdates := tmtypes.TM2PB.ValidatorUpdates(suite.consumerChain.Vals) - suite.Require().True(len(providerValUpdates) == len(consumerValUpdates), "initial valset not matching") - for i := 0; i < len(providerValUpdates); i++ { - addr1 := utils.GetChangePubKeyAddress(providerValUpdates[i]) - addr2 := utils.GetChangePubKeyAddress(consumerValUpdates[i]) - suite.Require().True(bytes.Equal(addr1, addr2), "validator mismatch") - } - - // move both chains to the next block - suite.providerChain.NextBlock() - suite.consumerChain.NextBlock() - - // create consumer client on provider chain and set as consumer client for consumer chainID in provider keeper. - err := suite.providerChain.App.(*appProvider.App).ProviderKeeper.CreateConsumerClient( - suite.providerChain.GetContext(), - suite.consumerChain.ChainID, - suite.consumerChain.LastHeader.GetHeight().(clienttypes.Height), - false, - ) - suite.Require().NoError(err) - // move provider to next block to commit the state - suite.providerChain.NextBlock() - - // initialize the consumer chain with the genesis state stored on the provider - consumerGenesis, found := suite.providerChain.App.(*appProvider.App).ProviderKeeper.GetConsumerGenesis( - suite.providerChain.GetContext(), - suite.consumerChain.ChainID, - ) - suite.Require().True(found, "consumer genesis not found") - suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper.InitGenesis(suite.consumerChain.GetContext(), &consumerGenesis) - suite.providerClient = consumerGenesis.ProviderClientState - suite.providerConsState = consumerGenesis.ProviderConsensusState - - // create path for the CCV channel - suite.path = ibctesting.NewPath(suite.consumerChain, suite.providerChain) - - // update CCV path with correct info - // - set provider endpoint's clientID - consumerClient, found := suite.providerChain.App.(*appProvider.App).ProviderKeeper.GetConsumerClientId( - suite.providerChain.GetContext(), - suite.consumerChain.ChainID, - ) - suite.Require().True(found, "consumer client not found") - suite.path.EndpointB.ClientID = consumerClient - // - set consumer endpoint's clientID - providerClient, found := suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper.GetProviderClientID(suite.consumerChain.GetContext()) - suite.Require().True(found, "provider client not found") - suite.path.EndpointA.ClientID = providerClient - // - client config - providerUnbondingPeriod := suite.providerChain.App.(*appProvider.App).GetStakingKeeper().UnbondingTime(suite.providerChain.GetContext()) - suite.path.EndpointB.ClientConfig.(*ibctesting.TendermintConfig).UnbondingPeriod = providerUnbondingPeriod - suite.path.EndpointB.ClientConfig.(*ibctesting.TendermintConfig).TrustingPeriod = providerUnbondingPeriod / utils.TrustingPeriodFraction - consumerUnbondingPeriod := 
utils.ComputeConsumerUnbondingPeriod(providerUnbondingPeriod) - suite.path.EndpointA.ClientConfig.(*ibctesting.TendermintConfig).UnbondingPeriod = consumerUnbondingPeriod - suite.path.EndpointA.ClientConfig.(*ibctesting.TendermintConfig).TrustingPeriod = consumerUnbondingPeriod / utils.TrustingPeriodFraction - // - channel config - suite.path.EndpointA.ChannelConfig.PortID = ccv.ConsumerPortID - suite.path.EndpointB.ChannelConfig.PortID = ccv.ProviderPortID - suite.path.EndpointA.ChannelConfig.Version = ccv.Version - suite.path.EndpointB.ChannelConfig.Version = ccv.Version - suite.path.EndpointA.ChannelConfig.Order = channeltypes.ORDERED - suite.path.EndpointB.ChannelConfig.Order = channeltypes.ORDERED - - // set chains sender account number - // TODO: to be fixed in #151 - err = suite.path.EndpointB.Chain.SenderAccount.SetAccountNumber(6) - suite.Require().NoError(err) - err = suite.path.EndpointA.Chain.SenderAccount.SetAccountNumber(1) - suite.Require().NoError(err) - - suite.ctx = suite.consumerChain.GetContext() -} - -func (suite *ConsumerKeeperTestSuite) SetupCCVChannel() { - suite.coordinator.CreateConnections(suite.path) - suite.coordinator.CreateChannels(suite.path) -} diff --git a/tests/e2e/slashing_test.go b/tests/e2e/slashing_test.go index bf861d99af..1e7e5fd6af 100644 --- a/tests/e2e/slashing_test.go +++ b/tests/e2e/slashing_test.go @@ -23,8 +23,9 @@ import ( ) // TestSendDowntimePacket tests consumer initiated slashing -func (s *ProviderTestSuite) TestSendSlashPacketDowntime() { +func (s *CCVTestSuite) TestSendSlashPacketDowntime() { s.SetupCCVChannel() + s.SetupTransferChannel() validatorsPerChain := len(s.consumerChain.Vals.Validators) providerStakingKeeper := s.providerChain.App.(*appProvider.App).StakingKeeper @@ -143,8 +144,9 @@ func (s *ProviderTestSuite) TestSendSlashPacketDowntime() { s.Require().NoError(err) } -func (s *ProviderTestSuite) TestSendSlashPacketDoubleSign() { +func (s *CCVTestSuite) TestSendSlashPacketDoubleSign() { s.SetupCCVChannel() + s.SetupTransferChannel() validatorsPerChain := len(s.consumerChain.Vals.Validators) providerStakingKeeper := s.providerChain.App.(*appProvider.App).StakingKeeper @@ -253,11 +255,12 @@ func (s *ProviderTestSuite) TestSendSlashPacketDoubleSign() { s.Require().True(valSignInfo.JailedUntil.Equal(evidencetypes.DoubleSignJailEndTime)) } -func (s *ProviderTestSuite) TestSlashPacketAcknowldgement() { +func (s *CCVTestSuite) TestSlashPacketAcknowldgement() { providerKeeper := s.providerChain.App.(*appProvider.App).ProviderKeeper consumerKeeper := s.consumerChain.App.(*appConsumer.App).ConsumerKeeper s.SetupCCVChannel() + s.SetupTransferChannel() packet := channeltypes.NewPacket([]byte{}, 1, ccv.ConsumerPortID, s.path.EndpointA.ChannelID, ccv.ProviderPortID, s.path.EndpointB.ChannelID, clienttypes.Height{}, 0) @@ -273,7 +276,7 @@ func (s *ProviderTestSuite) TestSlashPacketAcknowldgement() { } // TestHandleSlashPacketDoubleSigning tests the handling of a double-signing related slash packet, with e2e tests -func (suite *ProviderKeeperTestSuite) TestHandleSlashPacketDoubleSigning() { +func (suite *CCVTestSuite) TestHandleSlashPacketDoubleSigning() { providerKeeper := suite.providerChain.App.(*appProvider.App).ProviderKeeper providerSlashingKeeper := suite.providerChain.App.(*appProvider.App).SlashingKeeper providerStakingKeeper := suite.providerChain.App.(*appProvider.App).StakingKeeper @@ -282,21 +285,21 @@ func (suite *ProviderKeeperTestSuite) TestHandleSlashPacketDoubleSigning() { consAddr := sdk.ConsAddress(tmVal.Address) // check 
that validator bonded status - validator, found := providerStakingKeeper.GetValidatorByConsAddr(suite.ctx, consAddr) + validator, found := providerStakingKeeper.GetValidatorByConsAddr(suite.providerCtx(), consAddr) suite.Require().True(found) suite.Require().Equal(stakingtypes.Bonded, validator.GetStatus()) // set init VSC id for chain0 - providerKeeper.SetInitChainHeight(suite.ctx, suite.consumerChain.ChainID, uint64(suite.ctx.BlockHeight())) + providerKeeper.SetInitChainHeight(suite.providerCtx(), suite.consumerChain.ChainID, uint64(suite.providerCtx().BlockHeight())) // set validator signing-info providerSlashingKeeper.SetValidatorSigningInfo( - suite.ctx, + suite.providerCtx(), consAddr, slashingtypes.ValidatorSigningInfo{Address: consAddr.String()}, ) - _, err := providerKeeper.HandleSlashPacket(suite.ctx, suite.consumerChain.ChainID, + _, err := providerKeeper.HandleSlashPacket(suite.providerCtx(), suite.consumerChain.ChainID, ccv.NewSlashPacketData( abci.Validator{Address: tmVal.Address, Power: 0}, uint64(0), @@ -306,35 +309,35 @@ func (suite *ProviderKeeperTestSuite) TestHandleSlashPacketDoubleSigning() { suite.NoError(err) // verify that validator is jailed in the staking and slashing mdodules' states - suite.Require().True(providerStakingKeeper.IsValidatorJailed(suite.ctx, consAddr)) + suite.Require().True(providerStakingKeeper.IsValidatorJailed(suite.providerCtx(), consAddr)) - signingInfo, _ := providerSlashingKeeper.GetValidatorSigningInfo(suite.ctx, consAddr) + signingInfo, _ := providerSlashingKeeper.GetValidatorSigningInfo(suite.providerCtx(), consAddr) suite.Require().True(signingInfo.JailedUntil.Equal(evidencetypes.DoubleSignJailEndTime)) suite.Require().True(signingInfo.Tombstoned) } // TestHandleSlashPacketErrors tests errors for the HandleSlashPacket method in an e2e testing setting -func (suite *ProviderKeeperTestSuite) TestHandleSlashPacketErrors() { +func (suite *CCVTestSuite) TestHandleSlashPacketErrors() { providerStakingKeeper := suite.providerChain.App.(*appProvider.App).StakingKeeper ProviderKeeper := suite.providerChain.App.(*appProvider.App).ProviderKeeper providerSlashingKeeper := suite.providerChain.App.(*appProvider.App).SlashingKeeper consumerChainID := suite.consumerChain.ChainID // sync contexts block height - suite.ctx = suite.providerChain.GetContext() + ctx := suite.providerCtx() // expect an error if initial block height isn't set for consumer chain - _, err := ProviderKeeper.HandleSlashPacket(suite.ctx, consumerChainID, ccv.SlashPacketData{}) + _, err := ProviderKeeper.HandleSlashPacket(ctx, consumerChainID, ccv.SlashPacketData{}) suite.Require().Error(err, "slash validator with invalid infraction height") // save VSC ID - vID := ProviderKeeper.GetValidatorSetUpdateId(suite.ctx) + vID := ProviderKeeper.GetValidatorSetUpdateId(ctx) // remove block height for current VSC ID - ProviderKeeper.DeleteValsetUpdateBlockHeight(suite.ctx, vID) + ProviderKeeper.DeleteValsetUpdateBlockHeight(ctx, vID) // expect an error if block height mapping VSC ID is zero - _, err = ProviderKeeper.HandleSlashPacket(suite.ctx, consumerChainID, ccv.SlashPacketData{ValsetUpdateId: vID}) + _, err = ProviderKeeper.HandleSlashPacket(ctx, consumerChainID, ccv.SlashPacketData{ValsetUpdateId: vID}) suite.Require().Error(err, "slash with height mapping to zero") // construct slashing packet with non existing validator @@ -344,34 +347,34 @@ func (suite *ProviderKeeperTestSuite) TestHandleSlashPacketErrors() { ) // Set initial block height for consumer chain - 
ProviderKeeper.SetInitChainHeight(suite.ctx, consumerChainID, uint64(suite.ctx.BlockHeight())) + ProviderKeeper.SetInitChainHeight(ctx, consumerChainID, uint64(ctx.BlockHeight())) // expect the slash to not succeed if validator doesn't exist - success, err := ProviderKeeper.HandleSlashPacket(suite.ctx, consumerChainID, slashingPkt) + success, err := ProviderKeeper.HandleSlashPacket(ctx, consumerChainID, slashingPkt) suite.Require().NoError(err, "slashing an unknown validator should not result in error") suite.Require().False(success, "did slash unknown validator") // jail an existing validator val := suite.providerChain.Vals.Validators[0] consAddr := sdk.ConsAddress(val.Address) - providerStakingKeeper.Jail(suite.ctx, consAddr) + providerStakingKeeper.Jail(ctx, consAddr) // commit block to set VSC ID suite.coordinator.CommitBlock(suite.providerChain) // Update suite.ctx bc CommitBlock updates only providerChain's current header block height - suite.ctx = suite.providerChain.GetContext() - suite.Require().NotZero(ProviderKeeper.GetValsetUpdateBlockHeight(suite.ctx, vID)) + ctx = suite.providerChain.GetContext() + suite.Require().NotZero(ProviderKeeper.GetValsetUpdateBlockHeight(ctx, vID)) // create validator signing info - valInfo := slashingtypes.NewValidatorSigningInfo(sdk.ConsAddress(val.Address), suite.ctx.BlockHeight(), - suite.ctx.BlockHeight()-1, time.Time{}.UTC(), false, int64(0)) - providerSlashingKeeper.SetValidatorSigningInfo(suite.ctx, sdk.ConsAddress(val.Address), valInfo) + valInfo := slashingtypes.NewValidatorSigningInfo(sdk.ConsAddress(val.Address), ctx.BlockHeight(), + ctx.BlockHeight()-1, time.Time{}.UTC(), false, int64(0)) + providerSlashingKeeper.SetValidatorSigningInfo(ctx, sdk.ConsAddress(val.Address), valInfo) // update validator address and VSC ID slashingPkt.Validator.Address = val.Address slashingPkt.ValsetUpdateId = vID // expect to slash and jail validator - _, err = ProviderKeeper.HandleSlashPacket(suite.ctx, consumerChainID, slashingPkt) + _, err = ProviderKeeper.HandleSlashPacket(ctx, consumerChainID, slashingPkt) suite.Require().NoError(err, "did slash jail validator") // expect error when infraction type in unspecified @@ -380,25 +383,25 @@ func (suite *ProviderKeeperTestSuite) TestHandleSlashPacketErrors() { slashingPkt.Infraction = stakingtypes.InfractionEmpty valInfo.Address = sdk.ConsAddress(tmAddr).String() - providerSlashingKeeper.SetValidatorSigningInfo(suite.ctx, sdk.ConsAddress(tmAddr), valInfo) + providerSlashingKeeper.SetValidatorSigningInfo(ctx, sdk.ConsAddress(tmAddr), valInfo) - _, err = ProviderKeeper.HandleSlashPacket(suite.ctx, consumerChainID, slashingPkt) + _, err = ProviderKeeper.HandleSlashPacket(ctx, consumerChainID, slashingPkt) suite.Require().EqualError(err, fmt.Sprintf("invalid infraction type: %v", stakingtypes.InfractionEmpty)) // expect to slash jail validator slashingPkt.Infraction = stakingtypes.DoubleSign - _, err = ProviderKeeper.HandleSlashPacket(suite.ctx, consumerChainID, slashingPkt) + _, err = ProviderKeeper.HandleSlashPacket(ctx, consumerChainID, slashingPkt) suite.Require().NoError(err) // expect the slash to not succeed when validator is tombstoned - success, _ = ProviderKeeper.HandleSlashPacket(suite.ctx, consumerChainID, slashingPkt) + success, _ = ProviderKeeper.HandleSlashPacket(ctx, consumerChainID, slashingPkt) suite.Require().False(success) } // TestHandleSlashPacketDistribution tests the slashing of an undelegation balance // by varying the slash packet VSC ID mapping to infraction heights // lesser, equal or 
greater than the undelegation entry creation height -func (suite *ProviderKeeperTestSuite) TestHandleSlashPacketDistribution() { +func (suite *CCVTestSuite) TestHandleSlashPacketDistribution() { providerStakingKeeper := suite.providerChain.App.(*appProvider.App).StakingKeeper providerKeeper := suite.providerChain.App.(*appProvider.App).ProviderKeeper @@ -419,20 +422,20 @@ func (suite *ProviderKeeperTestSuite) TestHandleSlashPacketDistribution() { // setup the test with a delegation, a no-op and an undelegation setupOperations := []struct { - fn func(suite *ProviderKeeperTestSuite) error + fn func(suite *CCVTestSuite) error }{ { - func(suite *ProviderKeeperTestSuite) error { + func(suite *CCVTestSuite) error { testShares, err = providerStakingKeeper.Delegate(suite.providerChain.GetContext(), delAddr, bondAmt, stakingtypes.Unbonded, stakingtypes.Validator(validator), true) return err }, }, { - func(suite *ProviderKeeperTestSuite) error { + func(suite *CCVTestSuite) error { return nil }, }, { // undelegate a quarter of the new shares created - func(suite *ProviderKeeperTestSuite) error { + func(suite *CCVTestSuite) error { _, err = providerStakingKeeper.Undelegate(suite.providerChain.GetContext(), delAddr, valAddr, testShares.QuoInt64(4)) return err }, @@ -499,81 +502,84 @@ func (suite *ProviderKeeperTestSuite) TestHandleSlashPacketDistribution() { // TestValidatorDowntime tests if a slash packet is sent // and if the outstanding slashing flag is switched // when a validator has downtime on the slashing module -func (suite *ConsumerKeeperTestSuite) TestValidatorDowntime() { +func (suite *CCVTestSuite) TestValidatorDowntime() { // initial setup suite.SetupCCVChannel() suite.SendEmptyVSCPacket() // sync suite context after CCV channel is established - suite.ctx = suite.consumerChain.GetContext() + ctx := suite.consumerCtx() app := suite.consumerChain.App.(*appConsumer.App) channelID := suite.path.EndpointA.ChannelID // pick a cross-chain validator - vals := app.ConsumerKeeper.GetAllCCValidator(suite.ctx) + vals := app.ConsumerKeeper.GetAllCCValidator(ctx) consAddr := sdk.ConsAddress(vals[0].Address) // save next sequence before sending a slash packet - seq, ok := app.GetIBCKeeper().ChannelKeeper.GetNextSequenceSend(suite.ctx, ccv.ConsumerPortID, channelID) + seq, ok := app.GetIBCKeeper().ChannelKeeper.GetNextSequenceSend(ctx, ccv.ConsumerPortID, channelID) suite.Require().True(ok) // Sign 100 blocks valPower := int64(1) - height, signedBlocksWindow := int64(0), app.SlashingKeeper.SignedBlocksWindow(suite.ctx) + height, signedBlocksWindow := int64(0), app.SlashingKeeper.SignedBlocksWindow(ctx) for ; height < signedBlocksWindow; height++ { - suite.ctx = suite.ctx.WithBlockHeight(height) - app.SlashingKeeper.HandleValidatorSignature(suite.ctx, vals[0].Address, valPower, true) + ctx = ctx.WithBlockHeight(height) + app.SlashingKeeper.HandleValidatorSignature(ctx, vals[0].Address, valPower, true) } - missedBlockThreshold := (2 * signedBlocksWindow) - app.SlashingKeeper.MinSignedPerWindow(suite.ctx) + missedBlockThreshold := (2 * signedBlocksWindow) - app.SlashingKeeper.MinSignedPerWindow(ctx) + ctx = suite.consumerCtx() // construct slash packet to be sent and get its commit packetData := ccv.NewSlashPacketData( abci.Validator{Address: vals[0].Address, Power: valPower}, // get the VSC ID mapping the infraction height - app.ConsumerKeeper.GetHeightValsetUpdateID(suite.ctx, uint64(missedBlockThreshold-sdk.ValidatorUpdateDelay-1)), + app.ConsumerKeeper.GetHeightValsetUpdateID(ctx, 
uint64(missedBlockThreshold-sdk.ValidatorUpdateDelay-1)), stakingtypes.Downtime, ) - expCommit := suite.commitSlashPacket(suite.ctx, packetData) + expCommit := suite.commitSlashPacket(ctx, packetData) // Miss 50 blocks and expect a slash packet to be sent for ; height <= missedBlockThreshold; height++ { - suite.ctx = suite.ctx.WithBlockHeight(height) - app.SlashingKeeper.HandleValidatorSignature(suite.ctx, vals[0].Address, valPower, false) + ctx = ctx.WithBlockHeight(height) + app.SlashingKeeper.HandleValidatorSignature(ctx, vals[0].Address, valPower, false) } + ctx = suite.consumerCtx() + // check validator signing info - res, _ := app.SlashingKeeper.GetValidatorSigningInfo(suite.ctx, consAddr) + res, _ := app.SlashingKeeper.GetValidatorSigningInfo(ctx, consAddr) // expect increased jail time - suite.Require().True(res.JailedUntil.Equal(suite.ctx.BlockTime().Add(app.SlashingKeeper.DowntimeJailDuration(suite.ctx))), "did not update validator jailed until signing info") + suite.Require().True(res.JailedUntil.Equal(ctx.BlockTime().Add(app.SlashingKeeper.DowntimeJailDuration(ctx))), "did not update validator jailed until signing info") // expect missed block counters to be reset suite.Require().Zero(res.MissedBlocksCounter, "did not reset validator missed block counter") suite.Require().Zero(res.IndexOffset) - app.SlashingKeeper.IterateValidatorMissedBlockBitArray(suite.ctx, consAddr, func(_ int64, missed bool) bool { + app.SlashingKeeper.IterateValidatorMissedBlockBitArray(ctx, consAddr, func(_ int64, missed bool) bool { suite.Require().True(missed) return false }) // verify that the slash packet was sent - gotCommit := app.IBCKeeper.ChannelKeeper.GetPacketCommitment(suite.ctx, ccv.ConsumerPortID, channelID, seq) + gotCommit := app.IBCKeeper.ChannelKeeper.GetPacketCommitment(ctx, ccv.ConsumerPortID, channelID, seq) suite.Require().NotNil(gotCommit, "did not find slash packet commitment") suite.Require().EqualValues(expCommit, gotCommit, "invalid slash packet commitment") // verify that the outstanding downtime flag is set - suite.Require().True(app.ConsumerKeeper.OutstandingDowntime(suite.ctx, consAddr)) + suite.Require().True(app.ConsumerKeeper.OutstandingDowntime(ctx, consAddr)) // check that the outstanding slashing flag prevents the jailed validator from being slashed again while it keeps missing blocks for ; height < missedBlockThreshold+signedBlocksWindow; height++ { - suite.ctx = suite.ctx.WithBlockHeight(height) - app.SlashingKeeper.HandleValidatorSignature(suite.ctx, vals[0].Address, valPower, false) + ctx = ctx.WithBlockHeight(height) + app.SlashingKeeper.HandleValidatorSignature(ctx, vals[0].Address, valPower, false) } - res, _ = app.SlashingKeeper.GetValidatorSigningInfo(suite.ctx, consAddr) + res, _ = app.SlashingKeeper.GetValidatorSigningInfo(ctx, consAddr) suite.Require().Zero(res.MissedBlocksCounter, "did not reset validator missed block counter") suite.Require().Zero(res.IndexOffset) - app.SlashingKeeper.IterateValidatorMissedBlockBitArray(suite.ctx, consAddr, func(_ int64, missed bool) bool { + app.SlashingKeeper.IterateValidatorMissedBlockBitArray(ctx, consAddr, func(_ int64, missed bool) bool { suite.Require().True(missed, "did not reset validator missed block bit array") return false }) @@ -581,13 +587,13 @@ func (suite *ConsumerKeeperTestSuite) TestValidatorDowntime() {
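// A short worked example of the missedBlockThreshold arithmetic used by TestValidatorDowntime
// above. The numbers are illustrative only and are not taken from this patch: assuming
// SignedBlocksWindow(ctx) = 100 and MinSignedPerWindow(ctx) = 50 blocks,
//
//	signedBlocksWindow := int64(100)
//	minSignedPerWindow := int64(50)
//	missedBlockThreshold := (2 * signedBlocksWindow) - minSignedPerWindow // = 150
//
// the validator signs heights [0, 100) and is then expected to be jailed, with a single
// downtime slash packet sent, once it has missed more than
// signedBlocksWindow - minSignedPerWindow = 50 of the last 100 blocks, which the test
// reaches around height 150.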
// TestValidatorDoubleSigning tests if a slash packet is sent // when double-signing evidence is handled by the evidence module -func (suite *ConsumerKeeperTestSuite) TestValidatorDoubleSigning() { +func (suite *CCVTestSuite) TestValidatorDoubleSigning() { // initial setup suite.SetupCCVChannel() suite.SendEmptyVSCPacket() // sync suite context after CCV channel is established - suite.ctx = suite.consumerChain.GetContext() + ctx := suite.consumerCtx() app := suite.consumerChain.App.(*appConsumer.App) channelID := suite.path.EndpointA.ChannelID @@ -598,7 +604,7 @@ func (suite *ConsumerKeeperTestSuite) TestValidatorDoubleSigning() { consAddr := sdk.ConsAddress(pubkey.Address()) // set an arbitrary infraction height - infractionHeight := suite.ctx.BlockHeight() - 1 + infractionHeight := ctx.BlockHeight() - 1 power := int64(100) // create evidence @@ -610,36 +616,36 @@ func (suite *ConsumerKeeperTestSuite) TestValidatorDoubleSigning() { } // add validator signing-info to the store - app.SlashingKeeper.SetValidatorSigningInfo(suite.ctx, consAddr, slashingtypes.ValidatorSigningInfo{ + app.SlashingKeeper.SetValidatorSigningInfo(ctx, consAddr, slashingtypes.ValidatorSigningInfo{ Address: consAddr.String(), Tombstoned: false, }) // save next sequence before sending a slash packet - seq, ok := app.GetIBCKeeper().ChannelKeeper.GetNextSequenceSend(suite.ctx, ccv.ConsumerPortID, channelID) + seq, ok := app.GetIBCKeeper().ChannelKeeper.GetNextSequenceSend(ctx, ccv.ConsumerPortID, channelID) suite.Require().True(ok) // construct slash packet data and get the expected commit hash packetData := ccv.NewSlashPacketData( abci.Validator{Address: consAddr.Bytes(), Power: power}, // get VSC ID mapping to the infraction height with the TM delay subtracted - app.ConsumerKeeper.GetHeightValsetUpdateID(suite.ctx, uint64(infractionHeight-sdk.ValidatorUpdateDelay)), + app.ConsumerKeeper.GetHeightValsetUpdateID(ctx, uint64(infractionHeight-sdk.ValidatorUpdateDelay)), stakingtypes.DoubleSign, ) - expCommit := suite.commitSlashPacket(suite.ctx, packetData) + expCommit := suite.commitSlashPacket(ctx, packetData) // expect to send slash packet when handling double-sign evidence - app.EvidenceKeeper.HandleEquivocationEvidence(suite.ctx, e) + app.EvidenceKeeper.HandleEquivocationEvidence(ctx, e) // check that slash packet is sent - gotCommit := app.IBCKeeper.ChannelKeeper.GetPacketCommitment(suite.ctx, ccv.ConsumerPortID, channelID, seq) + gotCommit := app.IBCKeeper.ChannelKeeper.GetPacketCommitment(ctx, ccv.ConsumerPortID, channelID, seq) suite.NotNil(gotCommit) suite.Require().EqualValues(expCommit, gotCommit) } // TestSendSlashPacket tests the functionality of SendSlashPacket and asserts state changes related to that method -func (suite *ConsumerKeeperTestSuite) TestSendSlashPacket() { +func (suite *CCVTestSuite) TestSendSlashPacket() { suite.SetupCCVChannel() app := suite.consumerChain.App.(*appConsumer.App) @@ -678,7 +684,7 @@ func (suite *ConsumerKeeperTestSuite) TestSendSlashPacket() { // verify that all requests are stored requests := app.ConsumerKeeper.GetPendingSlashRequests(ctx) - suite.Require().Len(requests, 16) + suite.Require().Len(requests.GetRequests(), 16) // save consumer next sequence seq, _ := app.GetIBCKeeper().ChannelKeeper.GetNextSequenceSend(ctx, ccv.ConsumerPortID, channelID) @@ -699,7 +705,7 @@ func (suite *ConsumerKeeperTestSuite) TestSendSlashPacket() { // check that outstanding downtime flags // are all set to true for validators slashed for downtime requests - for _, r := range requests { + for _, r := range requests.GetRequests() { downtime := r.Infraction == stakingtypes.Downtime if downtime { consAddr := sdk.ConsAddress(r.Packet.Validator.Address) @@ -709,12 +715,12 @@ func (suite *ConsumerKeeperTestSuite) 
TestSendSlashPacket() { // check that pending slash requests get cleared after being sent requests = app.ConsumerKeeper.GetPendingSlashRequests(ctx) - suite.Require().Len(requests, 0) + suite.Require().Len(requests.GetRequests(), 0) // check that slash requests aren't stored when channel is established app.ConsumerKeeper.SendSlashPacket(ctx, abci.Validator{}, 0, stakingtypes.Downtime) app.ConsumerKeeper.SendSlashPacket(ctx, abci.Validator{}, 0, stakingtypes.DoubleSign) requests = app.ConsumerKeeper.GetPendingSlashRequests(ctx) - suite.Require().Len(requests, 0) + suite.Require().Len(requests.GetRequests(), 0) } diff --git a/tests/e2e/stop_consumer_test.go b/tests/e2e/stop_consumer_test.go index e490d8c014..767b8d793d 100644 --- a/tests/e2e/stop_consumer_test.go +++ b/tests/e2e/stop_consumer_test.go @@ -1,19 +1,17 @@ package e2e_test import ( - "time" - sdk "github.com/cosmos/cosmos-sdk/types" stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" - clienttypes "github.com/cosmos/ibc-go/v3/modules/core/02-client/types" channeltypes "github.com/cosmos/ibc-go/v3/modules/core/04-channel/types" + appConsumer "github.com/cosmos/interchain-security/app/consumer" appProvider "github.com/cosmos/interchain-security/app/provider" - providertypes "github.com/cosmos/interchain-security/x/ccv/provider/types" ccv "github.com/cosmos/interchain-security/x/ccv/types" abci "github.com/tendermint/tendermint/abci/types" ) -func (s *ProviderTestSuite) TestStopConsumerChain() { +// Tests the functionality of stopping a consumer chain at a higher level than unit tests +func (s *CCVTestSuite) TestStopConsumerChain() { // default consumer chain ID consumerChainID := s.consumerChain.ChainID @@ -45,22 +43,23 @@ func (s *ProviderTestSuite) TestStopConsumerChain() { // - undelegate the shares in four consecutive blocks evenly; create UnbondigOp and UnbondingOpIndex entries for the consumer chain ID // - set SlashAck and LockUnbondingOnTimeout states for the consumer chain ID setupOperations := []struct { - fn func(suite *ProviderTestSuite) error + fn func(suite *CCVTestSuite) error }{ { - func(suite *ProviderTestSuite) error { + func(suite *CCVTestSuite) error { suite.SetupCCVChannel() + suite.SetupTransferChannel() return nil }, }, { - func(suite *ProviderTestSuite) error { + func(suite *CCVTestSuite) error { testShares, err = s.providerChain.App.(*appProvider.App).StakingKeeper.Delegate(s.providerCtx(), delAddr, bondAmt, stakingtypes.Unbonded, stakingtypes.Validator(validator), true) return err }, }, { - func(suite *ProviderTestSuite) error { + func(suite *CCVTestSuite) error { for i := 0; i < ubdOpsNum; i++ { // undelegate one quarter of the shares _, err := s.providerChain.App.(*appProvider.App).StakingKeeper.Undelegate(s.providerCtx(), delAddr, valAddr, testShares.QuoInt64(int64(ubdOpsNum))) @@ -74,9 +73,10 @@ func (s *ProviderTestSuite) TestStopConsumerChain() { }, }, { - func(suite *ProviderTestSuite) error { + func(suite *CCVTestSuite) error { s.providerChain.App.(*appProvider.App).ProviderKeeper.SetSlashAcks(s.providerCtx(), consumerChainID, []string{"validator-1", "validator-2", "validator-3"}) s.providerChain.App.(*appProvider.App).ProviderKeeper.SetLockUnbondingOnTimeout(s.providerCtx(), consumerChainID) + s.providerChain.App.(*appProvider.App).ProviderKeeper.AppendPendingVSC(s.providerCtx(), consumerChainID, ccv.ValidatorSetChangePacketData{ValsetUpdateId: 1}) return nil }, }, @@ -95,109 +95,11 @@ func (s *ProviderTestSuite) TestStopConsumerChain() { s.checkConsumerChainIsRemoved(consumerChainID, 
false) } -func (s *ProviderTestSuite) TestStopConsumerChainProposal() { - var ( - ctx sdk.Context - proposal *providertypes.StopConsumerChainProposal - ok bool - ) - - chainID := s.consumerChain.ChainID - - testCases := []struct { - name string - malleate func(*ProviderTestSuite) - expPass bool - stopReached bool - }{ - { - "valid stop consumer chain proposal: stop time reached", func(suite *ProviderTestSuite) { - - // ctx blocktime is after proposal's stop time - ctx = s.providerCtx().WithBlockTime(time.Now().Add(time.Hour)) - content, err := providertypes.NewStopConsumerChainProposal("title", "description", chainID, time.Now()) - s.Require().NoError(err) - proposal, ok = content.(*providertypes.StopConsumerChainProposal) - s.Require().True(ok) - }, true, true, - }, - { - "valid proposal: stop time has not yet been reached", func(suite *ProviderTestSuite) { - - // ctx blocktime is before proposal's stop time - ctx = s.providerCtx().WithBlockTime(time.Now()) - content, err := providertypes.NewStopConsumerChainProposal("title", "description", chainID, time.Now().Add(time.Hour)) - s.Require().NoError(err) - proposal, ok = content.(*providertypes.StopConsumerChainProposal) - s.Require().True(ok) - }, true, false, - }, - { - "valid proposal: fail due to an invalid unbonding index", func(suite *ProviderTestSuite) { - - // ctx blocktime is after proposal's stop time - ctx = s.providerCtx().WithBlockTime(time.Now().Add(time.Hour)) - - // set invalid unbonding op index - s.providerChain.App.(*appProvider.App).ProviderKeeper.SetUnbondingOpIndex(ctx, chainID, 0, []uint64{0}) - - content, err := providertypes.NewStopConsumerChainProposal("title", "description", chainID, time.Now()) - s.Require().NoError(err) - proposal, ok = content.(*providertypes.StopConsumerChainProposal) - s.Require().True(ok) - }, false, true, - }, - } - - for _, tc := range testCases { - tc := tc - - s.Run(tc.name, func() { - s.SetupTest() - s.SetupCCVChannel() - - tc.malleate(s) - - err := s.providerChain.App.(*appProvider.App).ProviderKeeper.StopConsumerChainProposal(ctx, proposal) - if tc.expPass { - s.Require().NoError(err, "error returned on valid case") - if tc.stopReached { - // check that the pending stop consumer chain proposal is deleted - found := s.providerChain.App.(*appProvider.App).ProviderKeeper.GetPendingStopProposal(ctx, chainID, proposal.StopTime) - s.Require().False(found, "pending stop consumer proposal wasn't deleted") - - // check that the consumer chain is removed - s.checkConsumerChainIsRemoved(chainID, false) - - } else { - found := s.providerChain.App.(*appProvider.App).ProviderKeeper.GetPendingStopProposal(ctx, chainID, proposal.StopTime) - s.Require().True(found, "pending stop consumer was not found for chain ID %s", chainID) - - // check that the consumer chain client exists - _, found = s.providerChain.App.(*appProvider.App).ProviderKeeper.GetConsumerClientId(s.providerCtx(), chainID) - s.Require().True(found) - - // check that the chainToChannel and channelToChain exist for the consumer chain ID - _, found = s.providerChain.App.(*appProvider.App).ProviderKeeper.GetChainToChannel(s.providerCtx(), chainID) - s.Require().True(found) - - _, found = s.providerChain.App.(*appProvider.App).ProviderKeeper.GetChannelToChain(s.providerCtx(), s.path.EndpointB.ChannelID) - s.Require().True(found) - - // check that channel is in OPEN state - s.Require().Equal(channeltypes.OPEN, s.path.EndpointB.GetChannel().State) - } - } else { - s.Require().Error(err, "did not return error on invalid case") - } - }) - } -} 
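// The TestProviderChannelClosed test added near the end of this file asserts that the
// consumer BeginBlocker panics once the established provider channel has been closed,
// using a deferred recover(). A minimal, self-contained sketch of that assertion pattern,
// assuming a plain *testing.T (the requirePanics helper below is illustrative and not
// part of this patch):
//
//	func requirePanics(t *testing.T, fn func()) {
//		t.Helper()
//		defer func() {
//			if recover() == nil {
//				t.Fatal("expected the call to panic, but it returned normally")
//			}
//		}()
//		fn()
//	}
//
// Under the same assumption it would be used as:
//
//	requirePanics(t, func() { app.BeginBlocker(ctx, abci.RequestBeginBlock{}) })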
- // TODO Simon: implement OnChanCloseConfirm in IBC-GO testing to close the consumer chain's channel end -func (s *ProviderTestSuite) TestStopConsumerOnChannelClosed() { +func (s *CCVTestSuite) TestStopConsumerOnChannelClosed() { // init the CCV channel states s.SetupCCVChannel() + s.SetupTransferChannel() s.SendEmptyVSCPacket() // stop the consumer chain @@ -222,7 +124,7 @@ func (s *ProviderTestSuite) TestStopConsumerOnChannelClosed() { // s.Require().False(found) } -func (s *ProviderTestSuite) checkConsumerChainIsRemoved(chainID string, lockUbd bool) { +func (s *CCVTestSuite) checkConsumerChainIsRemoved(chainID string, lockUbd bool) { channelID := s.path.EndpointB.ChannelID providerKeeper := s.providerChain.App.(*appProvider.App).ProviderKeeper @@ -250,8 +152,10 @@ func (s *ProviderTestSuite) checkConsumerChainIsRemoved(chainID string, lockUbd } // verify consumer chain's states are removed + _, found := providerKeeper.GetConsumerGenesis(s.providerCtx(), chainID) + s.Require().False(found) s.Require().False(providerKeeper.GetLockUnbondingOnTimeout(s.providerCtx(), chainID)) - _, found := providerKeeper.GetConsumerClientId(s.providerCtx(), chainID) + _, found = providerKeeper.GetConsumerClientId(s.providerCtx(), chainID) s.Require().False(found) _, found = providerKeeper.GetChainToChannel(s.providerCtx(), chainID) @@ -262,36 +166,31 @@ func (s *ProviderTestSuite) checkConsumerChainIsRemoved(chainID string, lockUbd s.Require().Nil(providerKeeper.GetSlashAcks(s.providerCtx(), chainID)) s.Require().Zero(providerKeeper.GetInitChainHeight(s.providerCtx(), chainID)) - // TODO Simon: check that pendingVSCPacket are emptied - once - // https://github.com/cosmos/interchain-security/issues/27 is implemented + s.Require().Nil(providerKeeper.GetPendingVSCs(s.providerCtx(), chainID)) } -// TODO Simon: duplicated from consumer/keeper_test.go; figure out how it can be refactored -// SendEmptyVSCPacket sends a VSC packet without any changes -// to ensure that the CCV channel gets established -func (s *ProviderTestSuite) SendEmptyVSCPacket() { - providerKeeper := s.providerChain.App.(*appProvider.App).ProviderKeeper - - oldBlockTime := s.providerChain.GetContext().BlockTime() - timeout := uint64(ccv.GetTimeoutTimestamp(oldBlockTime).UnixNano()) - - valUpdateID := providerKeeper.GetValidatorSetUpdateId(s.providerChain.GetContext()) - - pd := ccv.NewValidatorSetChangePacketData( - []abci.ValidatorUpdate{}, - valUpdateID, - nil, - ) - - seq, ok := s.providerChain.App.(*appProvider.App).GetIBCKeeper().ChannelKeeper.GetNextSequenceSend( - s.providerChain.GetContext(), ccv.ProviderPortID, s.path.EndpointB.ChannelID) - s.Require().True(ok) - - packet := channeltypes.NewPacket(pd.GetBytes(), seq, ccv.ProviderPortID, s.path.EndpointB.ChannelID, - ccv.ConsumerPortID, s.path.EndpointA.ChannelID, clienttypes.Height{}, timeout) - - err := s.path.EndpointB.SendPacket(packet) - s.Require().NoError(err) - err = s.path.EndpointA.RecvPacket(packet) - s.Require().NoError(err) +// TestProviderChannelClosed checks that a consumer chain panics +// when the provider channel was established and then closed +func (suite *CCVTestSuite) TestProviderChannelClosed() { + + suite.SetupCCVChannel() + // establish provider channel with a first VSC packet + suite.SendEmptyVSCPacket() + + channelID, found := suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper.GetProviderChannel(suite.consumerChain.GetContext()) + suite.Require().True(found) + + // close provider channel + err := 
suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper.ChanCloseInit(suite.consumerChain.GetContext(), ccv.ConsumerPortID, channelID) + suite.Require().NoError(err) + suite.Require().True(suite.consumerChain.App.(*appConsumer.App).ConsumerKeeper.IsChannelClosed(suite.consumerChain.GetContext(), channelID)) + + // assert begin blocker did panics + defer func() { + if r := recover(); r != nil { + return + } + suite.Require().Fail("Begin blocker did not panic with a closed channel") + }() + suite.consumerChain.App.(*appConsumer.App).BeginBlocker(suite.consumerChain.GetContext(), abci.RequestBeginBlock{}) } diff --git a/tests/e2e/unbonding_test.go b/tests/e2e/unbonding_test.go index db3eff480c..baeb96eb22 100644 --- a/tests/e2e/unbonding_test.go +++ b/tests/e2e/unbonding_test.go @@ -11,8 +11,9 @@ import ( // TestUndelegationProviderFirst checks that an unbonding operation completes // when the unbonding period elapses first on the provider chain -func (s *ProviderTestSuite) TestUndelegationProviderFirst() { +func (s *CCVTestSuite) TestUndelegationProviderFirst() { s.SetupCCVChannel() + s.SetupTransferChannel() // delegate bondAmt and undelegate 1/2 of it bondAmt := sdk.NewInt(10000000) @@ -54,8 +55,9 @@ func (s *ProviderTestSuite) TestUndelegationProviderFirst() { // TestUndelegationConsumerFirst checks that an unbonding operation completes // when the unbonding period elapses first on the consumer chain -func (s *ProviderTestSuite) TestUndelegationConsumerFirst() { +func (s *CCVTestSuite) TestUndelegationConsumerFirst() { s.SetupCCVChannel() + s.SetupTransferChannel() // delegate bondAmt and undelegate 1/2 of it bondAmt := sdk.NewInt(10000000) @@ -95,8 +97,9 @@ func (s *ProviderTestSuite) TestUndelegationConsumerFirst() { // TestUndelegationNoValsetChange checks that an unbonding operation completes // even when the validator set is not changed -func (s *ProviderTestSuite) TestUndelegationNoValsetChange() { +func (s *CCVTestSuite) TestUndelegationNoValsetChange() { s.SetupCCVChannel() + s.SetupTransferChannel() // delegate bondAmt and undelegate all of it bondAmt := sdk.NewInt(10000000) @@ -137,7 +140,7 @@ func (s *ProviderTestSuite) TestUndelegationNoValsetChange() { // TestUndelegationDuringInit checks that before the CCV channel is established // - no undelegations can complete, even if the provider unbonding period elapses // - all the VSC packets are stored in state as pending -func (s *ProviderTestSuite) TestUndelegationDuringInit() { +func (s *CCVTestSuite) TestUndelegationDuringInit() { // delegate bondAmt and undelegate 1/2 of it bondAmt := sdk.NewInt(10000000) delAddr := s.providerChain.SenderAccount.GetAddress() @@ -175,6 +178,7 @@ func (s *ProviderTestSuite) TestUndelegationDuringInit() { // complete CCV channel setup s.SetupCCVChannel() + s.SetupTransferChannel() // relay VSC packets from provider to consumer relayAllCommittedPackets(s, s.providerChain, s.path, ccv.ProviderPortID, s.path.EndpointB.ChannelID, 2) @@ -200,7 +204,7 @@ func (s *ProviderTestSuite) TestUndelegationDuringInit() { // Check unbonding ops on both sides // Advance time so that provider's unbonding op completes // Check that unbonding has completed in provider staking -func (s *ProviderTestSuite) TestUnbondingNoConsumer() { +func (s *CCVTestSuite) TestUnbondingNoConsumer() { // remove the consumer chain, which was already registered during setup s.providerChain.App.(*appProvider.App).ProviderKeeper.DeleteConsumerClientId(s.providerCtx(), s.consumerChain.ChainID) @@ -231,7 +235,7 @@ func (s 
*ProviderTestSuite) TestUnbondingNoConsumer() { // TestRedelegationNoConsumer tests a redelegate transaction // submitted on a provider chain with no consumers -func (s *ProviderTestSuite) TestRedelegationNoConsumer() { +func (s *CCVTestSuite) TestRedelegationNoConsumer() { providerKeeper := s.providerChain.App.(*appProvider.App).ProviderKeeper stakingKeeper := s.providerChain.App.(*appProvider.App).StakingKeeper @@ -275,8 +279,9 @@ func (s *ProviderTestSuite) TestRedelegationNoConsumer() { // TestRedelegationWithConsumer tests a redelegate transaction submitted on a provider chain // when the unbonding period elapses first on the provider chain -func (s *ProviderTestSuite) TestRedelegationProviderFirst() { +func (s *CCVTestSuite) TestRedelegationProviderFirst() { s.SetupCCVChannel() + s.SetupTransferChannel() stakingKeeper := s.providerChain.App.(*appProvider.App).StakingKeeper providerKeeper := s.providerChain.App.(*appProvider.App).ProviderKeeper diff --git a/tests/e2e/valset_update_test.go b/tests/e2e/valset_update_test.go index b68edafff9..faecf9cd1f 100644 --- a/tests/e2e/valset_update_test.go +++ b/tests/e2e/valset_update_test.go @@ -13,8 +13,9 @@ import ( ) // TestPacketRoundtrip tests a CCV packet roundtrip when tokens are bonded on provider -func (s *ProviderTestSuite) TestPacketRoundtrip() { +func (s *CCVTestSuite) TestPacketRoundtrip() { s.SetupCCVChannel() + s.SetupTransferChannel() // Bond some tokens on provider to change validator powers bondAmt := sdk.NewInt(1000000) @@ -35,7 +36,7 @@ func (s *ProviderTestSuite) TestPacketRoundtrip() { } // TestSendVSCMaturedPackets tests the behavior of SendVSCMaturedPackets and related state checks -func (suite *ConsumerKeeperTestSuite) TestSendVSCMaturedPackets() { +func (suite *CCVTestSuite) TestSendVSCMaturedPackets() { // setup CCV channel suite.SetupCCVChannel() diff --git a/tests/integration/actions.go b/tests/integration/actions.go index 955e6f481d..32578bc72d 100644 --- a/tests/integration/actions.go +++ b/tests/integration/actions.go @@ -6,11 +6,13 @@ import ( "fmt" "log" "os/exec" + "strconv" "strings" "sync" "time" - clienttypes "github.com/cosmos/ibc-go/modules/core/02-client/types" + clienttypes "github.com/cosmos/ibc-go/v3/modules/core/02-client/types" + "github.com/cosmos/interchain-security/x/ccv/provider/client" ) type SendTokensAction struct { @@ -191,25 +193,13 @@ type submitConsumerProposalAction struct { initialHeight clienttypes.Height } -// TODO: import this directly from the module once it is merged -type createConsumerChainProposalJSON struct { - Title string `json:"title"` - Description string `json:"description"` - ChainId string `json:"chain_id"` - InitialHeight clienttypes.Height `json:"initial_height"` - GenesisHash []byte `json:"genesis_hash"` - BinaryHash []byte `json:"binary_hash"` - SpawnTime time.Time `json:"spawn_time"` - Deposit string `json:"deposit"` -} - -func (tr TestRun) submitConsumerProposal( +func (tr TestRun) submitConsumerAdditionProposal( action submitConsumerProposalAction, verbose bool, ) { spawnTime := tr.containerConfig.now.Add(time.Duration(action.spawnTime) * time.Millisecond) - prop := createConsumerChainProposalJSON{ - Title: "Create a chain", + prop := client.ConsumerAdditionProposalJSON{ + Title: "Propose the addition of a new chain", Description: "Gonna be a great chain", ChainId: string(tr.chainConfigs[action.consumerChain].chainId), InitialHeight: action.initialHeight, @@ -240,7 +230,7 @@ func (tr TestRun) submitConsumerProposal( //#nosec G204 -- Bypass linter warning for 
spawning subprocess with cmd arguments. bz, err = exec.Command("docker", "exec", tr.containerConfig.instanceName, tr.chainConfigs[action.chain].binaryName, - "tx", "gov", "submit-proposal", "create-consumer-chain", + "tx", "gov", "submit-proposal", "consumer-addition", "/temp-proposal.json", `--from`, `validator`+fmt.Sprint(action.from), @@ -257,6 +247,77 @@ func (tr TestRun) submitConsumerProposal( } } +type submitParamChangeProposalAction struct { + chain chainID + from validatorID + deposit uint + subspace string + key string + value interface{} +} + +type paramChangeProposalJSON struct { + Title string `json:"title"` + Description string `json:"description"` + Changes []paramChangeJSON `json:"changes"` + Deposit string `json:"deposit"` +} + +type paramChangeJSON struct { + Subspace string `json:"subspace"` + Key string `json:"key"` + Value interface{} `json:"value"` +} + +func (tr TestRun) submitParamChangeProposal( + action submitParamChangeProposalAction, + verbose bool, +) { + prop := paramChangeProposalJSON{ + Title: "Param change", + Description: "Changing module params", + Changes: []paramChangeJSON{{Subspace: action.subspace, Key: action.key, Value: action.value}}, + Deposit: fmt.Sprint(action.deposit) + `stake`, + } + + bz, err := json.Marshal(prop) + if err != nil { + log.Fatal(err) + } + + jsonStr := string(bz) + if strings.Contains(jsonStr, "'") { + log.Fatal("prop json contains single quote") + } + + //#nosec G204 -- Bypass linter warning for spawning subprocess with cmd arguments. + bz, err = exec.Command("docker", "exec", tr.containerConfig.instanceName, + "/bin/bash", "-c", fmt.Sprintf(`echo '%s' > %s`, jsonStr, "/params-proposal.json")).CombinedOutput() + + if err != nil { + log.Fatal(err, "\n", string(bz)) + } + + //#nosec G204 -- Bypass linter warning for spawning subprocess with cmd arguments. 
+ bz, err = exec.Command("docker", "exec", tr.containerConfig.instanceName, tr.chainConfigs[action.chain].binaryName, + + "tx", "gov", "submit-proposal", "param-change", + "/params-proposal.json", + + `--from`, `validator`+fmt.Sprint(action.from), + `--chain-id`, string(tr.chainConfigs[action.chain].chainId), + `--home`, tr.getValidatorHome(action.chain, action.from), + `--node`, tr.getValidatorNode(action.chain, action.from), + `--keyring-backend`, `test`, + `-b`, `block`, + `-y`, + ).CombinedOutput() + + if err != nil { + log.Fatal(err, "\n", string(bz)) + } +} + type voteGovProposalAction struct { chain chainID from []validatorID @@ -300,9 +361,10 @@ func (tr TestRun) voteGovProposal( } type startConsumerChainAction struct { - consumerChain chainID - providerChain chainID - validators []StartChainValidator + consumerChain chainID + providerChain chainID + genesisChanges string + validators []StartChainValidator } func (tr TestRun) startConsumerChain( @@ -329,10 +391,15 @@ func (tr TestRun) startConsumerChain( log.Fatal(err, "\n", string(bz)) } + genesisChanges := ".app_state.ccvconsumer = " + string(bz) + if action.genesisChanges != "" { + genesisChanges = genesisChanges + " | " + action.genesisChanges + } + tr.startChain(StartChainAction{ chain: action.consumerChain, validators: action.validators, - genesisChanges: ".app_state.ccvconsumer = " + string(bz), + genesisChanges: genesisChanges, skipGentx: true, }, verbose) } @@ -518,6 +585,88 @@ func (tr TestRun) addIbcChannel( } } +type transferChannelCompleteAction struct { + chainA chainID + chainB chainID + connectionA uint + portA string + portB string + order string + channelA uint + channelB uint +} + +func (tr TestRun) transferChannelComplete( + action transferChannelCompleteAction, + verbose bool, +) { + //#nosec G204 -- Bypass linter warning for spawning subprocess with chanOpenTryCmd arguments. + chanOpenTryCmd := exec.Command("docker", "exec", tr.containerConfig.instanceName, "hermes", + "tx", "chan-open-try", + "--dst-chain", string(tr.chainConfigs[action.chainB].chainId), + "--src-chain", string(tr.chainConfigs[action.chainA].chainId), + "--dst-connection", "connection-"+fmt.Sprint(action.connectionA), + "--dst-port", action.portB, + "--src-port", action.portA, + "--src-channel", "channel-"+fmt.Sprint(action.channelA), + ) + executeCommand(chanOpenTryCmd, "transferChanOpenTry") + + //#nosec G204 -- Bypass linter warning for spawning subprocess with chanOpenAckCmd arguments. + chanOpenAckCmd := exec.Command("docker", "exec", tr.containerConfig.instanceName, "hermes", + "tx", "chan-open-ack", + "--dst-chain", string(tr.chainConfigs[action.chainA].chainId), + "--src-chain", string(tr.chainConfigs[action.chainB].chainId), + "--dst-connection", "connection-"+fmt.Sprint(action.connectionA), + "--dst-port", action.portA, + "--src-port", action.portB, + "--dst-channel", "channel-"+fmt.Sprint(action.channelA), + "--src-channel", "channel-"+fmt.Sprint(action.channelB), + ) + executeCommand(chanOpenAckCmd, "transferChanOpenAck") + + //#nosec G204 -- Bypass linter warning for spawning subprocess with chanOpenConfirmCmd arguments. 
+ chanOpenConfirmCmd := exec.Command("docker", "exec", tr.containerConfig.instanceName, "hermes", + "tx", "chan-open-confirm", + "--dst-chain", string(tr.chainConfigs[action.chainB].chainId), + "--src-chain", string(tr.chainConfigs[action.chainA].chainId), + "--dst-connection", "connection-"+fmt.Sprint(action.connectionA), + "--dst-port", action.portB, + "--src-port", action.portA, + "--dst-channel", "channel-"+fmt.Sprint(action.channelB), + "--src-channel", "channel-"+fmt.Sprint(action.channelA), + ) + executeCommand(chanOpenConfirmCmd, "transferChanOpenConfirm") +} + +func executeCommand(cmd *exec.Cmd, cmdName string) { + if verbose { + fmt.Println(cmdName+" cmd:", cmd.String()) + } + + cmdReader, err := cmd.StdoutPipe() + if err != nil { + log.Fatal(err) + } + cmd.Stderr = cmd.Stdout + + if err := cmd.Start(); err != nil { + log.Fatal(err) + } + + scanner := bufio.NewScanner(cmdReader) + + for scanner.Scan() { + out := scanner.Text() + if verbose { + fmt.Println(cmdName + ": " + out) + } + } + if err := scanner.Err(); err != nil { + log.Fatal(err) + } +} + type relayPacketsAction struct { chain chainID port string @@ -545,6 +694,27 @@ func (tr TestRun) relayPackets( } } +type relayRewardPacketsToProviderAction struct { + consumerChain chainID + providerChain chainID + port string + channel uint +} + +func (tr TestRun) relayRewardPacketsToProvider( + action relayRewardPacketsToProviderAction, + verbose bool, +) { + blockPerDistribution, _ := strconv.ParseUint(strings.Trim(tr.getParam(action.consumerChain, Param{Subspace: "ccvconsumer", Key: "BlocksPerDistributionTransmission"}), "\""), 10, 64) + currentBlock := uint64(tr.getBlockHeight(action.consumerChain)) + if currentBlock <= blockPerDistribution { + tr.waitBlocks(action.consumerChain, uint(blockPerDistribution-currentBlock+1), 60*time.Second) + } + + tr.relayPackets(relayPacketsAction{chain: action.consumerChain, port: action.port, channel: action.channel}, verbose) + tr.waitBlocks(action.providerChain, 1, 10*time.Second) +} + type delegateTokensAction struct { chain chainID from validatorID @@ -736,3 +906,59 @@ func (tr TestRun) unjailValidator(action unjailValidatorAction, verbose bool) { log.Fatal(err, "\n", string(bz)) } } + +type registerRepresentativeAction struct { + chain chainID + representatives []validatorID + stakes []uint +} + +func (tr TestRun) registerRepresentative( + action registerRepresentativeAction, + verbose bool, +) { + var wg sync.WaitGroup + for i, val := range action.representatives { + wg.Add(1) + stake := action.stakes[i] + go func(val validatorID, stake uint) { + defer wg.Done() + + //#nosec G204 -- Bypass linter warning for spawning subprocess with pubKeycmd arguments. + pubKeycmd := exec.Command("docker", "exec", tr.containerConfig.instanceName, tr.chainConfigs[action.chain].binaryName, + "tendermint", "show-validator", + `--home`, tr.getValidatorHome(action.chain, val), + ) + + bzPubKey, err := pubKeycmd.CombinedOutput() + if err != nil { + log.Fatal(err, "\n", string(bzPubKey)) + } + + //#nosec G204 -- Bypass linter warning for spawning subprocess with cmd arguments. 
+ bz, err := exec.Command("docker", "exec", tr.containerConfig.instanceName, tr.chainConfigs[action.chain].binaryName, + "tx", "staking", "create-validator", + `--amount`, fmt.Sprint(stake)+"stake", + `--pubkey`, string(bzPubKey), + `--moniker`, fmt.Sprint(val), + `--commission-rate`, "0.1", + `--commission-max-rate`, "0.2", + `--commission-max-change-rate`, "0.01", + `--min-self-delegation`, "1", + `--from`, `validator`+fmt.Sprint(val), + `--chain-id`, string(tr.chainConfigs[action.chain].chainId), + `--home`, tr.getValidatorHome(action.chain, val), + `--node`, tr.getValidatorNode(action.chain, val), + `--keyring-backend`, `test`, + `-b`, `block`, + `-y`, + ).CombinedOutput() + + if err != nil { + log.Fatal(err, "\n", string(bz)) + } + }(val, stake) + } + + wg.Wait() +} diff --git a/tests/integration/config.go b/tests/integration/config.go index 9fb890e246..ffebd002dc 100644 --- a/tests/integration/config.go +++ b/tests/integration/config.go @@ -116,6 +116,17 @@ func DefaultTestRun() TestRun { ".app_state.slashing.params.downtime_jail_duration = \"2s\" | " + ".app_state.slashing.params.slash_fraction_downtime = \"0.010000000000000000\"", }, + chainID("democ"): { + chainId: chainID("democ"), + binaryName: "interchain-security-cdd", + ipPrefix: "7.7.9", + votingWaitTime: 10, + genesisChanges: ".app_state.gov.voting_params.voting_period = \"10s\" | " + + ".app_state.slashing.params.signed_blocks_window = \"2\" | " + + ".app_state.slashing.params.min_signed_per_window = \"0.500000000000000000\" | " + + ".app_state.slashing.params.downtime_jail_duration = \"2s\" | " + + ".app_state.slashing.params.slash_fraction_downtime = \"0.010000000000000000\"", + }, }, } } diff --git a/tests/integration/main.go b/tests/integration/main.go index 67a4b35f48..fada60b41b 100644 --- a/tests/integration/main.go +++ b/tests/integration/main.go @@ -11,9 +11,10 @@ import ( "github.com/kylelemons/godebug/pretty" ) -var verbose = true +var verbose = false func main() { + fmt.Println("============================================ start happy path tests ============================================") start := time.Now() tr := DefaultTestRun() tr.ParseCLIFlags() @@ -24,7 +25,17 @@ func main() { tr.runStep(step, verbose) } - fmt.Printf("test successful - time elapsed %v\n", time.Since(start)) + fmt.Printf("happy path tests successful - time elapsed %v\n", time.Since(start)) + + fmt.Println("============================================ start democracy tests ============================================") + start = time.Now() + tr.startDocker() + + for _, step := range democracySteps { + tr.runStep(step, verbose) + } + + fmt.Printf("democracy tests successful - time elapsed %v\n", time.Since(start)) } func (tr TestRun) runStep(step Step, verbose bool) { @@ -37,7 +48,9 @@ func (tr TestRun) runStep(step Step, verbose bool) { case submitTextProposalAction: tr.submitTextProposal(action, verbose) case submitConsumerProposalAction: - tr.submitConsumerProposal(action, verbose) + tr.submitConsumerAdditionProposal(action, verbose) + case submitParamChangeProposalAction: + tr.submitParamChangeProposal(action, verbose) case voteGovProposalAction: tr.voteGovProposal(action, verbose) case startConsumerChainAction: @@ -48,8 +61,12 @@ func (tr TestRun) runStep(step Step, verbose bool) { tr.addIbcConnection(action, verbose) case addIbcChannelAction: tr.addIbcChannel(action, verbose) + case transferChannelCompleteAction: + tr.transferChannelComplete(action, verbose) case relayPacketsAction: tr.relayPackets(action, verbose) + case 
relayRewardPacketsToProviderAction: + tr.relayRewardPacketsToProvider(action, verbose) case delegateTokensAction: tr.delegateTokens(action, verbose) case unbondTokensAction: @@ -60,6 +77,8 @@ func (tr TestRun) runStep(step Step, verbose bool) { tr.invokeDowntimeSlash(action, verbose) case unjailValidatorAction: tr.unjailValidator(action, verbose) + case registerRepresentativeAction: + tr.registerRepresentative(action, verbose) default: log.Fatalf(fmt.Sprintf(`unknown action: %#v`, action)) } diff --git a/tests/integration/state.go b/tests/integration/state.go index b8768c6963..23498ce1e5 100644 --- a/tests/integration/state.go +++ b/tests/integration/state.go @@ -9,7 +9,7 @@ import ( "strings" "time" - clienttypes "github.com/cosmos/ibc-go/modules/core/02-client/types" + clienttypes "github.com/cosmos/ibc-go/v3/modules/core/02-client/types" "github.com/tidwall/gjson" "gopkg.in/yaml.v2" ) @@ -17,9 +17,12 @@ import ( type State map[chainID]ChainState type ChainState struct { - ValBalances *map[validatorID]uint - Proposals *map[uint]Proposal - ValPowers *map[validatorID]uint + ValBalances *map[validatorID]uint + Proposals *map[uint]Proposal + ValPowers *map[validatorID]uint + RepresentativePowers *map[validatorID]uint + Params *[]Param + Rewards *Rewards } type Proposal interface { @@ -42,8 +45,34 @@ type ConsumerProposal struct { Status string } +type Rewards struct { + IsRewarded map[validatorID]bool + //if true it will calculate if the validator/delegator is rewarded between 2 successive blocks, + //otherwise it will calculate if it received any rewards since the 1st block + IsIncrementalReward bool + //if true checks rewards for "stake" token, otherwise checks rewards from + //other chains (e.g. false is used to check if provider received rewards from a consumer chain) + IsNativeDenom bool +} + func (p ConsumerProposal) isProposal() {} +type ParamsProposal struct { + Deposit uint + Status string + Subspace string + Key string + Value string +} + +func (p ParamsProposal) isProposal() {} + +type Param struct { + Subspace string + Key string + Value string +} + func (tr TestRun) getState(modelState State) State { systemState := State{} for k, modelState := range modelState { @@ -72,6 +101,21 @@ func (tr TestRun) getChainState(chain chainID, modelState ChainState) ChainState chainState.ValPowers = &powers } + if modelState.RepresentativePowers != nil { + representPowers := tr.getRepresentativePowers(chain, *modelState.RepresentativePowers) + chainState.RepresentativePowers = &representPowers + } + + if modelState.Params != nil { + params := tr.getParams(chain, *modelState.Params) + chainState.Params = ¶ms + } + + if modelState.Rewards != nil { + rewards := tr.getRewards(chain, *modelState.Rewards) + chainState.Rewards = &rewards + } + return chainState } @@ -141,6 +185,66 @@ func (tr TestRun) getValPowers(chain chainID, modelState map[validatorID]uint) m return actualState } +func (tr TestRun) getRepresentativePowers(chain chainID, modelState map[validatorID]uint) map[validatorID]uint { + actualState := map[validatorID]uint{} + for k := range modelState { + actualState[k] = tr.getRepresentativePower(chain, k) + } + + return actualState +} + +func (tr TestRun) getParams(chain chainID, modelState []Param) []Param { + actualState := []Param{} + for _, p := range modelState { + actualState = append(actualState, Param{Subspace: p.Subspace, Key: p.Key, Value: tr.getParam(chain, p)}) + } + + return actualState +} + +func (tr TestRun) getRewards(chain chainID, modelState Rewards) Rewards { + 
receivedRewards := map[validatorID]bool{} + + currentBlock := tr.getBlockHeight(chain) + tr.waitBlocks(chain, 1, 10*time.Second) + nextBlock := tr.getBlockHeight(chain) + tr.waitBlocks(chain, 1, 10*time.Second) + + if !modelState.IsIncrementalReward { + currentBlock = 1 + } + for k := range modelState.IsRewarded { + receivedRewards[k] = tr.getReward(chain, k, nextBlock, modelState.IsNativeDenom) > tr.getReward(chain, k, currentBlock, modelState.IsNativeDenom) + } + + return Rewards{IsRewarded: receivedRewards, IsIncrementalReward: modelState.IsIncrementalReward, IsNativeDenom: modelState.IsNativeDenom} +} + +func (tr TestRun) getReward(chain chainID, validator validatorID, blockHeight uint, isNativeDenom bool) float64 { + //#nosec G204 -- Bypass linter warning for spawning subprocess with cmd arguments. + bz, err := exec.Command("docker", "exec", tr.containerConfig.instanceName, tr.chainConfigs[chain].binaryName, + + "query", "distribution", "rewards", + tr.validatorConfigs[validator].delAddress, + + `--height`, fmt.Sprint(blockHeight), + `--node`, tr.getValidatorNode(chain, tr.getDefaultValidator(chain)), + `-o`, `json`, + ).CombinedOutput() + + if err != nil { + log.Fatal(err, "\n", string(bz)) + } + + denomCondition := `total.#(denom!="stake").amount` + if isNativeDenom { + denomCondition = `total.#(denom=="stake").amount` + } + + return gjson.Get(string(bz), denomCondition).Float() +} + func (tr TestRun) getBalance(chain chainID, validator validatorID) uint { //#nosec G204 -- Bypass linter warning for spawning subprocess with cmd arguments. bz, err := exec.Command("docker", "exec", tr.containerConfig.instanceName, tr.chainConfigs[chain].binaryName, @@ -200,7 +304,7 @@ func (tr TestRun) getProposal(chain chainID, proposal uint) Proposal { Title: title, Description: description, } - case "/interchain_security.ccv.provider.v1.CreateConsumerChainProposal": + case "/interchain_security.ccv.provider.v1.ConsumerAdditionProposal": chainId := gjson.Get(string(bz), `content.chain_id`).String() spawnTime := gjson.Get(string(bz), `content.spawn_time`).Time().Sub(tr.containerConfig.now) @@ -222,7 +326,14 @@ func (tr TestRun) getProposal(chain chainID, proposal uint) Proposal { RevisionHeight: gjson.Get(string(bz), `content.initial_height.revision_height`).Uint(), }, } - + case "/cosmos.params.v1beta1.ParameterChangeProposal": + return ParamsProposal{ + Deposit: uint(deposit), + Status: status, + Subspace: gjson.Get(string(bz), `content.changes.0.subspace`).String(), + Key: gjson.Get(string(bz), `content.changes.0.key`).String(), + Value: gjson.Get(string(bz), `content.changes.0.value`).String(), + } } log.Fatal("unknown proposal type", string(bz)) @@ -288,6 +399,47 @@ func (tr TestRun) getValPower(chain chainID, validator validatorID) uint { return 0 } +func (tr TestRun) getRepresentativePower(chain chainID, validator validatorID) uint { + //#nosec G204 -- Bypass linter warning for spawning subprocess with cmd arguments. 
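The denomCondition strings in getReward above rely on gjson's inline query syntax: total.#(denom=="stake").amount selects the amount of the first element of the total array whose denom equals "stake", while the != variant picks the first non-native entry. A standalone illustration against made-up data (the JSON below only loosely mimics the distribution query output):

    package main

    import (
        "fmt"

        "github.com/tidwall/gjson"
    )

    func main() {
        // Invented sample; real output comes from `query distribution rewards`.
        sample := `{"total":[{"denom":"stake","amount":"12.5"},{"denom":"ibc/ABC","amount":"0.7"}]}`

        native := gjson.Get(sample, `total.#(denom=="stake").amount`).Float()
        other := gjson.Get(sample, `total.#(denom!="stake").amount`).Float()

        fmt.Println(native) // 12.5
        fmt.Println(other)  // 0.7
    }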
+ bz, err := exec.Command("docker", "exec", tr.containerConfig.instanceName, tr.chainConfigs[chain].binaryName, + + "query", "staking", "validator", + tr.validatorConfigs[validator].valoperAddress, + + `--node`, tr.getValidatorNode(chain, tr.getDefaultValidator(chain)), + `-o`, `json`, + ).CombinedOutput() + + if err != nil { + log.Fatal(err, "\n", string(bz)) + } + + amount := gjson.Get(string(bz), `tokens`) + + return uint(amount.Uint()) +} + +func (tr TestRun) getParam(chain chainID, param Param) string { + //#nosec G204 -- Bypass linter warning for spawning subprocess with cmd arguments. + bz, err := exec.Command("docker", "exec", tr.containerConfig.instanceName, tr.chainConfigs[chain].binaryName, + + "query", "params", "subspace", + param.Subspace, + param.Key, + + `--node`, tr.getValidatorNode(chain, tr.getDefaultValidator(chain)), + `-o`, `json`, + ).CombinedOutput() + + if err != nil { + log.Fatal(err, "\n", string(bz)) + } + + value := gjson.Get(string(bz), `value`) + + return value.String() +} + // Gets a default validator for txs and queries using the first subdirectory // of the directory of the input chain, which will be the home directory // of one of the validators. diff --git a/tests/integration/steps.go b/tests/integration/steps.go index 9c4889ad5f..e720bc698c 100644 --- a/tests/integration/steps.go +++ b/tests/integration/steps.go @@ -1,7 +1,7 @@ package main import ( - clienttypes "github.com/cosmos/ibc-go/modules/core/02-client/types" + clienttypes "github.com/cosmos/ibc-go/v3/modules/core/02-client/types" ) type Step struct { diff --git a/tests/integration/steps_democracy.go b/tests/integration/steps_democracy.go new file mode 100644 index 0000000000..537411a450 --- /dev/null +++ b/tests/integration/steps_democracy.go @@ -0,0 +1,485 @@ +package main + +import ( + clienttypes "github.com/cosmos/ibc-go/v3/modules/core/02-client/types" +) + +var democracySteps = []Step{ + { + action: StartChainAction{ + chain: chainID("provi"), + validators: []StartChainValidator{ + {id: validatorID("bob"), stake: 500000000, allocation: 10000000000}, + {id: validatorID("alice"), stake: 500000000, allocation: 10000000000}, + {id: validatorID("carol"), stake: 500000000, allocation: 10000000000}, + }, + genesisChanges: "", // No custom genesis changes for this action + skipGentx: false, + }, + state: State{ + chainID("provi"): ChainState{ + ValBalances: &map[validatorID]uint{ + validatorID("alice"): 9500000000, + validatorID("bob"): 9500000000, + }, + }, + }, + }, + { + action: SendTokensAction{ + chain: chainID("provi"), + from: validatorID("alice"), + to: validatorID("bob"), + amount: 2, + }, + state: State{ + chainID("provi"): ChainState{ + ValBalances: &map[validatorID]uint{ + validatorID("alice"): 9499999998, + validatorID("bob"): 9500000002, + }, + }, + }, + }, + { + action: submitConsumerProposalAction{ + chain: chainID("provi"), + from: validatorID("alice"), + deposit: 10000001, + consumerChain: chainID("democ"), + spawnTime: 0, + initialHeight: clienttypes.Height{RevisionNumber: 0, RevisionHeight: 1}, + }, + state: State{ + chainID("provi"): ChainState{ + ValBalances: &map[validatorID]uint{ + validatorID("alice"): 9489999997, + validatorID("bob"): 9500000002, + }, + Proposals: &map[uint]Proposal{ + 1: ConsumerProposal{ + Deposit: 10000001, + Chain: chainID("democ"), + SpawnTime: 0, + InitialHeight: clienttypes.Height{RevisionNumber: 0, RevisionHeight: 1}, + Status: "PROPOSAL_STATUS_VOTING_PERIOD", + }, + }, + }, + }, + }, + { + action: voteGovProposalAction{ + chain: 
chainID("provi"), + from: []validatorID{validatorID("alice"), validatorID("bob"), validatorID("carol")}, + vote: []string{"yes", "yes", "yes"}, + propNumber: 1, + }, + state: State{ + chainID("provi"): ChainState{ + Proposals: &map[uint]Proposal{ + 1: ConsumerProposal{ + Deposit: 10000001, + Chain: chainID("democ"), + SpawnTime: 0, + InitialHeight: clienttypes.Height{RevisionNumber: 0, RevisionHeight: 1}, + Status: "PROPOSAL_STATUS_PASSED", + }, + }, + ValBalances: &map[validatorID]uint{ + validatorID("alice"): 9499999998, + validatorID("bob"): 9500000002, + }, + }, + }, + }, + { + action: startConsumerChainAction{ + consumerChain: chainID("democ"), + providerChain: chainID("provi"), + genesisChanges: ".app_state.ccvconsumer.params.blocks_per_distribution_transmission = \"10\"", + validators: []StartChainValidator{ + {id: validatorID("carol"), stake: 500000000, allocation: 10000000000}, + {id: validatorID("alice"), stake: 500000000, allocation: 10000000000}, + {id: validatorID("bob"), stake: 500000000, allocation: 10000000000}, + }, + }, + state: State{ + chainID("provi"): ChainState{ + ValBalances: &map[validatorID]uint{ + validatorID("alice"): 9499999998, + validatorID("bob"): 9500000002, + }, + }, + chainID("democ"): ChainState{ + ValBalances: &map[validatorID]uint{ + validatorID("alice"): 10000000000, + validatorID("bob"): 10000000000, + }, + }, + }, + }, + { + action: SendTokensAction{ + chain: chainID("democ"), + from: validatorID("alice"), + to: validatorID("bob"), + amount: 1, + }, + state: State{ + chainID("democ"): ChainState{ + // Tx on consumer chain should not go through before ICS channel is setup + ValBalances: &map[validatorID]uint{ + validatorID("alice"): 10000000000, + validatorID("bob"): 10000000000, + }, + }, + }, + }, + { + action: addIbcConnectionAction{ + chainA: chainID("democ"), + chainB: chainID("provi"), + clientA: 0, + clientB: 0, + order: "ordered", + }, + state: State{}, + }, + { + action: addIbcChannelAction{ + chainA: chainID("democ"), + chainB: chainID("provi"), + connectionA: 0, + portA: "consumer", + portB: "provider", + order: "ordered", + }, + state: State{}, + }, + { + action: transferChannelCompleteAction{ + chainA: chainID("democ"), + chainB: chainID("provi"), + connectionA: 0, + portA: "transfer", + portB: "transfer", + order: "unordered", + channelA: 1, + channelB: 1, + }, + state: State{}, + }, + { + action: delegateTokensAction{ + chain: chainID("provi"), + from: validatorID("alice"), + to: validatorID("alice"), + amount: 11000000, + }, + state: State{ + chainID("provi"): ChainState{ + ValPowers: &map[validatorID]uint{ + validatorID("alice"): 511, + validatorID("bob"): 500, + validatorID("carol"): 500, + }, + }, + chainID("democ"): ChainState{ + ValPowers: &map[validatorID]uint{ + validatorID("alice"): 500, + validatorID("bob"): 500, + validatorID("carol"): 500, + }, + }, + }, + }, + { + action: SendTokensAction{ + chain: chainID("democ"), + from: validatorID("alice"), + to: validatorID("bob"), + amount: 1, + }, + state: State{ + chainID("democ"): ChainState{ + // Tx should not go through, ICS channel is not setup until first VSC packet has been relayed to consumer + ValBalances: &map[validatorID]uint{ + validatorID("alice"): 10000000000, + validatorID("bob"): 10000000000, + }, + }, + }, + }, + { + action: relayPacketsAction{ + chain: chainID("provi"), + port: "provider", + channel: 0, + }, + state: State{ + chainID("democ"): ChainState{ + ValPowers: &map[validatorID]uint{ + validatorID("alice"): 511, + validatorID("bob"): 500, + 
validatorID("carol"): 500, + }, + }, + }, + }, + { + action: SendTokensAction{ + chain: chainID("democ"), + from: validatorID("alice"), + to: validatorID("bob"), + amount: 1, + }, + state: State{ + chainID("democ"): ChainState{ + // Now tx should execute + ValBalances: &map[validatorID]uint{ + validatorID("alice"): 9999999999, + validatorID("bob"): 10000000001, + }, + }, + }, + }, + // sanity checks end here + { + action: registerRepresentativeAction{ + chain: chainID("democ"), + representatives: []validatorID{validatorID("alice"), validatorID("bob")}, + stakes: []uint{100000000, 40000000}, + }, + state: State{ + chainID("democ"): ChainState{ + RepresentativePowers: &map[validatorID]uint{ + validatorID("alice"): 100000000, + validatorID("bob"): 40000000, + }, + Rewards: &Rewards{ + IsRewarded: map[validatorID]bool{ + validatorID("alice"): true, + validatorID("bob"): true, + validatorID("carol"): false, + }, + IsIncrementalReward: true, + IsNativeDenom: true, + }, + }, + }, + }, + { + action: delegateTokensAction{ + chain: chainID("democ"), + from: validatorID("carol"), + to: validatorID("alice"), + amount: 500000, + }, + state: State{ + chainID("democ"): ChainState{ + //Check that delegators on gov-consumer chain can change representative powers + RepresentativePowers: &map[validatorID]uint{ + validatorID("alice"): 100500000, + validatorID("bob"): 40000000, + }, + // Check that delegating on gov-consumer does not change validator powers + ValPowers: &map[validatorID]uint{ + validatorID("alice"): 511, + validatorID("bob"): 500, + validatorID("carol"): 500, + }, + //Check that tokens are minted and distributed to representatives and their delegators + Rewards: &Rewards{ + IsRewarded: map[validatorID]bool{ + validatorID("alice"): true, + validatorID("bob"): true, + validatorID("carol"): true, + }, + IsIncrementalReward: true, + IsNativeDenom: true, + }, + }, + }, + }, + { + action: submitParamChangeProposalAction{ + chain: chainID("democ"), + from: validatorID("alice"), + deposit: 10000001, + subspace: "staking", + key: "MaxValidators", + value: 105, + }, + state: State{ + chainID("democ"): ChainState{ + ValBalances: &map[validatorID]uint{ + validatorID("alice"): 9889999998, + validatorID("bob"): 9960000001, + }, + Proposals: &map[uint]Proposal{ + 1: ParamsProposal{ + Deposit: 10000001, + Status: "PROPOSAL_STATUS_VOTING_PERIOD", + Subspace: "staking", + Key: "MaxValidators", + Value: "105", + }, + }, + }, + }, + }, + { + //Have accounts vote on something on the gov-consumer chain + action: voteGovProposalAction{ + chain: chainID("democ"), + from: []validatorID{validatorID("alice"), validatorID("bob")}, + vote: []string{"yes", "no"}, + propNumber: 1, + }, + state: State{ + chainID("democ"): ChainState{ + ValBalances: &map[validatorID]uint{ + validatorID("alice"): 9899999999, + validatorID("bob"): 9960000001, + }, + //Check that the parameter is changed on gov-consumer chain + Params: &([]Param{{Subspace: "staking", Key: "MaxValidators", Value: "105"}}), + }, + }, + }, + { + action: relayRewardPacketsToProviderAction{ + consumerChain: chainID("democ"), + providerChain: chainID("provi"), + port: "transfer", + channel: 1, + }, + state: State{ + chainID("provi"): ChainState{ + //Check that tokens are minted and sent to provider chain and distributed to validators and their delegators on provider chain + Rewards: &Rewards{ + IsRewarded: map[validatorID]bool{ + validatorID("alice"): true, + validatorID("bob"): true, + validatorID("carol"): true, + }, + IsIncrementalReward: false, + IsNativeDenom: 
false, + }, + }, + }, + }, + { + action: downtimeSlashAction{ + chain: chainID("democ"), + // TODO: First validator cannot be brought down until this issue is resolved: + // https://github.com/cosmos/interchain-security/issues/263 + validator: validatorID("bob"), + }, + state: State{ + // validator should be slashed on consumer, powers not affected on either chain yet + chainID("provi"): ChainState{ + ValPowers: &map[validatorID]uint{ + validatorID("alice"): 511, + validatorID("bob"): 500, + validatorID("carol"): 500, + }, + }, + chainID("democ"): ChainState{ + ValPowers: &map[validatorID]uint{ + validatorID("alice"): 511, + validatorID("bob"): 500, + validatorID("carol"): 500, + }, + }, + }, + }, + { + action: relayPacketsAction{ + chain: chainID("provi"), + port: "provider", + channel: 0, + }, + state: State{ + chainID("provi"): ChainState{ + ValPowers: &map[validatorID]uint{ + validatorID("alice"): 511, + // Downtime jailing and corresponding voting power change are processed by provider + validatorID("bob"): 0, + validatorID("carol"): 500, + }, + }, + chainID("democ"): ChainState{ + ValPowers: &map[validatorID]uint{ + validatorID("alice"): 511, + validatorID("bob"): 500, + validatorID("carol"): 500, + }, + }, + }, + }, + // A block is incremented each action, hence why VSC is committed on provider, + // and can now be relayed as packet to consumer + { + action: relayPacketsAction{ + chain: chainID("provi"), + port: "provider", + channel: 0, + }, + state: State{ + chainID("democ"): ChainState{ + ValPowers: &map[validatorID]uint{ + validatorID("alice"): 511, + // VSC now seen on consumer + validatorID("bob"): 0, + validatorID("carol"): 500, + }, + }, + }, + }, + { + action: unjailValidatorAction{ + provider: chainID("provi"), + validator: validatorID("bob"), + }, + state: State{ + chainID("provi"): ChainState{ + ValPowers: &map[validatorID]uint{ + validatorID("alice"): 511, + // 1% of bob's stake should be slashed as set in config.go + validatorID("bob"): 495, + validatorID("carol"): 500, + }, + }, + chainID("democ"): ChainState{ + ValPowers: &map[validatorID]uint{ + validatorID("alice"): 511, + validatorID("bob"): 0, + validatorID("carol"): 500, + }, + }, + }, + }, + { + action: relayPacketsAction{ + chain: chainID("provi"), + port: "provider", + channel: 0, + }, + state: State{ + chainID("democ"): ChainState{ + ValPowers: &map[validatorID]uint{ + validatorID("alice"): 511, + validatorID("bob"): 495, + validatorID("carol"): 500, + }, + //Check that slashing on the gov-consumer chain does not result in slashing for the representatives or their delegators + RepresentativePowers: &map[validatorID]uint{ + validatorID("alice"): 100500000, + validatorID("bob"): 40000000, + }, + }, + }, + }, +} diff --git a/testutil/keeper/expectations.go b/testutil/keeper/expectations.go new file mode 100644 index 0000000000..7390115c7a --- /dev/null +++ b/testutil/keeper/expectations.go @@ -0,0 +1,93 @@ +package keeper + +import ( + time "time" + + sdk "github.com/cosmos/cosmos-sdk/types" + capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" + clienttypes "github.com/cosmos/ibc-go/v3/modules/core/02-client/types" + conntypes "github.com/cosmos/ibc-go/v3/modules/core/03-connection/types" + channeltypes "github.com/cosmos/ibc-go/v3/modules/core/04-channel/types" + ibctmtypes "github.com/cosmos/ibc-go/v3/modules/light-clients/07-tendermint/types" + "github.com/golang/mock/gomock" + + ccv "github.com/cosmos/interchain-security/x/ccv/types" + + extra "github.com/oxyno-zeta/gomock-extra-matcher" +) + 
+
+//
+// A file containing groups of commonly used mock expectations.
+// Note: Each group of mock expectations is associated with a single method
+// that may be called during unit tests.
+//
+
+// GetMocksForCreateConsumerClient returns mock expectations needed to call CreateConsumerClient().
+func GetMocksForCreateConsumerClient(ctx sdk.Context, mocks *MockedKeepers,
+	expectedChainID string, expectedLatestHeight clienttypes.Height) []*gomock.Call {
+
+	expectations := []*gomock.Call{
+		mocks.MockStakingKeeper.EXPECT().UnbondingTime(ctx).Return(time.Hour).Times(
+			1, // called once in CreateConsumerClient
+		),
+
+		mocks.MockClientKeeper.EXPECT().CreateClient(
+			ctx,
+			// Allows us to expect a match by field. These are the only two client state values
+			// that are dependent on parameters passed to CreateConsumerClient.
+			extra.StructMatcher().Field(
+				"ChainId", expectedChainID).Field(
+				"LatestHeight", expectedLatestHeight,
+			),
+			gomock.Any(),
+		).Return("clientID", nil).Times(1),
+	}
+
+	expectations = append(expectations, GetMocksForMakeConsumerGenesis(ctx, mocks, time.Hour)...)
+	return expectations
+}
+
+// GetMocksForMakeConsumerGenesis returns mock expectations needed to call MakeConsumerGenesis().
+func GetMocksForMakeConsumerGenesis(ctx sdk.Context, mocks *MockedKeepers,
+	unbondingTimeToInject time.Duration) []*gomock.Call {
+	return []*gomock.Call{
+		mocks.MockStakingKeeper.EXPECT().UnbondingTime(ctx).Return(unbondingTimeToInject).Times(1),
+
+		mocks.MockClientKeeper.EXPECT().GetSelfConsensusState(ctx,
+			clienttypes.GetSelfHeight(ctx)).Return(&ibctmtypes.ConsensusState{}, nil).Times(1),
+
+		mocks.MockStakingKeeper.EXPECT().IterateLastValidatorPowers(ctx, gomock.Any()).Times(1),
+	}
+}
+
+// GetMocksForSetConsumerChain returns mock expectations needed to call SetConsumerChain().
+func GetMocksForSetConsumerChain(ctx sdk.Context, mocks *MockedKeepers,
+	chainIDToInject string) []*gomock.Call {
+	return []*gomock.Call{
+		mocks.MockChannelKeeper.EXPECT().GetChannel(ctx, ccv.ProviderPortID, gomock.Any()).Return(
+			channeltypes.Channel{
+				State:          channeltypes.OPEN,
+				ConnectionHops: []string{"connectionID"},
+			},
+			true,
+		).Times(1),
+		mocks.MockConnectionKeeper.EXPECT().GetConnection(ctx, "connectionID").Return(
+			conntypes.ConnectionEnd{ClientId: "clientID"}, true,
+		).Times(1),
+		mocks.MockClientKeeper.EXPECT().GetClientState(ctx, "clientID").Return(
+			&ibctmtypes.ClientState{ChainId: chainIDToInject}, true,
+		).Times(1),
+	}
+}
+
+// GetMocksForStopConsumerChain returns mock expectations needed to call StopConsumerChain().
+func GetMocksForStopConsumerChain(ctx sdk.Context, mocks *MockedKeepers) []*gomock.Call { + dummyCap := &capabilitytypes.Capability{} + return []*gomock.Call{ + mocks.MockChannelKeeper.EXPECT().GetChannel(ctx, ccv.ProviderPortID, "channelID").Return( + channeltypes.Channel{State: channeltypes.OPEN}, true, + ).Times(1), + mocks.MockScopedKeeper.EXPECT().GetCapability(ctx, gomock.Any()).Return(dummyCap, true).Times(1), + mocks.MockChannelKeeper.EXPECT().ChanCloseInit(ctx, ccv.ProviderPortID, "channelID", dummyCap).Times(1), + } +} diff --git a/testutil/keeper/unit_test_helpers.go b/testutil/keeper/unit_test_helpers.go index a4dd366139..f2aeb2e3ac 100644 --- a/testutil/keeper/unit_test_helpers.go +++ b/testutil/keeper/unit_test_helpers.go @@ -2,17 +2,20 @@ package keeper import ( "testing" + time "time" + + tmtypes "github.com/tendermint/tendermint/types" "github.com/cosmos/cosmos-sdk/codec" codectypes "github.com/cosmos/cosmos-sdk/codec/types" "github.com/cosmos/cosmos-sdk/store" storetypes "github.com/cosmos/cosmos-sdk/store/types" sdk "github.com/cosmos/cosmos-sdk/types" - capabilitykeeper "github.com/cosmos/cosmos-sdk/x/capability/keeper" paramstypes "github.com/cosmos/cosmos-sdk/x/params/types" consumerkeeper "github.com/cosmos/interchain-security/x/ccv/consumer/keeper" providerkeeper "github.com/cosmos/interchain-security/x/ccv/provider/keeper" "github.com/cosmos/interchain-security/x/ccv/types" + "github.com/golang/mock/gomock" "github.com/stretchr/testify/require" "github.com/tendermint/tendermint/crypto" "github.com/tendermint/tendermint/libs/log" @@ -22,169 +25,178 @@ import ( cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec" "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" cryptotypes "github.com/cosmos/cosmos-sdk/crypto/types" + + clienttypes "github.com/cosmos/ibc-go/v3/modules/core/02-client/types" + commitmenttypes "github.com/cosmos/ibc-go/v3/modules/core/23-commitment/types" + ibctmtypes "github.com/cosmos/ibc-go/v3/modules/light-clients/07-tendermint/types" + providertypes "github.com/cosmos/interchain-security/x/ccv/provider/types" ) -// Constructs a provider keeper and context object for unit tests, backed by an in-memory db. 
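The expectation groups defined in expectations.go above are meant to be composed by individual tests and pinned to a call order with gomock.InOrder, as SetupForStoppingConsumerChain does later in this patch. A condensed sketch of that usage (the test name is hypothetical, and the template client state is installed defensively via SetTemplateClientState in case the keeper version under test requires it before CreateConsumerClient):

    package keeper_test

    import (
        "testing"

        clienttypes "github.com/cosmos/ibc-go/v3/modules/core/02-client/types"
        testkeeper "github.com/cosmos/interchain-security/testutil/keeper"
        "github.com/golang/mock/gomock"
        "github.com/stretchr/testify/require"
    )

    func TestCreateConsumerClientAgainstMocks(t *testing.T) {
        params := testkeeper.NewInMemKeeperParams(t)
        params.SetTemplateClientState(nil) // default template client state, see unit_test_helpers.go below
        providerKeeper, ctx, ctrl, mocks := testkeeper.GetProviderKeeperAndCtx(t, params)
        defer ctrl.Finish() // fails the test if a registered expectation was never used

        // Compose the relevant expectation group(s) and pin their order.
        gomock.InOrder(
            testkeeper.GetMocksForCreateConsumerClient(ctx, &mocks, "chainID", clienttypes.NewHeight(2, 3))...,
        )

        // Every external-keeper call made by the method under test must now
        // match one of the expectations registered above.
        err := providerKeeper.CreateConsumerClient(ctx, "chainID", clienttypes.NewHeight(2, 3), false)
        require.NoError(t, err)
    }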
-func GetProviderKeeperAndCtx(t testing.TB) (providerkeeper.Keeper, sdk.Context) { +// Parameters needed to instantiate an in-memory keeper +type InMemKeeperParams struct { + Cdc *codec.ProtoCodec + StoreKey *storetypes.KVStoreKey + ParamsSubspace *paramstypes.Subspace + Ctx sdk.Context +} + +// NewInMemKeeperParams instantiates in-memory keeper params with default values +func NewInMemKeeperParams(t testing.TB) InMemKeeperParams { + storeKey := sdk.NewKVStoreKey(types.StoreKey) + memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) + + db := tmdb.NewMemDB() + stateStore := store.NewCommitMultiStore(db) + stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) + stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) + require.NoError(t, stateStore.LoadLatestVersion()) - cdc, storeKey, paramsSubspace, ctx := SetupInMemKeeper(t) + registry := codectypes.NewInterfaceRegistry() + cdc := codec.NewProtoCodec(registry) - k := providerkeeper.NewKeeper( - cdc, + paramsSubspace := paramstypes.NewSubspace(cdc, + codec.NewLegacyAmino(), storeKey, - paramsSubspace, - &MockScopedKeeper{}, - &MockChannelKeeper{}, - &MockPortKeeper{}, - &MockConnectionKeeper{}, - &MockClientKeeper{}, - &MockStakingKeeper{}, - &MockSlashingKeeper{}, - &MockAccountKeeper{}, - "", + memStoreKey, + paramstypes.ModuleName, ) - return k, ctx -} + ctx := sdk.NewContext(stateStore, tmproto.Header{}, false, log.NewNopLogger()) -// Constructs a consumer keeper and context object for unit tests, backed by an in-memory db. -func GetConsumerKeeperAndCtx(t testing.TB) (consumerkeeper.Keeper, sdk.Context) { + return InMemKeeperParams{ + Cdc: cdc, + StoreKey: storeKey, + ParamsSubspace: ¶msSubspace, + Ctx: ctx, + } +} - cdc, storeKey, paramsSubspace, ctx := SetupInMemKeeper(t) +// A struct holding pointers to any mocked external keeper needed for provider/consumer keeper setup. +type MockedKeepers struct { + *MockScopedKeeper + *MockChannelKeeper + *MockPortKeeper + *MockConnectionKeeper + *MockClientKeeper + *MockStakingKeeper + *MockSlashingKeeper + *MockAccountKeeper + *MockBankKeeper + *MockIBCTransferKeeper + *MockIBCCoreKeeper +} - k := consumerkeeper.NewKeeper( - cdc, - storeKey, - paramsSubspace, - &MockScopedKeeper{}, - &MockChannelKeeper{}, - &MockPortKeeper{}, - &MockConnectionKeeper{}, - &MockClientKeeper{}, - &MockSlashingKeeper{}, - &MockBankKeeper{}, - &MockAccountKeeper{}, - &MockIBCTransferKeeper{}, - &MockIBCCoreKeeper{}, - "", - ) - return k, ctx +// NewMockedKeepers instantiates a struct with pointers to properly instantiated mocked keepers. +func NewMockedKeepers(ctrl *gomock.Controller) MockedKeepers { + return MockedKeepers{ + MockScopedKeeper: NewMockScopedKeeper(ctrl), + MockChannelKeeper: NewMockChannelKeeper(ctrl), + MockPortKeeper: NewMockPortKeeper(ctrl), + MockConnectionKeeper: NewMockConnectionKeeper(ctrl), + MockClientKeeper: NewMockClientKeeper(ctrl), + MockStakingKeeper: NewMockStakingKeeper(ctrl), + MockSlashingKeeper: NewMockSlashingKeeper(ctrl), + MockAccountKeeper: NewMockAccountKeeper(ctrl), + MockBankKeeper: NewMockBankKeeper(ctrl), + MockIBCTransferKeeper: NewMockIBCTransferKeeper(ctrl), + MockIBCCoreKeeper: NewMockIBCCoreKeeper(ctrl), + } } -// Constructs a provider keeper for unit tests, backed by an in-memory db, -// with ability to pass mocked or otherwise manipulated parameters. 
-func GetProviderKeeperWithMocks( - cdc *codec.ProtoCodec, - storeKey *storetypes.KVStoreKey, - paramsSubspace paramstypes.Subspace, - capabilityKeeper capabilitykeeper.ScopedKeeper, - channelKeeper types.ChannelKeeper, - portKeeper types.PortKeeper, - connectionKeeper types.ConnectionKeeper, - clientKeeper types.ClientKeeper, - stakingKeeper types.StakingKeeper, - slashingKeeper types.SlashingKeeper, - accountKeeper types.AccountKeeper, -) providerkeeper.Keeper { +// NewInMemProviderKeeper instantiates an in-mem provider keeper from params and mocked keepers +func NewInMemProviderKeeper(params InMemKeeperParams, mocks MockedKeepers) providerkeeper.Keeper { return providerkeeper.NewKeeper( - cdc, - storeKey, - paramsSubspace, - capabilityKeeper, - channelKeeper, - portKeeper, - connectionKeeper, - clientKeeper, - stakingKeeper, - slashingKeeper, - accountKeeper, + params.Cdc, + params.StoreKey, + *params.ParamsSubspace, + mocks.MockScopedKeeper, + mocks.MockChannelKeeper, + mocks.MockPortKeeper, + mocks.MockConnectionKeeper, + mocks.MockClientKeeper, + mocks.MockStakingKeeper, + mocks.MockSlashingKeeper, + mocks.MockAccountKeeper, "", ) } -// Constructs a consumer keeper for unit tests, backed by an in-memory db, -// with ability to pass mocked or otherwise manipulated parameters. -func GetCustomConsumerKeeperWithMocks( - cdc *codec.ProtoCodec, - storeKey *storetypes.KVStoreKey, - paramsSubspace paramstypes.Subspace, - capabilityKeeper types.ScopedKeeper, - channelKeeper types.ChannelKeeper, - portKeeper types.PortKeeper, - connectionKeeper types.ConnectionKeeper, - clientKeeper types.ClientKeeper, - slashingKeeper types.SlashingKeeper, - bankKeeper types.BankKeeper, - accountKeeper types.AccountKeeper, - ibcTransferKeeper types.IBCTransferKeeper, - ibcCoreKeeper types.IBCCoreKeeper, -) consumerkeeper.Keeper { +// NewInMemConsumerKeeper instantiates an in-mem consumer keeper from params and mocked keepers +func NewInMemConsumerKeeper(params InMemKeeperParams, mocks MockedKeepers) consumerkeeper.Keeper { return consumerkeeper.NewKeeper( - cdc, - storeKey, - paramsSubspace, - capabilityKeeper, - channelKeeper, - portKeeper, - connectionKeeper, - clientKeeper, - slashingKeeper, - bankKeeper, - accountKeeper, - ibcTransferKeeper, - ibcCoreKeeper, + params.Cdc, + params.StoreKey, + *params.ParamsSubspace, + mocks.MockScopedKeeper, + mocks.MockChannelKeeper, + mocks.MockPortKeeper, + mocks.MockConnectionKeeper, + mocks.MockClientKeeper, + mocks.MockSlashingKeeper, + mocks.MockBankKeeper, + mocks.MockAccountKeeper, + mocks.MockIBCTransferKeeper, + mocks.MockIBCCoreKeeper, "", ) } -// Constructs a consumer keeper for unit tests, backed by an in-memory db, -// with ability to pass manipulated parameters, but no mocked keepers. -func GetCustomConsumerKeeper( - cdc *codec.ProtoCodec, - storeKey *storetypes.KVStoreKey, - paramsSubspace paramstypes.Subspace, -) consumerkeeper.Keeper { +// Returns an in-memory provider keeper, context, controller, and mocks, given a test instance and parameters. +// +// Note: Calling ctrl.Finish() at the end of a test function ensures that +// no unexpected calls to external keepers are made. 
+func GetProviderKeeperAndCtx(t *testing.T, params InMemKeeperParams) ( + providerkeeper.Keeper, sdk.Context, *gomock.Controller, MockedKeepers) { - return consumerkeeper.NewKeeper( - cdc, - storeKey, - paramsSubspace, - &MockScopedKeeper{}, - &MockChannelKeeper{}, - &MockPortKeeper{}, - &MockConnectionKeeper{}, - &MockClientKeeper{}, - &MockSlashingKeeper{}, - &MockBankKeeper{}, - &MockAccountKeeper{}, - &MockIBCTransferKeeper{}, - &MockIBCCoreKeeper{}, - "", - ) + ctrl := gomock.NewController(t) + mocks := NewMockedKeepers(ctrl) + return NewInMemProviderKeeper(params, mocks), params.Ctx, ctrl, mocks } -func SetupInMemKeeper(t testing.TB) (*codec.ProtoCodec, *storetypes.KVStoreKey, paramstypes.Subspace, sdk.Context) { - storeKey := sdk.NewKVStoreKey(types.StoreKey) - memStoreKey := storetypes.NewMemoryStoreKey(types.MemStoreKey) +// Return an in-memory consumer keeper, context, controller, and mocks, given a test instance and parameters. +// +// Note: Calling ctrl.Finish() at the end of a test function ensures that +// no unexpected calls to external keepers are made. +func GetConsumerKeeperAndCtx(t *testing.T, params InMemKeeperParams) ( + consumerkeeper.Keeper, sdk.Context, *gomock.Controller, MockedKeepers) { - db := tmdb.NewMemDB() - stateStore := store.NewCommitMultiStore(db) - stateStore.MountStoreWithDB(storeKey, sdk.StoreTypeIAVL, db) - stateStore.MountStoreWithDB(memStoreKey, sdk.StoreTypeMemory, nil) - require.NoError(t, stateStore.LoadLatestVersion()) + ctrl := gomock.NewController(t) + mocks := NewMockedKeepers(ctrl) + return NewInMemConsumerKeeper(params, mocks), params.Ctx, ctrl, mocks +} - registry := codectypes.NewInterfaceRegistry() - cdc := codec.NewProtoCodec(registry) +// Sets a template client state for a params subspace so that the provider's +// GetTemplateClient method will be satisfied. +func (params *InMemKeeperParams) SetTemplateClientState(customState *ibctmtypes.ClientState) { - paramsSubspace := paramstypes.NewSubspace(cdc, - codec.NewLegacyAmino(), - storeKey, - memStoreKey, - paramstypes.ModuleName, - ) - ctx := sdk.NewContext(stateStore, tmproto.Header{}, false, log.NewNopLogger()) - return cdc, storeKey, paramsSubspace, ctx + keyTable := paramstypes.NewKeyTable(paramstypes.NewParamSetPair( + providertypes.KeyTemplateClient, &ibctmtypes.ClientState{}, + func(value interface{}) error { return nil })) + + newSubspace := params.ParamsSubspace.WithKeyTable(keyTable) + params.ParamsSubspace = &newSubspace + + // Default template client state if none provided + if customState == nil { + customState = ibctmtypes.NewClientState("", ibctmtypes.DefaultTrustLevel, 0, 0, + time.Second*10, clienttypes.Height{}, commitmenttypes.GetSDKSpecs(), + []string{"upgrade", "upgradedIBCState"}, true, true) + } + + params.ParamsSubspace.Set(params.Ctx, providertypes.KeyTemplateClient, customState) +} + +// Registers proto interfaces for params.Cdc +// +// For now, we explicitly force certain unit tests to register sdk crypto interfaces. +// TODO: This function will be executed automatically once https://github.com/cosmos/interchain-security/issues/273 is solved. 
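For a feel of how this scaffolding is meant to be used from a test, here is a minimal sketch (hypothetical test name; only keeper methods that appear elsewhere in this patch are used). Plain store accessors need no mock expectations, so ctrl.Finish() passes without any EXPECT calls:

    package keeper_test

    import (
        "testing"

        testkeeper "github.com/cosmos/interchain-security/testutil/keeper"
        "github.com/stretchr/testify/require"
    )

    func TestConsumerKeeperScaffolding(t *testing.T) {
        // Customize the in-memory params before building the keeper;
        // params.SetTemplateClientState(...) and params.RegisterSdkCryptoCodecInterfaces()
        // can be applied here when a test needs them.
        params := testkeeper.NewInMemKeeperParams(t)

        consumerKeeper, ctx, ctrl, _ := testkeeper.GetConsumerKeeperAndCtx(t, params)
        defer ctrl.Finish()

        // Reads and writes against the in-memory store work without mocks.
        consumerKeeper.SetProviderClientID(ctx, "clientIDToProvider")
        clientID, found := consumerKeeper.GetProviderClientID(ctx)
        require.True(t, found)
        require.Equal(t, "clientIDToProvider", clientID)
    }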
+func (params *InMemKeeperParams) RegisterSdkCryptoCodecInterfaces() { + ir := codectypes.NewInterfaceRegistry() + // Public key implementation registered here + cryptocodec.RegisterInterfaces(ir) + // Replace default cdc, with a custom (registered) codec + params.Cdc = codec.NewProtoCodec(ir) } type PrivateKey struct { @@ -196,3 +208,32 @@ func GenPubKey() (crypto.PubKey, error) { privKey := PrivateKey{ed25519.GenPrivKey()} return cryptocodec.ToTmPubKeyInterface(privKey.PrivKey.PubKey()) } + +func GetClientState(chainID string) *ibctmtypes.ClientState { + return ibctmtypes.NewClientState(chainID, ibctmtypes.DefaultTrustLevel, 0, 0, + time.Second*10, clienttypes.Height{}, commitmenttypes.GetSDKSpecs(), + []string{"upgrade", "upgradedIBCState"}, true, true) +} + +func GetConsensusState(clientID string, timestamp time.Time, vals ...*tmtypes.Validator) *ibctmtypes.ConsensusState { + return ibctmtypes.NewConsensusState(timestamp, commitmenttypes.NewMerkleRoot([]byte("apphash")), + tmtypes.NewValidatorSet(vals).Hash()[:]) +} + +// SetupForStoppingConsumerChain registers expected mock calls and corresponding state setup +// which asserts that a consumer chain was properly stopped from StopConsumerChain(). +func SetupForStoppingConsumerChain(t *testing.T, ctx sdk.Context, + providerKeeper *providerkeeper.Keeper, mocks MockedKeepers) { + + expectations := GetMocksForCreateConsumerClient(ctx, &mocks, + "chainID", clienttypes.NewHeight(2, 3)) + expectations = append(expectations, GetMocksForSetConsumerChain(ctx, &mocks, "chainID")...) + expectations = append(expectations, GetMocksForStopConsumerChain(ctx, &mocks)...) + + gomock.InOrder(expectations...) + + err := providerKeeper.CreateConsumerClient(ctx, "chainID", clienttypes.NewHeight(2, 3), false) + require.NoError(t, err) + err = providerKeeper.SetConsumerChain(ctx, "channelID") + require.NoError(t, err) +} diff --git a/testutil/simapp/simapp.go b/testutil/simapp/simapp.go index f18de89c9f..cd76500be6 100644 --- a/testutil/simapp/simapp.go +++ b/testutil/simapp/simapp.go @@ -13,6 +13,7 @@ import ( tmdb "github.com/tendermint/tm-db" appConsumer "github.com/cosmos/interchain-security/app/consumer" + appConsumerDemocracy "github.com/cosmos/interchain-security/app/consumer-democracy" appProvider "github.com/cosmos/interchain-security/app/provider" ) @@ -24,6 +25,14 @@ func SetupTestingappProvider() (ibctesting.TestingApp, map[string]json.RawMessag return testApp, appProvider.NewDefaultGenesisState(encoding.Marshaler) } +func SetupTestingAppConsumerDemocracy() (ibctesting.TestingApp, map[string]json.RawMessage) { + db := tmdb.NewMemDB() + // encCdc := app.MakeTestEncodingConfig() + encoding := cosmoscmd.MakeEncodingConfig(appConsumerDemocracy.ModuleBasics) + testApp := appConsumerDemocracy.New(log.NewNopLogger(), db, nil, true, map[int64]bool{}, simapp.DefaultNodeHome, 5, encoding, simapp.EmptyAppOptions{}).(ibctesting.TestingApp) + return testApp, appConsumerDemocracy.NewDefaultGenesisState(encoding.Marshaler) +} + func SetupTestingAppConsumer() (ibctesting.TestingApp, map[string]json.RawMessage) { db := tmdb.NewMemDB() // encCdc := app.MakeTestEncodingConfig() @@ -55,3 +64,16 @@ func NewProviderConsumerCoordinator(t *testing.T) (*ibctesting.Coordinator, *ibc consumerChain := coordinator.GetChain(chainID) return coordinator, providerChain, consumerChain } + +// NewCoordinator initializes Coordinator with provider and democracy consumer TestChains +func NewProviderConsumerDemocracyCoordinator(t *testing.T) (*ibctesting.Coordinator, 
*ibctesting.TestChain, *ibctesting.TestChain) { + coordinator := NewBasicCoordinator(t) + chainID := ibctesting.GetChainID(1) + coordinator.Chains[chainID] = ibctesting.NewTestChain(t, coordinator, SetupTestingappProvider, chainID) + providerChain := coordinator.GetChain(chainID) + chainID = ibctesting.GetChainID(2) + coordinator.Chains[chainID] = ibctesting.NewTestChainWithValSet(t, coordinator, + SetupTestingAppConsumerDemocracy, chainID, providerChain.Vals, providerChain.Signers) + consumerChain := coordinator.GetChain(chainID) + return coordinator, providerChain, consumerChain +} diff --git a/third_party/proto/cosmos/staking/v1beta1/staking.proto b/third_party/proto/cosmos/staking/v1beta1/staking.proto index 61fd247d1d..27581a12cf 100644 --- a/third_party/proto/cosmos/staking/v1beta1/staking.proto +++ b/third_party/proto/cosmos/staking/v1beta1/staking.proto @@ -9,6 +9,7 @@ import "google/protobuf/timestamp.proto"; import "cosmos_proto/cosmos.proto"; import "cosmos/base/v1beta1/coin.proto"; import "tendermint/types/types.proto"; +import "tendermint/abci/types.proto"; option go_package = "github.com/cosmos/cosmos-sdk/x/staking/types"; @@ -118,8 +119,11 @@ message Validator { (gogoproto.nullable) = false ]; - // True if this validator's unbonding has been stopped by an external module - bool unbonding_on_hold = 12; + // strictly positive if this validator's unbonding has been stopped by external modules + int64 unbonding_on_hold_ref_count = 12; + + // list of unbonding ids, each uniquely identifing an unbonding of this validator + repeated uint64 unbonding_ids = 13; } // BondStatus is the status of a validator. @@ -233,8 +237,8 @@ message UnbondingDelegationEntry { // Incrementing id that uniquely identifies this entry uint64 unbonding_id = 5; - // True if this entry's unbonding has been stopped by an external module - bool unbonding_on_hold = 6; + // Strictly positive if this entry's unbonding has been stopped by external modules + int64 unbonding_on_hold_ref_count = 6; } // RedelegationEntry defines a redelegation object with relevant metadata. @@ -260,8 +264,8 @@ message RedelegationEntry { // Incrementing id that uniquely identifies this entry uint64 unbonding_id = 5; - // True if this entry's unbonding has been stopped by an external module - bool unbonding_on_hold = 6; + // Strictly positive if this entry's unbonding has been stopped by external modules + int64 unbonding_on_hold_ref_count = 6; } // Redelegation contains the list of a particular delegator's redelegating bonds @@ -358,4 +362,9 @@ enum InfractionType { INFRACTION_TYPE_DOUBLE_SIGN = 1 [(gogoproto.enumvalue_customname) = "DoubleSign"]; // DOWNTIME defines a validator that missed signing too many blocks. INFRACTION_TYPE_DOWNTIME = 2 [(gogoproto.enumvalue_customname) = "Downtime"]; +} + +// ValidatorUpdates defines an array of abci.ValidatorUpdate objects. 
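The staking.proto change above replaces the boolean unbonding_on_hold with unbonding_on_hold_ref_count, so that more than one external module (or more than one consumer chain) can independently put the same unbonding on hold. A schematic illustration of the intended bookkeeping, not the SDK implementation: each hold increments the counter, each release decrements it, and (in addition to the normal maturity condition) the unbonding may only complete once the counter is back to zero.

    package main

    import "fmt"

    // unbondingEntry sketches the ref-count semantics implied by
    // unbonding_on_hold_ref_count.
    type unbondingEntry struct {
        onHoldRefCount int64
    }

    func (e *unbondingEntry) putOnHold()        { e.onHoldRefCount++ }
    func (e *unbondingEntry) releaseHold()      { e.onHoldRefCount-- }
    func (e *unbondingEntry) canComplete() bool { return e.onHoldRefCount == 0 }

    func main() {
        var e unbondingEntry
        e.putOnHold() // e.g. waiting on consumer chain A
        e.putOnHold() // e.g. waiting on consumer chain B
        e.releaseHold()
        fmt.Println(e.canComplete()) // false: one hold is still outstanding
        e.releaseHold()
        fmt.Println(e.canComplete()) // true
    }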
+message ValidatorUpdates { + repeated tendermint.abci.ValidatorUpdate updates = 1 [(gogoproto.nullable) = false]; } \ No newline at end of file diff --git a/x/ccv/consumer/ibc_module.go b/x/ccv/consumer/ibc_module.go index 77527c34f2..b16457e2fb 100644 --- a/x/ccv/consumer/ibc_module.go +++ b/x/ccv/consumer/ibc_module.go @@ -55,7 +55,7 @@ func (am AppModule) OnChanOpenInit( return err } - return am.keeper.VerifyProviderChain(ctx, channelID, connectionHops) + return am.keeper.VerifyProviderChain(ctx, connectionHops) } // validateCCVChannelParams validates a ccv channel diff --git a/x/ccv/consumer/ibc_module_test.go b/x/ccv/consumer/ibc_module_test.go new file mode 100644 index 0000000000..67eed2568c --- /dev/null +++ b/x/ccv/consumer/ibc_module_test.go @@ -0,0 +1,374 @@ +package consumer_test + +import ( + "testing" + + sdk "github.com/cosmos/cosmos-sdk/types" + capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" + transfertypes "github.com/cosmos/ibc-go/v3/modules/apps/transfer/types" + conntypes "github.com/cosmos/ibc-go/v3/modules/core/03-connection/types" + channeltypes "github.com/cosmos/ibc-go/v3/modules/core/04-channel/types" + host "github.com/cosmos/ibc-go/v3/modules/core/24-host" + testkeeper "github.com/cosmos/interchain-security/testutil/keeper" + "github.com/cosmos/interchain-security/x/ccv/consumer" + consumerkeeper "github.com/cosmos/interchain-security/x/ccv/consumer/keeper" + providertypes "github.com/cosmos/interchain-security/x/ccv/provider/types" + ccv "github.com/cosmos/interchain-security/x/ccv/types" + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/require" +) + +// TestOnChanOpenInit validates the consumer's OnChanOpenInit implementation against the spec. +// Additional validation for VerifyProviderChain can be found in it's unit test. 
+// +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-ccf-coinit1 +// Spec tag: [CCV-CCF-COINIT.1] +func TestOnChanOpenInit(t *testing.T) { + + // Params for the OnChanOpenInit method + type params struct { + ctx sdk.Context + order channeltypes.Order + connectionHops []string + portID string + channelID string + chanCap *capabilitytypes.Capability + counterparty channeltypes.Counterparty + version string + } + + testCases := []struct { + name string + // Test-case specific function that mutates method parameters and setups expected mock calls + setup func(*consumerkeeper.Keeper, *params, testkeeper.MockedKeepers) + expPass bool + }{ + { + "success", func(keeper *consumerkeeper.Keeper, params *params, mocks testkeeper.MockedKeepers) { + gomock.InOrder( + mocks.MockScopedKeeper.EXPECT().ClaimCapability( + params.ctx, params.chanCap, host.ChannelCapabilityPath( + params.portID, params.channelID)).Return(nil).Times(1), + mocks.MockConnectionKeeper.EXPECT().GetConnection( + params.ctx, "connectionIDToProvider").Return( + conntypes.ConnectionEnd{ClientId: "clientIDToProvider"}, true).Times(1), + ) + }, true, + }, + { + "invalid: channel to provider already established", + func(keeper *consumerkeeper.Keeper, params *params, mocks testkeeper.MockedKeepers) { + keeper.SetProviderChannel(params.ctx, "existingProviderChanID") + }, false, + }, + { + "invalid: UNORDERED channel", + func(keeper *consumerkeeper.Keeper, params *params, mocks testkeeper.MockedKeepers) { + params.order = channeltypes.UNORDERED + }, false, + }, + { + "invalid port ID, not CCV port", + func(keeper *consumerkeeper.Keeper, params *params, mocks testkeeper.MockedKeepers) { + params.portID = "someDingusPortID" + }, false, + }, + { + "invalid version", + func(keeper *consumerkeeper.Keeper, params *params, mocks testkeeper.MockedKeepers) { + params.version = "someDingusVer" + }, false, + }, + { + "invalid counterparty port ID", + func(keeper *consumerkeeper.Keeper, params *params, mocks testkeeper.MockedKeepers) { + params.counterparty.PortId = "someOtherDingusPortID" + }, false, + }, + { + "invalid clientID to provider", + func(keeper *consumerkeeper.Keeper, params *params, mocks testkeeper.MockedKeepers) { + gomock.InOrder( + mocks.MockScopedKeeper.EXPECT().ClaimCapability( + params.ctx, params.chanCap, host.ChannelCapabilityPath( + params.portID, params.channelID)).Return(nil).Times(1), + mocks.MockConnectionKeeper.EXPECT().GetConnection( + params.ctx, "connectionIDToProvider").Return( + conntypes.ConnectionEnd{ClientId: "unexpectedClientID"}, true).Times(1), // unexpected clientID + ) + }, false, + }, + } + + for _, tc := range testCases { + + // Common setup + consumerKeeper, ctx, ctrl, mocks := testkeeper.GetConsumerKeeperAndCtx( + t, testkeeper.NewInMemKeeperParams(t)) + consumerModule := consumer.NewAppModule(consumerKeeper) + + consumerKeeper.SetPort(ctx, ccv.ConsumerPortID) + consumerKeeper.SetProviderClientID(ctx, "clientIDToProvider") + + // Instantiate valid params as default. Individual test cases mutate these as needed. 
+ params := params{ + ctx: ctx, + order: channeltypes.ORDERED, + connectionHops: []string{"connectionIDToProvider"}, + portID: ccv.ConsumerPortID, + channelID: "consumerChannelID", + chanCap: &capabilitytypes.Capability{}, + counterparty: channeltypes.NewCounterparty(ccv.ProviderPortID, "providerChannelID"), + version: ccv.Version, + } + + tc.setup(&consumerKeeper, ¶ms, mocks) + + err := consumerModule.OnChanOpenInit( + params.ctx, + params.order, + params.connectionHops, + params.portID, + params.channelID, + params.chanCap, + params.counterparty, + params.version, + ) + + if tc.expPass { + require.NoError(t, err) + } else { + require.Error(t, err) + } + // Confirm there are no unexpected external keeper calls + ctrl.Finish() + } +} + +// TestOnChanOpenTry validates the consumer's OnChanOpenTry implementation against the spec. +// +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-ccf-cotry1 +// Spec tag: [CCV-CCF-COTRY.1] +func TestOnChanOpenTry(t *testing.T) { + + consumerKeeper, ctx, ctrl, _ := testkeeper.GetConsumerKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + // No external keeper methods should be called + defer ctrl.Finish() + consumerModule := consumer.NewAppModule(consumerKeeper) + + // OnOpenTry must error even with correct arguments + _, err := consumerModule.OnChanOpenTry( + ctx, + channeltypes.ORDERED, + []string{"connection-1"}, + ccv.ConsumerPortID, + "channel-1", + nil, + channeltypes.NewCounterparty(ccv.ProviderPortID, "channel-1"), + ccv.Version, + ) + require.Error(t, err, "OnChanOpenTry callback must error on consumer chain") +} + +// TestOnChanOpenAck validates the consumer's OnChanOpenAck implementation against the spec. +// +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-ccf-coack1 +// Spec tag: [CCV-CCF-COACK.1] +func TestOnChanOpenAck(t *testing.T) { + + // Params for the OnChanOpenAck method + type params struct { + ctx sdk.Context + portID string + channelID string + counterpartyChannelID string + counterpartyMetadata string + } + + testCases := []struct { + name string + // Test-case specific function that mutates method parameters and setups expected mock calls + setup func(*consumerkeeper.Keeper, *params, testkeeper.MockedKeepers) + expPass bool + }{ + { + "success", + func(keeper *consumerkeeper.Keeper, params *params, mocks testkeeper.MockedKeepers) { + // Expected msg + distrTransferMsg := channeltypes.NewMsgChannelOpenInit( + transfertypes.PortID, + transfertypes.Version, + channeltypes.UNORDERED, + []string{"connectionID"}, + transfertypes.PortID, + "", // signer unused + ) + + // Expected mock calls + gomock.InOrder( + mocks.MockChannelKeeper.EXPECT().GetChannel( + params.ctx, params.portID, params.channelID).Return(channeltypes.Channel{ + ConnectionHops: []string{"connectionID"}, + }, true).Times(1), + mocks.MockIBCCoreKeeper.EXPECT().ChannelOpenInit( + sdk.WrapSDKContext(params.ctx), distrTransferMsg).Return( + &channeltypes.MsgChannelOpenInitResponse{}, nil, + ).Times(1), + ) + }, + true, + }, + { + "invalid: provider channel already established", + func(keeper *consumerkeeper.Keeper, params *params, mocks testkeeper.MockedKeepers) { + keeper.SetProviderChannel(params.ctx, "existingProviderChannelID") + }, false, + }, + { + "invalid: cannot unmarshal ack metadata ", + func(keeper *consumerkeeper.Keeper, params *params, mocks testkeeper.MockedKeepers) { + params.counterpartyMetadata = "bunkData" + }, false, + }, + { + "invalid: 
mismatched serialized version", + func(keeper *consumerkeeper.Keeper, params *params, mocks testkeeper.MockedKeepers) { + md := providertypes.HandshakeMetadata{ + ProviderFeePoolAddr: "", // dummy address used + Version: "bunkVersion", + } + metadataBz, err := md.Marshal() + require.NoError(t, err) + params.counterpartyMetadata = string(metadataBz) + }, false, + }, + } + + for _, tc := range testCases { + // Common setup + consumerKeeper, ctx, ctrl, mocks := testkeeper.GetConsumerKeeperAndCtx( + t, testkeeper.NewInMemKeeperParams(t)) + consumerModule := consumer.NewAppModule(consumerKeeper) + + // Instantiate valid params as default. Individual test cases mutate these as needed. + params := params{ + ctx: ctx, + portID: ccv.ConsumerPortID, + channelID: "consumerCCVChannelID", + counterpartyChannelID: "providerCCVChannelID", + } + + metadata := providertypes.HandshakeMetadata{ + ProviderFeePoolAddr: "someAcct", + Version: ccv.Version, + } + + metadataBz, err := metadata.Marshal() + require.NoError(t, err) + + params.counterpartyMetadata = string(metadataBz) + + tc.setup(&consumerKeeper, ¶ms, mocks) + + err = consumerModule.OnChanOpenAck( + params.ctx, + params.portID, + params.channelID, + params.counterpartyChannelID, + params.counterpartyMetadata, + ) + + if tc.expPass { + require.NoError(t, err) + // Confirm address of the distribution module account (on provider) was persisted on consumer + distModuleAcct := consumerKeeper.GetProviderFeePoolAddrStr(ctx) + require.Equal(t, "someAcct", distModuleAcct) + } else { + require.Error(t, err) + } + // Confirm there are no unexpected external keeper calls + ctrl.Finish() + } +} + +// TestOnChanOpenConfirm validates the consumer's OnChanOpenConfirm implementation against the spec. +// +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-ccf-coconfirm1 +// Spec tag: [CCV-CCF-COCONFIRM.1] +func TestOnChanOpenConfirm(t *testing.T) { + consumerKeeper, ctx, ctrl, _ := testkeeper.GetConsumerKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() + consumerModule := consumer.NewAppModule(consumerKeeper) + + err := consumerModule.OnChanOpenConfirm(ctx, ccv.ConsumerPortID, "channel-1") + require.Error(t, err, "OnChanOpenConfirm callback must error on consumer chain") +} + +// TestOnChanCloseInit validates the consumer's OnChanCloseInit implementation against the spec. 
+// +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-ccf-ccinit1 +// Spec tag: [CCV-CCF-CCINIT.1] +func TestOnChanCloseInit(t *testing.T) { + + testCases := []struct { + name string + channelToClose string + establishedProviderExists bool + expPass bool + }{ + { + name: "No established provider channel, error returned disallowing closing of channel", + channelToClose: "someChannelID", + establishedProviderExists: false, + expPass: false, + }, + { + name: "Provider channel is established, User CANNOT close established provider channel", + channelToClose: "provider", + establishedProviderExists: true, + expPass: false, + }, + { + name: "User CAN close duplicate channel that is NOT established provider", + channelToClose: "someChannelID", + establishedProviderExists: true, + expPass: true, + }, + } + + for _, tc := range testCases { + consumerKeeper, ctx, ctrl, _ := testkeeper.GetConsumerKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + consumerModule := consumer.NewAppModule(consumerKeeper) + + if tc.establishedProviderExists { + consumerKeeper.SetProviderChannel(ctx, "provider") + } + + err := consumerModule.OnChanCloseInit(ctx, "portID", tc.channelToClose) + + if tc.expPass { + require.NoError(t, err) + } else { + require.Error(t, err) + } + ctrl.Finish() + } +} + +// TestOnChanCloseConfirm validates the consumer's OnChanCloseConfirm implementation against the spec. +// +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-ccconfirm1// Spec tag: [CCV-CCF-CCINIT.1] +// Spec tag: [CCV-PCF-CCCONFIRM.1] +func TestOnChanCloseConfirm(t *testing.T) { + + consumerKeeper, ctx, ctrl, _ := testkeeper.GetConsumerKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + + // No external keeper methods should be called + defer ctrl.Finish() + + consumerModule := consumer.NewAppModule(consumerKeeper) + + // Nothing happens, no error returned + err := consumerModule.OnChanCloseConfirm(ctx, "portID", "channelID") + require.NoError(t, err) +} diff --git a/x/ccv/consumer/keeper/genesis.go b/x/ccv/consumer/keeper/genesis.go index 93fa8d8b6b..74873b96b2 100644 --- a/x/ccv/consumer/keeper/genesis.go +++ b/x/ccv/consumer/keeper/genesis.go @@ -6,6 +6,7 @@ import ( sdk "github.com/cosmos/cosmos-sdk/types" ibctmtypes "github.com/cosmos/ibc-go/v3/modules/light-clients/07-tendermint/types" + "github.com/cosmos/interchain-security/x/ccv/consumer/types" consumertypes "github.com/cosmos/interchain-security/x/ccv/consumer/types" ccv "github.com/cosmos/interchain-security/x/ccv/types" utils "github.com/cosmos/interchain-security/x/ccv/utils" @@ -17,6 +18,8 @@ import ( // InitGenesis initializes the CCV consumer state and binds to PortID. 
func (k Keeper) InitGenesis(ctx sdk.Context, state *consumertypes.GenesisState) []abci.ValidatorUpdate { k.SetParams(ctx, state.Params) + // TODO: Remove enabled flag and find a better way to setup e2e tests + // See: https://github.com/cosmos/interchain-security/issues/339 if !state.Params.Enabled { return nil } @@ -87,6 +90,11 @@ func (k Keeper) InitGenesis(ctx sdk.Context, state *consumertypes.GenesisState) unbondingTime := utils.ComputeConsumerUnbondingPeriod(tmClientState.UnbondingPeriod) k.SetUnbondingTime(ctx, unbondingTime) + // set height to valset update id mapping + for _, h2v := range state.HeightToValsetUpdateId { + k.SetHeightValsetUpdateID(ctx, h2v.Height, h2v.ValsetUpdateId) + } + // set provider client id k.SetProviderClientID(ctx, state.ProviderClientId) // set provider channel id. @@ -97,6 +105,7 @@ func (k Keeper) InitGenesis(ctx sdk.Context, state *consumertypes.GenesisState) } } + // populate cross chain validators states with initial valset k.ApplyCCValidatorChanges(ctx, state.InitialValSet) return state.InitialValSet @@ -104,56 +113,89 @@ func (k Keeper) InitGenesis(ctx sdk.Context, state *consumertypes.GenesisState) // ExportGenesis exports the CCV consumer state. If the channel has already been established, then we export // provider chain. Otherwise, this is still considered a new chain and we export latest client state. -func (k Keeper) ExportGenesis(ctx sdk.Context) *consumertypes.GenesisState { +func (k Keeper) ExportGenesis(ctx sdk.Context) (genesis *consumertypes.GenesisState) { params := k.GetParams(ctx) if !params.Enabled { return consumertypes.DefaultGenesisState() } + // export the current validator set + valset, err := k.GetValidatorUpdates(ctx) + if err != nil { + panic(fmt.Sprintf("fail to retrieve the validator set: %s", err)) + } + + // export all the states created after a provider channel got established if channelID, ok := k.GetProviderChannel(ctx); ok { clientID, ok := k.GetProviderClientID(ctx) if !ok { panic("provider client does not exist") } - // ValUpdates must be filled in off-line - gs := consumertypes.NewRestartGenesisState(clientID, channelID, nil, nil, params) - maturingPackets := []consumertypes.MaturingVSCPacket{} - cb := func(vscId, timeNs uint64) bool { - mat := consumertypes.MaturingVSCPacket{ + maturingPackets := []types.MaturingVSCPacket{} + k.IteratePacketMaturityTime(ctx, func(vscId, timeNs uint64) bool { + mat := types.MaturingVSCPacket{ VscId: vscId, MaturityTime: timeNs, } maturingPackets = append(maturingPackets, mat) return false - } - k.IteratePacketMaturityTime(ctx, cb) + }) - gs.MaturingPackets = maturingPackets - return gs - } - clientID, ok := k.GetProviderClientID(ctx) - // if provider clientID and channelID don't exist on the consumer chain, then CCV protocol is disabled for this chain - // return a disabled genesis state - if !ok { - return consumertypes.DefaultGenesisState() - } - cs, ok := k.clientKeeper.GetClientState(ctx, clientID) - if !ok { - panic("provider client not set on already running consumer chain") - } - tmCs, ok := cs.(*ibctmtypes.ClientState) - if !ok { - panic("provider client consensus state is not tendermint client state") - } - consState, ok := k.clientKeeper.GetLatestClientConsensusState(ctx, clientID) - if !ok { - panic("provider consensus state not set on already running consumer chain") - } - tmConsState, ok := consState.(*ibctmtypes.ConsensusState) - if !ok { - panic("provider consensus state is not tendermint consensus state") + heightToVCIDs := []types.HeightToValsetUpdateID{} + 
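+		// collect every stored block height to valset update ID pair for export;
+		// IterateHeightToValsetUpdateID (added in keeper.go below) keeps calling
+		// the callback while it returns true and stops once it returns false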
k.IterateHeightToValsetUpdateID(ctx, func(height, vscID uint64) bool { + hv := types.HeightToValsetUpdateID{ + Height: height, + ValsetUpdateId: vscID, + } + heightToVCIDs = append(heightToVCIDs, hv) + return true + }) + + outstandingDowntimes := []types.OutstandingDowntime{} + k.IterateOutstandingDowntime(ctx, func(addr string) bool { + od := types.OutstandingDowntime{ + ValidatorConsensusAddress: addr, + } + outstandingDowntimes = append(outstandingDowntimes, od) + return false + }) + + genesis = types.NewRestartGenesisState( + clientID, + channelID, + maturingPackets, + valset, + heightToVCIDs, + outstandingDowntimes, + params, + ) + } else { + clientID, ok := k.GetProviderClientID(ctx) + // if provider clientID and channelID don't exist on the consumer chain, then CCV protocol is disabled for this chain + // return a disabled genesis state + if !ok { + return consumertypes.DefaultGenesisState() + } + cs, ok := k.clientKeeper.GetClientState(ctx, clientID) + if !ok { + panic("provider client not set on already running consumer chain") + } + tmCs, ok := cs.(*ibctmtypes.ClientState) + if !ok { + panic("provider client consensus state is not tendermint client state") + } + consState, ok := k.clientKeeper.GetLatestClientConsensusState(ctx, clientID) + if !ok { + panic("provider consensus state not set on already running consumer chain") + } + tmConsState, ok := consState.(*ibctmtypes.ConsensusState) + if !ok { + panic("provider consensus state is not tendermint consensus state") + } + // export client states and pending slashing requests into a new chain genesis + genesis = consumertypes.NewInitialGenesisState(tmCs, tmConsState, valset, k.GetPendingSlashRequests(ctx), params) } - // ValUpdates must be filled in off-line - return consumertypes.NewInitialGenesisState(tmCs, tmConsState, nil, params) + + return } diff --git a/x/ccv/consumer/keeper/genesis_test.go b/x/ccv/consumer/keeper/genesis_test.go new file mode 100644 index 0000000000..4077858303 --- /dev/null +++ b/x/ccv/consumer/keeper/genesis_test.go @@ -0,0 +1,265 @@ +package keeper_test + +import ( + "testing" + "time" + + sdk "github.com/cosmos/cosmos-sdk/types" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + host "github.com/cosmos/ibc-go/v3/modules/core/24-host" + testkeeper "github.com/cosmos/interchain-security/testutil/keeper" + consumerkeeper "github.com/cosmos/interchain-security/x/ccv/consumer/keeper" + "github.com/cosmos/interchain-security/x/ccv/consumer/types" + ccv "github.com/cosmos/interchain-security/x/ccv/types" + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/require" + abci "github.com/tendermint/tendermint/abci/types" + tmtypes "github.com/tendermint/tendermint/types" + + cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec" + "github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" + consumertypes "github.com/cosmos/interchain-security/x/ccv/consumer/types" + + testutil "github.com/cosmos/interchain-security/testutil/keeper" +) + +func TestInitGenesis(t *testing.T) { + + // store consumer chain states in variables + + // create channel and client IDs for the consumer + channelID := "channelID" + clientID := "tendermint-07" + + // generate validator public key + pubKey, err := testutil.GenPubKey() + require.NoError(t, err) + + // create validator set with single validator + validator := tmtypes.NewValidator(pubKey, 1) + + // create consensus state using a single validator + consensusState := testutil.GetConsensusState(clientID, time.Time{}, validator) + + slashRequests := 
consumertypes.SlashRequests{ + Requests: []consumertypes.SlashRequest{{Infraction: stakingtypes.Downtime}}, + } + matPacket := consumertypes.MaturingVSCPacket{ + VscId: uint64(1), + MaturityTime: uint64(time.Now().UnixNano()), + } + + // create paramameters for a new chain + params := types.NewParams(true, types.DefaultBlocksPerDistributionTransmission, "", "") + + testCases := []struct { + name string + malleate func(sdk.Context, testutil.MockedKeepers) + genesis *consumertypes.GenesisState + assertStates func(sdk.Context, consumerkeeper.Keeper, *consumertypes.GenesisState) + }{ + { + name: "restart a new chain", + malleate: func(ctx sdk.Context, mocks testutil.MockedKeepers) { + gomock.InOrder( + expectGetCapabilityMock(ctx, mocks), + expectCreateClientMock(ctx, mocks, "", clientID, validator), + ) + }, + genesis: consumertypes.NewInitialGenesisState(testutil.GetClientState(""), consensusState, + []abci.ValidatorUpdate{tmtypes.TM2PB.ValidatorUpdate(validator)}, slashRequests, params), + + assertStates: func(ctx sdk.Context, ck consumerkeeper.Keeper, gs *consumertypes.GenesisState) { + require.Equal(t, gs.Params, ck.GetParams(ctx)) + require.Equal(t, ccv.ConsumerPortID, ck.GetPort(ctx)) + + ubdTime, found := ck.GetUnbondingTime(ctx) + require.True(t, found) + require.Equal(t, gs.ProviderClientState.UnbondingPeriod, ubdTime) + + require.Zero(t, ck.GetHeightValsetUpdateID(ctx, uint64(ctx.BlockHeight()))) + + cid, ok := ck.GetProviderClientID(ctx) + require.True(t, ok) + require.Equal(t, clientID, cid) + }, + }, { + name: "restart a chain with an already established channel", + malleate: func(ctx sdk.Context, mocks testutil.MockedKeepers) { + gomock.InOrder( + expectGetCapabilityMock(ctx, mocks), + expectLatestConsensusStateMock(ctx, mocks, clientID, validator), + expectGetClientStateMock(ctx, mocks, "", clientID), + ) + }, + genesis: consumertypes.NewRestartGenesisState(clientID, channelID, + []consumertypes.MaturingVSCPacket{matPacket}, + []abci.ValidatorUpdate{tmtypes.TM2PB.ValidatorUpdate(validator)}, + []consumertypes.HeightToValsetUpdateID{{ValsetUpdateId: matPacket.VscId, Height: uint64(0)}}, + []consumertypes.OutstandingDowntime{{ValidatorConsensusAddress: sdk.ConsAddress(validator.Bytes()).String()}}, + params, + ), + assertStates: func(ctx sdk.Context, ck consumerkeeper.Keeper, gs *consumertypes.GenesisState) { + require.Equal(t, gs.Params, ck.GetParams(ctx)) + require.Equal(t, ccv.ConsumerPortID, ck.GetPort(ctx)) + + ubdTime, found := ck.GetUnbondingTime(ctx) + require.True(t, found) + require.Equal(t, testutil.GetClientState("").UnbondingPeriod, ubdTime) + + // export states to genesis + require.Equal(t, matPacket.VscId, ck.GetHeightValsetUpdateID(ctx, uint64(0))) + + require.Equal(t, matPacket.MaturityTime, ck.GetPacketMaturityTime(ctx, matPacket.VscId)) + require.Equal(t, gs.Params, ck.GetParams(ctx)) + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + + keeperParams := testkeeper.NewInMemKeeperParams(t) + // Explicitly register codec with public key interface + keeperParams.RegisterSdkCryptoCodecInterfaces() + consumerKeeper, ctx, ctrl, mocks := testkeeper.GetConsumerKeeperAndCtx(t, keeperParams) + defer ctrl.Finish() + + // test setup + tc.malleate(ctx, mocks) + + // init the chain states using a genesis + consumerKeeper.InitGenesis(ctx, tc.genesis) + + // assert states + tc.assertStates(ctx, consumerKeeper, tc.genesis) + }) + } +} + +func TestExportGenesis(t *testing.T) { + + clientID := "tendermint-07" + channelID := "channelID" + + // 
define the states exported into genesis + slashRequests := consumertypes.SlashRequests{ + Requests: []consumertypes.SlashRequest{{Infraction: stakingtypes.Downtime}}, + } + restartHeight := uint64(0) + matPacket := consumertypes.MaturingVSCPacket{ + VscId: uint64(1), + MaturityTime: uint64(time.Now().UnixNano()), + } + + params := types.NewParams(true, types.DefaultBlocksPerDistributionTransmission, "", "") + + // create a single validator + pubKey := ed25519.GenPrivKey().PubKey() + tmPK, err := cryptocodec.ToTmPubKeyInterface(pubKey) + require.NoError(t, err) + validator := tmtypes.NewValidator(tmPK, 1) + + // create consensus state using a single validator + consensusState := testutil.GetConsensusState(clientID, time.Time{}, validator) + + testCases := []struct { + name string + malleate func(sdk.Context, consumerkeeper.Keeper, testutil.MockedKeepers) + expGenesis *consumertypes.GenesisState + }{ + { + name: "export a new chain", + malleate: func(ctx sdk.Context, ck consumerkeeper.Keeper, mocks testutil.MockedKeepers) { + // populate the states used by a new consumer chain + cVal, err := consumertypes.NewCCValidator(validator.Address.Bytes(), 1, pubKey) + require.NoError(t, err) + ck.SetCCValidator(ctx, cVal) + ck.SetProviderClientID(ctx, clientID) + ck.SetPendingSlashRequests( + ctx, + slashRequests, + ) + + // set the mock calls executed during the export + gomock.InOrder( + expectGetClientStateMock(ctx, mocks, "", clientID), + expectLatestConsensusStateMock(ctx, mocks, clientID, validator), + ) + }, + + expGenesis: consumertypes.NewInitialGenesisState(testutil.GetClientState(""), consensusState, + []abci.ValidatorUpdate{tmtypes.TM2PB.ValidatorUpdate(validator)}, slashRequests, params), + }, + { + name: "export a chain that has an established CCV channel", + malleate: func(ctx sdk.Context, ck consumerkeeper.Keeper, mocks testutil.MockedKeepers) { + // populate the states used by a running chain + cVal, err := consumertypes.NewCCValidator(validator.Address.Bytes(), 1, pubKey) + require.NoError(t, err) + ck.SetCCValidator(ctx, cVal) + ck.SetOutstandingDowntime(ctx, sdk.ConsAddress(validator.Address.Bytes())) + + // populate the required states to simulate a completed handshake + ck.SetProviderClientID(ctx, clientID) + ck.SetProviderChannel(ctx, channelID) + ck.SetHeightValsetUpdateID(ctx, restartHeight, matPacket.VscId) + ck.SetPacketMaturityTime(ctx, matPacket.VscId, matPacket.MaturityTime) + }, + expGenesis: consumertypes.NewRestartGenesisState( + clientID, + channelID, + []consumertypes.MaturingVSCPacket{matPacket}, + []abci.ValidatorUpdate{tmtypes.TM2PB.ValidatorUpdate(validator)}, + []types.HeightToValsetUpdateID{{Height: restartHeight, ValsetUpdateId: matPacket.VscId}}, + []consumertypes.OutstandingDowntime{{ValidatorConsensusAddress: sdk.ConsAddress(validator.Address.Bytes()).String()}}, + params, + ), + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + + keeperParams := testkeeper.NewInMemKeeperParams(t) + // Explicitly register codec with public key interface + keeperParams.RegisterSdkCryptoCodecInterfaces() + consumerKeeper, ctx, ctrl, mocks := testkeeper.GetConsumerKeeperAndCtx(t, keeperParams) + defer ctrl.Finish() + consumerKeeper.SetParams(ctx, params) + + // test setup + tc.malleate(ctx, consumerKeeper, mocks) + + // export staet to genesis + gotGen := consumerKeeper.ExportGenesis(ctx) + + // check the obtained genesis + require.EqualValues(t, tc.expGenesis, gotGen) + }) + } +} + +func expectLatestConsensusStateMock(ctx sdk.Context, mocks 
testutil.MockedKeepers, clientID string, vals ...*tmtypes.Validator) *gomock.Call { + consState := testutil.GetConsensusState(clientID, time.Time{}, vals...) + return mocks.MockClientKeeper.EXPECT(). + GetLatestClientConsensusState(ctx, clientID).Return(consState, true).Times(1) +} + +func expectGetClientStateMock(ctx sdk.Context, mocks testutil.MockedKeepers, chainID, clientID string) *gomock.Call { + cs := testutil.GetClientState(chainID) + return mocks.MockClientKeeper.EXPECT().GetClientState(ctx, clientID).Return(cs, true).Times(1) +} + +func expectCreateClientMock(ctx sdk.Context, mocks testutil.MockedKeepers, chainID, clientID string, vals ...*tmtypes.Validator) *gomock.Call { + cs := testutil.GetClientState(chainID) + consState := testutil.GetConsensusState(clientID, time.Time{}, vals...) + + return mocks.MockClientKeeper.EXPECT().CreateClient(ctx, cs, consState).Return(clientID, nil).Times(1) +} + +func expectGetCapabilityMock(ctx sdk.Context, mocks testutil.MockedKeepers) *gomock.Call { + return mocks.MockScopedKeeper.EXPECT().GetCapability( + ctx, host.PortPath(ccv.ConsumerPortID), + ).Return(nil, true).Times(1) +} diff --git a/x/ccv/consumer/keeper/keeper.go b/x/ccv/consumer/keeper/keeper.go index f0dfb9edd5..c7433ffaf0 100644 --- a/x/ccv/consumer/keeper/keeper.go +++ b/x/ccv/consumer/keeper/keeper.go @@ -1,9 +1,7 @@ package keeper import ( - "bytes" "encoding/binary" - "encoding/json" "fmt" "time" @@ -122,7 +120,7 @@ func (k Keeper) GetPort(ctx sdk.Context) string { return string(store.Get(types.PortKey())) } -// SetPort sets the portID for the transfer module. Used in InitGenesis +// SetPort sets the portID for the CCV module. Used in InitGenesis func (k Keeper) SetPort(ctx sdk.Context, portID string) { store := ctx.KVStore(k.storeKey) store.Set(types.PortKey(), []byte(portID)) @@ -133,8 +131,7 @@ func (k Keeper) AuthenticateCapability(ctx sdk.Context, cap *capabilitytypes.Cap return k.scopedKeeper.AuthenticateCapability(ctx, cap, name) } -// ClaimCapability allows the transfer module that can claim a capability that IBC module -// passes to it +// ClaimCapability claims a capability that the IBC module passes to it func (k Keeper) ClaimCapability(ctx sdk.Context, cap *capabilitytypes.Capability, name string) error { return k.scopedKeeper.ClaimCapability(ctx, cap, name) } @@ -163,14 +160,14 @@ func (k Keeper) DeleteUnbondingTime(ctx sdk.Context) { store.Delete(types.UnbondingTimeKey()) } -// SetProviderClientID sets the provider clientID that is validating the chain. +// SetProviderClientID sets the clientID for the client to the provider. // Set in InitGenesis func (k Keeper) SetProviderClientID(ctx sdk.Context, clientID string) { store := ctx.KVStore(k.storeKey) store.Set(types.ProviderClientIDKey(), []byte(clientID)) } -// GetProviderClientID gets the provider clientID that is validating the chain. +// GetProviderClientID gets the clientID for the client to the provider. func (k Keeper) GetProviderClientID(ctx sdk.Context) (string, bool) { store := ctx.KVStore(k.storeKey) clientIdBytes := store.Get(types.ProviderClientIDKey()) @@ -180,13 +177,13 @@ func (k Keeper) GetProviderClientID(ctx sdk.Context) (string, bool) { return string(clientIdBytes), true } -// SetProviderChannel sets the provider channelID that is validating the chain. +// SetProviderChannel sets the channelID for the channel to the provider. 
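+// Once set, the stored channel ID marks the CCV channel as established;
+// ExportGenesis, for example, switches to the restart genesis path when
+// GetProviderChannel returns ok.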
func (k Keeper) SetProviderChannel(ctx sdk.Context, channelID string) { store := ctx.KVStore(k.storeKey) store.Set(types.ProviderChannelKey(), []byte(channelID)) } -// GetProviderChannel gets the provider channelID that is validating the chain. +// GetProviderChannel gets the channelID for the channel to the provider. func (k Keeper) GetProviderChannel(ctx sdk.Context) (string, bool) { store := ctx.KVStore(k.storeKey) channelIdBytes := store.Get(types.ProviderChannelKey()) @@ -196,7 +193,7 @@ func (k Keeper) GetProviderChannel(ctx sdk.Context) (string, bool) { return string(channelIdBytes), true } -// DeleteProviderChannel deletes the provider channel ID that is validating the chain. +// DeleteProviderChannel deletes the channelID for the channel to the provider. func (k Keeper) DeleteProviderChannel(ctx sdk.Context) { store := ctx.KVStore(k.storeKey) store.Delete(types.ProviderChannelKey()) @@ -270,7 +267,7 @@ func (k Keeper) GetPacketMaturityTime(ctx sdk.Context, vscId uint64) uint64 { return binary.BigEndian.Uint64(bz) } -// DeletePacketMaturityTime deletes the the maturity time for a given received VSC packet id +// DeletePacketMaturityTime deletes the packet maturity time for a given received VSC packet id func (k Keeper) DeletePacketMaturityTime(ctx sdk.Context, vscId uint64) { store := ctx.KVStore(k.storeKey) store.Delete(types.PacketMaturityTimeKey(vscId)) @@ -278,7 +275,7 @@ func (k Keeper) DeletePacketMaturityTime(ctx sdk.Context, vscId uint64) { // VerifyProviderChain verifies that the chain trying to connect on the channel handshake // is the expected provider chain. -func (k Keeper) VerifyProviderChain(ctx sdk.Context, channelID string, connectionHops []string) error { +func (k Keeper) VerifyProviderChain(ctx sdk.Context, connectionHops []string) error { if len(connectionHops) != 1 { return sdkerrors.Wrap(channeltypes.ErrTooManyConnectionHops, "must have direct connection to provider chain") } @@ -323,6 +320,24 @@ func (k Keeper) DeleteHeightValsetUpdateID(ctx sdk.Context, height uint64) { store.Delete(types.HeightValsetUpdateIDKey(height)) } +// IterateHeightToValsetUpdateID iterates over the block height to valset update ID mapping in store +func (k Keeper) IterateHeightToValsetUpdateID(ctx sdk.Context, cb func(height, vscID uint64) bool) { + store := ctx.KVStore(k.storeKey) + iterator := sdk.KVStorePrefixIterator(store, []byte{types.HeightValsetUpdateIDBytePrefix}) + + defer iterator.Close() + for ; iterator.Valid(); iterator.Next() { + heightBytes := iterator.Key()[1:] + height := binary.BigEndian.Uint64(heightBytes) + + vscID := binary.BigEndian.Uint64(iterator.Value()) + + if !cb(height, vscID) { + break + } + } +} + // OutstandingDowntime returns the outstanding downtime flag for a given validator func (k Keeper) OutstandingDowntime(ctx sdk.Context, address sdk.ConsAddress) bool { store := ctx.KVStore(k.storeKey) @@ -336,9 +351,9 @@ func (k Keeper) SetOutstandingDowntime(ctx sdk.Context, address sdk.ConsAddress) store.Set(types.OutstandingDowntimeKey(address), []byte{}) } -// ClearOutstandingDowntime clears the outstanding downtime flag for a given validator -func (k Keeper) ClearOutstandingDowntime(ctx sdk.Context, address string) { - consAddr, err := sdk.ConsAddressFromBech32(address) +// DeleteOutstandingDowntime deletes the outstanding downtime flag for the given validator consensus address +func (k Keeper) DeleteOutstandingDowntime(ctx sdk.Context, consAddress string) { + consAddr, err := sdk.ConsAddressFromBech32(consAddress) if err != nil { return } @@ -346,6 
+361,21 @@ func (k Keeper) ClearOutstandingDowntime(ctx sdk.Context, address string) { store.Delete(types.OutstandingDowntimeKey(consAddr)) } +// IterateOutstandingDowntime iterates over the validator addresses of outstanding downtime flags +func (k Keeper) IterateOutstandingDowntime(ctx sdk.Context, cb func(address string) bool) { + store := ctx.KVStore(k.storeKey) + iterator := sdk.KVStorePrefixIterator(store, []byte{types.OutstandingDowntimeBytePrefix}) + + defer iterator.Close() + for ; iterator.Valid(); iterator.Next() { + addrBytes := iterator.Key()[1:] + addr := sdk.ConsAddress(addrBytes).String() + if !cb(addr) { + break + } + } +} + // SetCCValidator sets a cross-chain validator under its validator address func (k Keeper) SetCCValidator(ctx sdk.Context, v types.CrossChainValidator) { store := ctx.KVStore(k.storeKey) @@ -389,41 +419,42 @@ func (k Keeper) GetAllCCValidator(ctx sdk.Context) (validators []types.CrossChai } // SetPendingSlashRequests sets the pending slash requests in store -func (k Keeper) SetPendingSlashRequests(ctx sdk.Context, requests []types.SlashRequest) { +func (k Keeper) SetPendingSlashRequests(ctx sdk.Context, requests types.SlashRequests) { store := ctx.KVStore(k.storeKey) - buf := &bytes.Buffer{} - err := json.NewEncoder(buf).Encode(&requests) + bz, err := requests.Marshal() if err != nil { panic(fmt.Errorf("failed to encode slash request json: %w", err)) } - store.Set([]byte{types.PendingSlashRequestsBytePrefix}, buf.Bytes()) + store.Set([]byte{types.PendingSlashRequestsBytePrefix}, bz) } // GetPendingSlashRequest returns the pending slash requests in store -func (k Keeper) GetPendingSlashRequests(ctx sdk.Context) (requests []types.SlashRequest) { +func (k Keeper) GetPendingSlashRequests(ctx sdk.Context) types.SlashRequests { store := ctx.KVStore(k.storeKey) bz := store.Get([]byte{types.PendingSlashRequestsBytePrefix}) if bz == nil { - return + return types.SlashRequests{} } - buf := bytes.NewBuffer(bz) - err := json.NewDecoder(buf).Decode(&requests) + + var sr types.SlashRequests + err := sr.Unmarshal(bz) if err != nil { panic(fmt.Errorf("failed to decode slash request json: %w", err)) } - return -} - -// AppendPendingSlashRequests appends the given slash request to the pending slash requests in store -func (k Keeper) AppendPendingSlashRequests(ctx sdk.Context, req types.SlashRequest) { - requests := k.GetPendingSlashRequests(ctx) - requests = append(requests, req) - k.SetPendingSlashRequests(ctx, requests) + return sr } // ClearPendingSlashRequests clears the pending slash requests in store -func (k Keeper) ClearPendingSlashRequests(ctx sdk.Context) { +func (k Keeper) DeletePendingSlashRequests(ctx sdk.Context) { store := ctx.KVStore(k.storeKey) store.Delete([]byte{types.PendingSlashRequestsBytePrefix}) } + +// AppendPendingSlashRequests appends the given slash request to the pending slash requests in store +func (k Keeper) AppendPendingSlashRequests(ctx sdk.Context, req types.SlashRequest) { + sr := k.GetPendingSlashRequests(ctx) + srArray := sr.GetRequests() + srArray = append(srArray, req) + k.SetPendingSlashRequests(ctx, types.SlashRequests{Requests: srArray}) +} diff --git a/x/ccv/consumer/keeper/keeper_test.go b/x/ccv/consumer/keeper/keeper_test.go index 6c40a6b0bd..67a5db53ca 100644 --- a/x/ccv/consumer/keeper/keeper_test.go +++ b/x/ccv/consumer/keeper/keeper_test.go @@ -4,12 +4,13 @@ import ( "testing" "time" - "github.com/cosmos/cosmos-sdk/codec" - codectypes "github.com/cosmos/cosmos-sdk/codec/types" 
"github.com/cosmos/cosmos-sdk/crypto/keys/ed25519" + sdk "github.com/cosmos/cosmos-sdk/types" + conntypes "github.com/cosmos/ibc-go/v3/modules/core/03-connection/types" testkeeper "github.com/cosmos/interchain-security/testutil/keeper" "github.com/cosmos/interchain-security/x/ccv/consumer/types" ccv "github.com/cosmos/interchain-security/x/ccv/types" + "github.com/golang/mock/gomock" "github.com/stretchr/testify/require" abci "github.com/tendermint/tendermint/abci/types" @@ -18,7 +19,10 @@ import ( // TestUnbondingTime tests getter and setter functionality for the unbonding period of a consumer chain func TestUnbondingTime(t *testing.T) { - consumerKeeper, ctx := testkeeper.GetConsumerKeeperAndCtx(t) + + consumerKeeper, ctx, ctrl, _ := testkeeper.GetConsumerKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() + _, ok := consumerKeeper.GetUnbondingTime(ctx) require.False(t, ok) unbondingPeriod := time.Hour * 24 * 7 * 3 @@ -30,7 +34,10 @@ func TestUnbondingTime(t *testing.T) { // TestProviderClientID tests getter and setter functionality for the client ID stored on consumer keeper func TestProviderClientID(t *testing.T) { - consumerKeeper, ctx := testkeeper.GetConsumerKeeperAndCtx(t) + + consumerKeeper, ctx, ctrl, _ := testkeeper.GetConsumerKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() + _, ok := consumerKeeper.GetProviderClientID(ctx) require.False(t, ok) consumerKeeper.SetProviderClientID(ctx, "someClientID") @@ -41,7 +48,10 @@ func TestProviderClientID(t *testing.T) { // TestProviderChannel tests getter and setter functionality for the channel ID stored on consumer keeper func TestProviderChannel(t *testing.T) { - consumerKeeper, ctx := testkeeper.GetConsumerKeeperAndCtx(t) + + consumerKeeper, ctx, ctrl, _ := testkeeper.GetConsumerKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() + _, ok := consumerKeeper.GetProviderChannel(ctx) require.False(t, ok) consumerKeeper.SetProviderChannel(ctx, "channelID") @@ -72,7 +82,9 @@ func TestPendingChanges(t *testing.T) { nil, ) - consumerKeeper, ctx := testkeeper.GetConsumerKeeperAndCtx(t) + consumerKeeper, ctx, ctrl, _ := testkeeper.GetConsumerKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() + err = consumerKeeper.SetPendingChanges(ctx, pd) require.NoError(t, err) gotPd, ok := consumerKeeper.GetPendingChanges(ctx) @@ -86,7 +98,10 @@ func TestPendingChanges(t *testing.T) { // TestPacketMaturityTime tests getter, setter, and iterator functionality for the packet maturity time of a received VSC packet func TestPacketMaturityTime(t *testing.T) { - consumerKeeper, ctx := testkeeper.GetConsumerKeeperAndCtx(t) + + consumerKeeper, ctx, ctrl, _ := testkeeper.GetConsumerKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() + consumerKeeper.SetPacketMaturityTime(ctx, 1, 10) consumerKeeper.SetPacketMaturityTime(ctx, 2, 25) consumerKeeper.SetPacketMaturityTime(ctx, 5, 15) @@ -115,20 +130,11 @@ func TestPacketMaturityTime(t *testing.T) { // TestCrossChainValidator tests the getter, setter, and deletion method for cross chain validator records func TestCrossChainValidator(t *testing.T) { - // Construct a keeper with a custom codec - // TODO: Ensure all custom interfaces are registered in prod, see https://github.com/cosmos/interchain-security/issues/273 - _, storeKey, paramsSubspace, ctx := testkeeper.SetupInMemKeeper(t) - ir := codectypes.NewInterfaceRegistry() - - // Public key implementation must be registered - cryptocodec.RegisterInterfaces(ir) 
- cdc := codec.NewProtoCodec(ir) - - consumerKeeper := testkeeper.GetCustomConsumerKeeper( - cdc, - storeKey, - paramsSubspace, - ) + keeperParams := testkeeper.NewInMemKeeperParams(t) + // Explicitly register codec with public key interface + keeperParams.RegisterSdkCryptoCodecInterfaces() + consumerKeeper, ctx, ctrl, _ := testkeeper.GetConsumerKeeperAndCtx(t, keeperParams) + defer ctrl.Finish() // should return false _, found := consumerKeeper.GetCCValidator(ctx, ed25519.GenPrivKey().PubKey().Address()) @@ -165,13 +171,15 @@ func TestCrossChainValidator(t *testing.T) { // TestPendingSlashRequests tests the getter, setter, appending method, and deletion method for pending slash requests func TestPendingSlashRequests(t *testing.T) { - consumerKeeper, ctx := testkeeper.GetConsumerKeeperAndCtx(t) + + consumerKeeper, ctx, ctrl, _ := testkeeper.GetConsumerKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() // prepare test setup by storing 10 pending slash requests - request := []types.SlashRequest{} + requests := []types.SlashRequest{} for i := 0; i < 10; i++ { - request = append(request, types.SlashRequest{}) - consumerKeeper.SetPendingSlashRequests(ctx, request) + requests = append(requests, types.SlashRequest{}) + consumerKeeper.SetPendingSlashRequests(ctx, types.SlashRequests{Requests: requests}) } // test set, append and clear operations @@ -185,7 +193,7 @@ func TestPendingSlashRequests(t *testing.T) { operation: func() { consumerKeeper.AppendPendingSlashRequests(ctx, types.SlashRequest{}) }, expLen: 11, }, { - operation: func() { consumerKeeper.ClearPendingSlashRequests(ctx) }, + operation: func() { consumerKeeper.DeletePendingSlashRequests(ctx) }, expLen: 0, }, } @@ -193,6 +201,89 @@ func TestPendingSlashRequests(t *testing.T) { for _, tc := range testCases { tc.operation() requests := consumerKeeper.GetPendingSlashRequests(ctx) - require.Len(t, requests, tc.expLen) + require.Len(t, requests.Requests, tc.expLen) + } +} + +// TestVerifyProviderChain tests the VerifyProviderChain method for the consumer keeper +func TestVerifyProviderChain(t *testing.T) { + + testCases := []struct { + name string + // State-mutating setup specific to this test case + mockSetup func(sdk.Context, testkeeper.MockedKeepers) + connectionHops []string + expError bool + }{ + { + name: "success", + mockSetup: func(ctx sdk.Context, mocks testkeeper.MockedKeepers) { + gomock.InOrder( + mocks.MockConnectionKeeper.EXPECT().GetConnection( + ctx, "connectionID", + ).Return(conntypes.ConnectionEnd{ClientId: "clientID"}, true).Times(1), + ) + }, + connectionHops: []string{"connectionID"}, + expError: false, + }, + { + name: "connection hops is not length 1", + mockSetup: func(ctx sdk.Context, mocks testkeeper.MockedKeepers) { + // Expect no calls to GetConnection(), VerifyProviderChain will return from first step. 
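+				// Times(0) makes gomock fail the test if GetConnection is invoked at all.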
+ gomock.InAnyOrder( + mocks.MockConnectionKeeper.EXPECT().GetConnection(gomock.Any(), gomock.Any()).Times(0), + ) + }, + connectionHops: []string{"connectionID", "otherConnID"}, + expError: true, + }, + { + name: "connection does not exist", + mockSetup: func(ctx sdk.Context, mocks testkeeper.MockedKeepers) { + gomock.InOrder( + mocks.MockConnectionKeeper.EXPECT().GetConnection( + ctx, "connectionID").Return(conntypes.ConnectionEnd{}, + false, // Found is returned as false + ).Times(1), + ) + }, + connectionHops: []string{"connectionID"}, + expError: true, + }, + { + name: "found clientID does not match expectation", + mockSetup: func(ctx sdk.Context, mocks testkeeper.MockedKeepers) { + gomock.InOrder( + mocks.MockConnectionKeeper.EXPECT().GetConnection( + ctx, "connectionID").Return( + conntypes.ConnectionEnd{ClientId: "unexpectedClientID"}, true, + ).Times(1), + ) + }, + connectionHops: []string{"connectionID"}, + expError: true, + }, + } + + for _, tc := range testCases { + + keeperParams := testkeeper.NewInMemKeeperParams(t) + consumerKeeper, ctx, ctrl, mocks := testkeeper.GetConsumerKeeperAndCtx(t, keeperParams) + + // Common setup + consumerKeeper.SetProviderClientID(ctx, "clientID") // Set expected provider clientID + + // Specific mock setup + tc.mockSetup(ctx, mocks) + + err := consumerKeeper.VerifyProviderChain(ctx, tc.connectionHops) + + if tc.expError { + require.Error(t, err, "invalid case did not return error") + } else { + require.NoError(t, err, "valid case returned error") + } + ctrl.Finish() } } diff --git a/x/ccv/consumer/keeper/params_test.go b/x/ccv/consumer/keeper/params_test.go index 3de23cb73f..0bc043d984 100644 --- a/x/ccv/consumer/keeper/params_test.go +++ b/x/ccv/consumer/keeper/params_test.go @@ -10,7 +10,8 @@ import ( // TestParams tests the default params set for a consumer chain, and related getters/setters func TestParams(t *testing.T) { - consumerKeeper, ctx := testkeeper.GetConsumerKeeperAndCtx(t) + consumerKeeper, ctx, ctrl, _ := testkeeper.GetConsumerKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() consumerKeeper.SetParams(ctx, types.DefaultParams()) expParams := types.NewParams(false, 1000, "", "") // these are the default params, IBC suite independently sets enabled=true diff --git a/x/ccv/consumer/keeper/relay.go b/x/ccv/consumer/keeper/relay.go index 65f607bc17..b32ee25d8b 100644 --- a/x/ccv/consumer/keeper/relay.go +++ b/x/ccv/consumer/keeper/relay.go @@ -64,9 +64,10 @@ func (k Keeper) OnRecvVSCPacket(ctx sdk.Context, packet channeltypes.Packet, new // set height to VSC id mapping k.SetHeightValsetUpdateID(ctx, uint64(ctx.BlockHeight())+1, newChanges.ValsetUpdateId) - // set outstanding slashing flags to false + // remove outstanding slashing flags of the validators + // for which the slashing was acknowledged by the provider chain for _, addr := range newChanges.GetSlashAcks() { - k.ClearOutstandingDowntime(ctx, addr) + k.DeleteOutstandingDowntime(ctx, addr) } ack := channeltypes.NewResultAcknowledgement([]byte{byte(1)}) @@ -172,7 +173,7 @@ func (k Keeper) SendPendingSlashRequests(ctx sdk.Context) { } // iterate over pending slash requests in reverse order - requests := k.GetPendingSlashRequests(ctx) + requests := k.GetPendingSlashRequests(ctx).Requests for i := len(requests) - 1; i >= 0; i-- { slashReq := requests[i] @@ -201,7 +202,7 @@ func (k Keeper) SendPendingSlashRequests(ctx sdk.Context) { } // clear pending slash requests - k.ClearPendingSlashRequests(ctx) + k.DeletePendingSlashRequests(ctx) } // 
OnAcknowledgementPacket executes application logic for acknowledgments of sent VSCMatured and Slash packets diff --git a/x/ccv/consumer/keeper/relay_test.go b/x/ccv/consumer/keeper/relay_test.go index 95e5b7a2cd..028675793c 100644 --- a/x/ccv/consumer/keeper/relay_test.go +++ b/x/ccv/consumer/keeper/relay_test.go @@ -109,29 +109,8 @@ func TestOnRecvVSCPacket(t *testing.T) { }, } - // Instantiate custom keeper with mocks - ctrl := gomock.NewController(t) + consumerKeeper, ctx, ctrl, _ := testkeeper.GetConsumerKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) defer ctrl.Finish() - cdc, storeKey, paramsSubspace, ctx := testkeeper.SetupInMemKeeper(t) - - mockScopedKeeper := testkeeper.NewMockScopedKeeper(ctrl) - mockChannelKeeper := testkeeper.NewMockChannelKeeper(ctrl) - - consumerKeeper := testkeeper.GetCustomConsumerKeeperWithMocks( - cdc, - storeKey, - paramsSubspace, - mockScopedKeeper, - mockChannelKeeper, - testkeeper.NewMockPortKeeper(ctrl), - testkeeper.NewMockConnectionKeeper(ctrl), - testkeeper.NewMockClientKeeper(ctrl), - testkeeper.NewMockSlashingKeeper(ctrl), - testkeeper.NewMockBankKeeper(ctrl), - testkeeper.NewMockAccountKeeper(ctrl), - testkeeper.NewMockIBCTransferKeeper(ctrl), - testkeeper.NewMockIBCCoreKeeper(ctrl), - ) // Set channel to provider, still in context of consumer chain consumerKeeper.SetProviderChannel(ctx, consumerCCVChannelID) @@ -181,29 +160,13 @@ func TestOnAcknowledgementPacket(t *testing.T) { // Channel ID on destination (counter party) chain channelIDOnDest := "ChannelIDOnDest" - // Instantiate custom keeper with mocks + // Instantiate in-mem keeper with mocks ctrl := gomock.NewController(t) defer ctrl.Finish() - cdc, storeKey, paramsSubspace, ctx := testkeeper.SetupInMemKeeper(t) - - mockScopedKeeper := testkeeper.NewMockScopedKeeper(ctrl) - mockChannelKeeper := testkeeper.NewMockChannelKeeper(ctrl) - - consumerKeeper := testkeeper.GetCustomConsumerKeeperWithMocks( - cdc, - storeKey, - paramsSubspace, - mockScopedKeeper, - mockChannelKeeper, - testkeeper.NewMockPortKeeper(ctrl), - testkeeper.NewMockConnectionKeeper(ctrl), - testkeeper.NewMockClientKeeper(ctrl), - testkeeper.NewMockSlashingKeeper(ctrl), - testkeeper.NewMockBankKeeper(ctrl), - testkeeper.NewMockAccountKeeper(ctrl), - testkeeper.NewMockIBCTransferKeeper(ctrl), - testkeeper.NewMockIBCCoreKeeper(ctrl), - ) + keeperParams := testkeeper.NewInMemKeeperParams(t) + mocks := testkeeper.NewMockedKeepers(ctrl) + consumerKeeper := testkeeper.NewInMemConsumerKeeper(keeperParams, mocks) + ctx := keeperParams.Ctx // Set an established provider channel for later in test consumerKeeper.SetProviderChannel(ctx, channelIDToProvider) @@ -235,20 +198,20 @@ func TestOnAcknowledgementPacket(t *testing.T) { dummyCap := &capabilitytypes.Capability{} gomock.InOrder( - mockScopedKeeper.EXPECT().GetCapability( + mocks.MockScopedKeeper.EXPECT().GetCapability( ctx, host.ChannelCapabilityPath(ccv.ConsumerPortID, channelIDToDestChain), ).Return(dummyCap, true).Times(1), // Due to input error ack, ChanCloseInit is called on channel to destination chain - mockChannelKeeper.EXPECT().ChanCloseInit( + mocks.MockChannelKeeper.EXPECT().ChanCloseInit( ctx, ccv.ConsumerPortID, channelIDToDestChain, dummyCap, ).Return(nil).Times(1), - mockScopedKeeper.EXPECT().GetCapability( + mocks.MockScopedKeeper.EXPECT().GetCapability( ctx, host.ChannelCapabilityPath(ccv.ConsumerPortID, channelIDToProvider), ).Return(dummyCap, true).Times(1), // Due to input error ack and existence of established channel to provider, // ChanCloseInit is 
called on channel to provider - mockChannelKeeper.EXPECT().ChanCloseInit( + mocks.MockChannelKeeper.EXPECT().ChanCloseInit( ctx, ccv.ConsumerPortID, channelIDToProvider, dummyCap, ).Return(nil).Times(1), ) diff --git a/x/ccv/consumer/keeper/validators.go b/x/ccv/consumer/keeper/validators.go index ed8c54a58a..eeb94d5c93 100644 --- a/x/ccv/consumer/keeper/validators.go +++ b/x/ccv/consumer/keeper/validators.go @@ -202,3 +202,21 @@ func (k Keeper) TrackHistoricalInfo(ctx sdk.Context) { // Set latest HistoricalInfo at current height k.SetHistoricalInfo(ctx, ctx.BlockHeight(), &historicalEntry) } + +// ValidatorUpdates gets all cross-chain validators converted to the ABCI validator update type +func (k Keeper) GetValidatorUpdates(ctx sdk.Context) ([]abci.ValidatorUpdate, error) { + vals := k.GetAllCCValidator(ctx) + valUpdates := make([]abci.ValidatorUpdate, 0, len(vals)) + for _, v := range vals { + pk, err := v.ConsPubKey() + if err != nil { + return nil, err + } + tmPK, err := cryptocodec.ToTmProtoPublicKey(pk) + if err != nil { + return nil, err + } + valUpdates = append(valUpdates, abci.ValidatorUpdate{PubKey: tmPK, Power: v.Power}) + } + return valUpdates, nil +} diff --git a/x/ccv/consumer/keeper/validators_test.go b/x/ccv/consumer/keeper/validators_test.go index 15f14ed1b5..2b4f1484fe 100644 --- a/x/ccv/consumer/keeper/validators_test.go +++ b/x/ccv/consumer/keeper/validators_test.go @@ -3,8 +3,6 @@ package keeper_test import ( "testing" - "github.com/cosmos/cosmos-sdk/codec" - codectypes "github.com/cosmos/cosmos-sdk/codec/types" cryptocodec "github.com/cosmos/cosmos-sdk/crypto/codec" sdk "github.com/cosmos/cosmos-sdk/types" stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" @@ -19,19 +17,12 @@ import ( // TestApplyCCValidatorChanges tests the ApplyCCValidatorChanges method for a consumer keeper func TestApplyCCValidatorChanges(t *testing.T) { - // Construct a keeper with a custom codec - _, storeKey, paramsSubspace, ctx := testkeeper.SetupInMemKeeper(t) - ir := codectypes.NewInterfaceRegistry() - // Public key implementation must be registered - cryptocodec.RegisterInterfaces(ir) - cdc := codec.NewProtoCodec(ir) - - consumerKeeper := testkeeper.GetCustomConsumerKeeper( - cdc, - storeKey, - paramsSubspace, - ) + keeperParams := testkeeper.NewInMemKeeperParams(t) + // Explicitly register cdc with public key interface + keeperParams.RegisterSdkCryptoCodecInterfaces() + consumerKeeper, ctx, ctrl, _ := testkeeper.GetConsumerKeeperAndCtx(t, keeperParams) + defer ctrl.Finish() // utility functions getCCVals := func() (vals []types.CrossChainValidator) { @@ -116,20 +107,11 @@ func TestApplyCCValidatorChanges(t *testing.T) { // Tests the getter and setter behavior for historical info func TestHistoricalInfo(t *testing.T) { - // Construct a keeper with a custom codec - _, storeKey, paramsSubspace, ctx := testkeeper.SetupInMemKeeper(t) - ir := codectypes.NewInterfaceRegistry() - - // Public key implementation must be registered - cryptocodec.RegisterInterfaces(ir) - cdc := codec.NewProtoCodec(ir) - - consumerKeeper := testkeeper.GetCustomConsumerKeeper( - cdc, - storeKey, - paramsSubspace, - ) - + keeperParams := testkeeper.NewInMemKeeperParams(t) + // Explicitly register cdc with public key interface + keeperParams.RegisterSdkCryptoCodecInterfaces() + consumerKeeper, ctx, ctrl, _ := testkeeper.GetConsumerKeeperAndCtx(t, keeperParams) + defer ctrl.Finish() ctx = ctx.WithBlockHeight(15) // Generate test validators, save them to store, and retrieve stored records diff --git 
a/x/ccv/consumer/module.go b/x/ccv/consumer/module.go index bb6a9d7cbf..3c8080d5c1 100644 --- a/x/ccv/consumer/module.go +++ b/x/ccv/consumer/module.go @@ -150,9 +150,6 @@ func (am AppModule) BeginBlock(ctx sdk.Context, req abci.RequestBeginBlock) { // the CCV channel was established, but it was then closed; // the consumer chain is no longer safe - // cleanup state - am.keeper.DeleteProviderChannel(ctx) - channelClosedMsg := fmt.Sprintf("CCV channel %q was closed - shutdown consumer chain since it is not secured anymore", channelID) ctx.Logger().Error(channelClosedMsg) panic(channelClosedMsg) diff --git a/x/ccv/consumer/types/codec.go b/x/ccv/consumer/types/codec.go new file mode 100644 index 0000000000..e90cf8a7f9 --- /dev/null +++ b/x/ccv/consumer/types/codec.go @@ -0,0 +1,10 @@ +package types + +// // RegisterInterfaces register the ibc transfer module interfaces to protobuf +// // Any. +// func RegisterInterfaces(registry codectypes.InterfaceRegistry) { +// registry.RegisterImplementations( +// (*govtypes.Content)(nil), +// &ConsumerAdditionProposal{}, +// ) +// } diff --git a/x/ccv/consumer/types/consumer.pb.go b/x/ccv/consumer/types/consumer.pb.go index df4994c932..f6a55d06c0 100644 --- a/x/ccv/consumer/types/consumer.pb.go +++ b/x/ccv/consumer/types/consumer.pb.go @@ -29,6 +29,8 @@ const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package // Params defines the parameters for CCV consumer module type Params struct { + // TODO: Remove enabled flag and find a better way to setup e2e tests + // See: https://github.com/cosmos/interchain-security/issues/339 Enabled bool `protobuf:"varint,1,opt,name=enabled,proto3" json:"enabled,omitempty"` /////////////////////// // Distribution Params @@ -266,11 +268,57 @@ func (m *SlashRequest) GetInfraction() types2.InfractionType { return types2.InfractionEmpty } +// SlashRequests is a list of slash requests for CCV consumer module +type SlashRequests struct { + Requests []SlashRequest `protobuf:"bytes,1,rep,name=requests,proto3" json:"requests"` +} + +func (m *SlashRequests) Reset() { *m = SlashRequests{} } +func (m *SlashRequests) String() string { return proto.CompactTextString(m) } +func (*SlashRequests) ProtoMessage() {} +func (*SlashRequests) Descriptor() ([]byte, []int) { + return fileDescriptor_5b27a82b276e7f93, []int{4} +} +func (m *SlashRequests) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *SlashRequests) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_SlashRequests.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *SlashRequests) XXX_Merge(src proto.Message) { + xxx_messageInfo_SlashRequests.Merge(m, src) +} +func (m *SlashRequests) XXX_Size() int { + return m.Size() +} +func (m *SlashRequests) XXX_DiscardUnknown() { + xxx_messageInfo_SlashRequests.DiscardUnknown(m) +} + +var xxx_messageInfo_SlashRequests proto.InternalMessageInfo + +func (m *SlashRequests) GetRequests() []SlashRequest { + if m != nil { + return m.Requests + } + return nil +} + func init() { proto.RegisterType((*Params)(nil), "interchain_security.ccv.consumer.v1.Params") proto.RegisterType((*LastTransmissionBlockHeight)(nil), "interchain_security.ccv.consumer.v1.LastTransmissionBlockHeight") proto.RegisterType((*CrossChainValidator)(nil), "interchain_security.ccv.consumer.v1.CrossChainValidator") proto.RegisterType((*SlashRequest)(nil), 
"interchain_security.ccv.consumer.v1.SlashRequest") + proto.RegisterType((*SlashRequests)(nil), "interchain_security.ccv.consumer.v1.SlashRequests") } func init() { @@ -278,44 +326,46 @@ func init() { } var fileDescriptor_5b27a82b276e7f93 = []byte{ - // 578 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x53, 0xcd, 0x6e, 0xd3, 0x4c, - 0x14, 0xad, 0xbf, 0x7e, 0x04, 0x98, 0x56, 0x2c, 0x4c, 0x54, 0x42, 0x91, 0xdc, 0xd6, 0x54, 0xa8, - 0x12, 0xea, 0x58, 0x49, 0xc5, 0x26, 0xbb, 0x26, 0x55, 0xc5, 0x9f, 0x44, 0xe4, 0x46, 0x2c, 0xd8, - 0x58, 0xe3, 0xf1, 0x8d, 0x3d, 0x8a, 0x3d, 0x63, 0x66, 0xc6, 0x06, 0xbf, 0x05, 0x7b, 0x5e, 0x80, - 0x07, 0xe0, 0x21, 0x10, 0xab, 0x2e, 0x59, 0x21, 0x94, 0xbc, 0x01, 0x5b, 0x36, 0xc8, 0x7f, 0x21, - 0x48, 0x64, 0x77, 0x8f, 0xee, 0x39, 0xc7, 0xd7, 0xf7, 0x9e, 0x41, 0x03, 0xc6, 0x35, 0x48, 0x1a, - 0x11, 0xc6, 0x3d, 0x05, 0x34, 0x93, 0x4c, 0x17, 0x0e, 0xa5, 0xb9, 0x43, 0x05, 0x57, 0x59, 0x02, - 0xd2, 0xc9, 0xfb, 0xab, 0x1a, 0xa7, 0x52, 0x68, 0x61, 0x3e, 0xfc, 0x87, 0x06, 0x53, 0x9a, 0xe3, - 0x15, 0x2f, 0xef, 0xef, 0x1f, 0x6f, 0x32, 0x2e, 0xfd, 0x68, 0x5e, 0x5b, 0xed, 0xdf, 0x0f, 0x85, - 0x08, 0x63, 0x70, 0x2a, 0xe4, 0x67, 0x33, 0x87, 0xf0, 0xa2, 0x69, 0x1d, 0x53, 0xa1, 0x12, 0xa1, - 0x1c, 0xa5, 0xc9, 0x9c, 0xf1, 0xd0, 0xc9, 0xfb, 0x3e, 0x68, 0xd2, 0x6f, 0x71, 0xc3, 0xea, 0x86, - 0x22, 0x14, 0x55, 0xe9, 0x94, 0x55, 0x6b, 0x5b, 0x6b, 0xbd, 0xba, 0x51, 0x83, 0xba, 0x65, 0xff, - 0x32, 0x50, 0x67, 0x42, 0x24, 0x49, 0x94, 0xd9, 0x43, 0x37, 0x81, 0x13, 0x3f, 0x86, 0xa0, 0x67, - 0x1c, 0x1a, 0x27, 0xb7, 0xdc, 0x16, 0x9a, 0xaf, 0xd0, 0xb1, 0x1f, 0x0b, 0x3a, 0x57, 0x5e, 0x0a, - 0xd2, 0x0b, 0x98, 0xd2, 0x92, 0xf9, 0x99, 0x66, 0x82, 0x7b, 0x5a, 0x12, 0xae, 0x12, 0xa6, 0x14, - 0x13, 0xbc, 0xf7, 0xdf, 0xa1, 0x71, 0xb2, 0xed, 0x1e, 0xd5, 0xdc, 0x09, 0xc8, 0x8b, 0x35, 0xe6, - 0x74, 0x8d, 0x68, 0x3e, 0x47, 0x47, 0x1b, 0x5d, 0x3c, 0x1a, 0x11, 0xce, 0x21, 0xee, 0x6d, 0x1f, - 0x1a, 0x27, 0xb7, 0xdd, 0x83, 0x60, 0x83, 0xc9, 0xb8, 0xa6, 0x99, 0x43, 0xb4, 0x9f, 0x4a, 0x91, - 0xb3, 0x00, 0xa4, 0x37, 0x03, 0xf0, 0x52, 0x21, 0x62, 0x8f, 0x04, 0x81, 0xf4, 0x94, 0x96, 0xbd, - 0xff, 0x2b, 0x93, 0xbd, 0x96, 0x71, 0x09, 0x30, 0x11, 0x22, 0x3e, 0x0f, 0x02, 0x79, 0xa5, 0xa5, - 0xfd, 0x04, 0x3d, 0x78, 0x49, 0x94, 0x5e, 0xb7, 0x1d, 0x95, 0xc3, 0x3f, 0x05, 0x16, 0x46, 0xda, - 0xdc, 0x43, 0x9d, 0xa8, 0xaa, 0xaa, 0x85, 0x6c, 0xbb, 0x0d, 0xb2, 0x3f, 0x19, 0xe8, 0xee, 0x58, - 0x0a, 0xa5, 0xc6, 0xe5, 0x3d, 0x5f, 0x93, 0x98, 0x05, 0x44, 0x0b, 0x59, 0x6e, 0xb0, 0xfc, 0x30, - 0x28, 0x55, 0x09, 0x76, 0xdd, 0x16, 0x9a, 0x5d, 0x74, 0x23, 0x15, 0xef, 0x40, 0x36, 0x2b, 0xaa, - 0x81, 0x49, 0x50, 0x27, 0xcd, 0xfc, 0x39, 0x14, 0xd5, 0xbf, 0xee, 0x0c, 0xba, 0xb8, 0xbe, 0x3f, - 0x6e, 0xef, 0x8f, 0xcf, 0x79, 0x31, 0x3a, 0xfb, 0xf9, 0xfd, 0xe0, 0x5e, 0x41, 0x92, 0x78, 0x68, - 0x97, 0x89, 0x02, 0xae, 0x32, 0xe5, 0xd5, 0x3a, 0xfb, 0xeb, 0xe7, 0xd3, 0x6e, 0x73, 0x4f, 0x2a, - 0x8b, 0x54, 0x0b, 0x3c, 0xc9, 0xfc, 0x17, 0x50, 0xb8, 0x8d, 0xb1, 0xfd, 0xd1, 0x40, 0xbb, 0x57, - 0x31, 0x51, 0x91, 0x0b, 0x6f, 0x33, 0x50, 0xda, 0x1c, 0xa3, 0x4e, 0x4a, 0xe8, 0x1c, 0xea, 0x7f, - 0xda, 0x19, 0x3c, 0xc6, 0x9b, 0xe2, 0x9b, 0xf7, 0x71, 0xa5, 0x9c, 0x54, 0xf4, 0x0b, 0xa2, 0x89, - 0xdb, 0x48, 0xcd, 0x4b, 0x84, 0x18, 0x9f, 0x49, 0x42, 0x75, 0x7b, 0xf6, 0x3b, 0x83, 0x47, 0xb8, - 0x19, 0xa4, 0x4d, 0x64, 0x93, 0x50, 0xfc, 0x6c, 0xc5, 0x9c, 0x16, 0x29, 0xb8, 0x6b, 0xca, 0xd1, - 0xf4, 0xcb, 0xc2, 0x32, 0xae, 0x17, 0x96, 0xf1, 0x63, 0x61, 0x19, 0x1f, 0x96, 0xd6, 0xd6, 0xf5, - 0xd2, 0xda, 0xfa, 
0xb6, 0xb4, 0xb6, 0xde, 0x0c, 0x43, 0xa6, 0xa3, 0xcc, 0xc7, 0x54, 0x24, 0x4d, - 0x60, 0x9d, 0x3f, 0x73, 0x9e, 0xae, 0x5e, 0xd0, 0xfb, 0xbf, 0x1f, 0xa7, 0x2e, 0x52, 0x50, 0x7e, - 0xa7, 0x5a, 0xdf, 0xd9, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0xb1, 0x85, 0x02, 0x02, 0xcd, 0x03, - 0x00, 0x00, + // 614 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x54, 0xcd, 0x6e, 0xd3, 0x4c, + 0x14, 0x8d, 0xbf, 0xf4, 0x0b, 0x65, 0x5a, 0x58, 0x98, 0xa8, 0x84, 0x22, 0xa5, 0xa9, 0xa9, 0x50, + 0x24, 0xd4, 0xb1, 0x92, 0x8a, 0x4d, 0x77, 0x4d, 0xaa, 0x8a, 0x3f, 0x89, 0xc8, 0xad, 0x58, 0xb0, + 0xb1, 0xc6, 0xe3, 0x5b, 0x67, 0x14, 0x7b, 0xc6, 0xcc, 0x8c, 0x0d, 0x7e, 0x0b, 0xf6, 0xbc, 0x00, + 0x0f, 0xc0, 0x43, 0x54, 0xac, 0xba, 0x64, 0x55, 0xa1, 0xf6, 0x0d, 0xd8, 0xb2, 0x41, 0xfe, 0x0b, + 0x46, 0x22, 0x12, 0xbb, 0x7b, 0x34, 0xe7, 0x1c, 0xcf, 0xdc, 0x73, 0xaf, 0xd1, 0x98, 0x71, 0x0d, + 0x92, 0xce, 0x09, 0xe3, 0xae, 0x02, 0x9a, 0x48, 0xa6, 0x33, 0x9b, 0xd2, 0xd4, 0xa6, 0x82, 0xab, + 0x24, 0x02, 0x69, 0xa7, 0xa3, 0x65, 0x8d, 0x63, 0x29, 0xb4, 0x30, 0x1f, 0xfd, 0x45, 0x83, 0x29, + 0x4d, 0xf1, 0x92, 0x97, 0x8e, 0xb6, 0xf7, 0x56, 0x19, 0xe7, 0x7e, 0x34, 0x2d, 0xad, 0xb6, 0x1f, + 0x04, 0x42, 0x04, 0x21, 0xd8, 0x05, 0xf2, 0x92, 0x73, 0x9b, 0xf0, 0xac, 0x3a, 0xda, 0xa3, 0x42, + 0x45, 0x42, 0xd9, 0x4a, 0x93, 0x05, 0xe3, 0x81, 0x9d, 0x8e, 0x3c, 0xd0, 0x64, 0x54, 0xe3, 0x8a, + 0xd5, 0x0d, 0x44, 0x20, 0x8a, 0xd2, 0xce, 0xab, 0xda, 0xb6, 0xd4, 0xba, 0xe5, 0x41, 0x09, 0xca, + 0x23, 0xeb, 0xa7, 0x81, 0x3a, 0x33, 0x22, 0x49, 0xa4, 0xcc, 0x1e, 0xba, 0x05, 0x9c, 0x78, 0x21, + 0xf8, 0x3d, 0x63, 0x60, 0x0c, 0xd7, 0x9d, 0x1a, 0x9a, 0xaf, 0xd1, 0x9e, 0x17, 0x0a, 0xba, 0x50, + 0x6e, 0x0c, 0xd2, 0xf5, 0x99, 0xd2, 0x92, 0x79, 0x89, 0x66, 0x82, 0xbb, 0x5a, 0x12, 0xae, 0x22, + 0xa6, 0x14, 0x13, 0xbc, 0xf7, 0xdf, 0xc0, 0x18, 0xb6, 0x9d, 0xdd, 0x92, 0x3b, 0x03, 0x79, 0xdc, + 0x60, 0x9e, 0x35, 0x88, 0xe6, 0x0b, 0xb4, 0xbb, 0xd2, 0xc5, 0xa5, 0x73, 0xc2, 0x39, 0x84, 0xbd, + 0xf6, 0xc0, 0x18, 0xde, 0x76, 0x76, 0xfc, 0x15, 0x26, 0xd3, 0x92, 0x66, 0x1e, 0xa2, 0xed, 0x58, + 0x8a, 0x94, 0xf9, 0x20, 0xdd, 0x73, 0x00, 0x37, 0x16, 0x22, 0x74, 0x89, 0xef, 0x4b, 0x57, 0x69, + 0xd9, 0x5b, 0x2b, 0x4c, 0xb6, 0x6a, 0xc6, 0x09, 0xc0, 0x4c, 0x88, 0xf0, 0xc8, 0xf7, 0xe5, 0xa9, + 0x96, 0xd6, 0x53, 0xf4, 0xf0, 0x15, 0x51, 0xba, 0x69, 0x3b, 0xc9, 0x2f, 0xff, 0x0c, 0x58, 0x30, + 0xd7, 0xe6, 0x16, 0xea, 0xcc, 0x8b, 0xaa, 0x68, 0x48, 0xdb, 0xa9, 0x90, 0xf5, 0xd9, 0x40, 0xf7, + 0xa6, 0x52, 0x28, 0x35, 0xcd, 0xf3, 0x7c, 0x43, 0x42, 0xe6, 0x13, 0x2d, 0x64, 0xde, 0xc1, 0xfc, + 0xc3, 0xa0, 0x54, 0x21, 0xd8, 0x74, 0x6a, 0x68, 0x76, 0xd1, 0xff, 0xb1, 0x78, 0x0f, 0xb2, 0x6a, + 0x51, 0x09, 0x4c, 0x82, 0x3a, 0x71, 0xe2, 0x2d, 0x20, 0x2b, 0xde, 0xba, 0x31, 0xee, 0xe2, 0x32, + 0x7f, 0x5c, 0xe7, 0x8f, 0x8f, 0x78, 0x36, 0x39, 0xf8, 0x71, 0xb5, 0x73, 0x3f, 0x23, 0x51, 0x78, + 0x68, 0xe5, 0x13, 0x05, 0x5c, 0x25, 0xca, 0x2d, 0x75, 0xd6, 0xd7, 0x2f, 0xfb, 0xdd, 0x2a, 0x4f, + 0x2a, 0xb3, 0x58, 0x0b, 0x3c, 0x4b, 0xbc, 0x97, 0x90, 0x39, 0x95, 0xb1, 0xf5, 0xc9, 0x40, 0x9b, + 0xa7, 0x21, 0x51, 0x73, 0x07, 0xde, 0x25, 0xa0, 0xb4, 0x39, 0x45, 0x9d, 0x98, 0xd0, 0x05, 0x94, + 0x6f, 0xda, 0x18, 0x3f, 0xc1, 0xab, 0xc6, 0x37, 0x1d, 0xe1, 0x42, 0x39, 0x2b, 0xe8, 0xc7, 0x44, + 0x13, 0xa7, 0x92, 0x9a, 0x27, 0x08, 0x31, 0x7e, 0x2e, 0x09, 0xd5, 0x75, 0xec, 0x77, 0xc7, 0x8f, + 0x71, 0x75, 0x91, 0x7a, 0x22, 0xab, 0x09, 0xc5, 0xcf, 0x97, 0xcc, 0xb3, 0x2c, 0x06, 0xa7, 0xa1, + 0xb4, 0x7c, 0x74, 0xa7, 0x79, 0x39, 0x65, 0x9e, 0xa2, 0x75, 0x59, 0xd5, 0x3d, 
0x63, 0xd0, 0x1e, + 0x6e, 0x8c, 0x47, 0xf8, 0x1f, 0xd6, 0x0b, 0x37, 0x5d, 0x26, 0x6b, 0x17, 0x57, 0x3b, 0x2d, 0x67, + 0x69, 0x34, 0x39, 0xbb, 0xb8, 0xee, 0x1b, 0x97, 0xd7, 0x7d, 0xe3, 0xfb, 0x75, 0xdf, 0xf8, 0x78, + 0xd3, 0x6f, 0x5d, 0xde, 0xf4, 0x5b, 0xdf, 0x6e, 0xfa, 0xad, 0xb7, 0x87, 0x01, 0xd3, 0xf3, 0xc4, + 0xc3, 0x54, 0x44, 0xd5, 0x5a, 0xd8, 0xbf, 0xbf, 0xb6, 0xbf, 0xdc, 0xd3, 0x0f, 0x7f, 0xfe, 0x02, + 0x74, 0x16, 0x83, 0xf2, 0x3a, 0x45, 0x48, 0x07, 0xbf, 0x02, 0x00, 0x00, 0xff, 0xff, 0x91, 0xef, + 0xc0, 0x3c, 0x33, 0x04, 0x00, 0x00, } func (m *Params) Marshal() (dAtA []byte, err error) { @@ -485,6 +535,43 @@ func (m *SlashRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *SlashRequests) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *SlashRequests) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *SlashRequests) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Requests) > 0 { + for iNdEx := len(m.Requests) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.Requests[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintConsumer(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + } + return len(dAtA) - i, nil +} + func encodeVarintConsumer(dAtA []byte, offset int, v uint64) int { offset -= sovConsumer(v) base := offset @@ -567,6 +654,21 @@ func (m *SlashRequest) Size() (n int) { return n } +func (m *SlashRequests) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.Requests) > 0 { + for _, e := range m.Requests { + l = e.Size() + n += 1 + l + sovConsumer(uint64(l)) + } + } + return n +} + func sovConsumer(x uint64) (n int) { return (math_bits.Len64(x|1) + 6) / 7 } @@ -1039,6 +1141,90 @@ func (m *SlashRequest) Unmarshal(dAtA []byte) error { } return nil } +func (m *SlashRequests) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowConsumer + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: SlashRequests: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: SlashRequests: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Requests", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowConsumer + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthConsumer + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthConsumer + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Requests = append(m.Requests, SlashRequest{}) + if err := m.Requests[len(m.Requests)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex 
+ skippy, err := skipConsumer(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthConsumer + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func skipConsumer(dAtA []byte) (n int, err error) { l := len(dAtA) iNdEx := 0 diff --git a/x/ccv/consumer/types/genesis.go b/x/ccv/consumer/types/genesis.go index a114d0bd17..0e2a0b2557 100644 --- a/x/ccv/consumer/types/genesis.go +++ b/x/ccv/consumer/types/genesis.go @@ -12,9 +12,8 @@ import ( ) // NewInitialGenesisState returns a consumer GenesisState for a completely new consumer chain. -// TODO: Include chain status func NewInitialGenesisState(cs *ibctmtypes.ClientState, consState *ibctmtypes.ConsensusState, - initValSet []abci.ValidatorUpdate, params Params) *GenesisState { + initValSet []abci.ValidatorUpdate, slashRequests SlashRequests, params Params) *GenesisState { return &GenesisState{ Params: params, @@ -22,21 +21,28 @@ func NewInitialGenesisState(cs *ibctmtypes.ClientState, consState *ibctmtypes.Co ProviderClientState: cs, ProviderConsensusState: consState, InitialValSet: initValSet, + PendingSlashRequests: slashRequests, } } // NewRestartGenesisState returns a consumer GenesisState that has already been established. func NewRestartGenesisState(clientID, channelID string, maturingPackets []MaturingVSCPacket, - initValSet []abci.ValidatorUpdate, params Params) *GenesisState { + initValSet []abci.ValidatorUpdate, + heightToValsetUpdateIDs []HeightToValsetUpdateID, + outstandingDowntimes []OutstandingDowntime, + params Params, +) *GenesisState { return &GenesisState{ - Params: params, - ProviderClientId: clientID, - ProviderChannelId: channelID, - MaturingPackets: maturingPackets, - NewChain: false, - InitialValSet: initValSet, + Params: params, + ProviderClientId: clientID, + ProviderChannelId: channelID, + MaturingPackets: maturingPackets, + NewChain: false, + InitialValSet: initValSet, + HeightToValsetUpdateId: heightToValsetUpdateIDs, + OutstandingDowntimeSlashing: outstandingDowntimes, } } diff --git a/x/ccv/consumer/types/genesis.pb.go b/x/ccv/consumer/types/genesis.pb.go index 19ffb21d47..aa66509507 100644 --- a/x/ccv/consumer/types/genesis.pb.go +++ b/x/ccv/consumer/types/genesis.pb.go @@ -35,9 +35,17 @@ type GenesisState struct { // ProviderClientState filled in on new chain, nil on restart. ProviderClientState *types.ClientState `protobuf:"bytes,5,opt,name=provider_client_state,json=providerClientState,proto3" json:"provider_client_state,omitempty"` // ProviderConsensusState filled in on new chain, nil on restart. - ProviderConsensusState *types.ConsensusState `protobuf:"bytes,6,opt,name=provider_consensus_state,json=providerConsensusState,proto3" json:"provider_consensus_state,omitempty"` - MaturingPackets []MaturingVSCPacket `protobuf:"bytes,7,rep,name=maturing_packets,json=maturingPackets,proto3" json:"maturing_packets"` - InitialValSet []types1.ValidatorUpdate `protobuf:"bytes,8,rep,name=initial_val_set,json=initialValSet,proto3" json:"initial_val_set"` + ProviderConsensusState *types.ConsensusState `protobuf:"bytes,6,opt,name=provider_consensus_state,json=providerConsensusState,proto3" json:"provider_consensus_state,omitempty"` + // MaturingPackets nil on new chain, filled on restart. 
+ MaturingPackets []MaturingVSCPacket `protobuf:"bytes,7,rep,name=maturing_packets,json=maturingPackets,proto3" json:"maturing_packets"` + // InitialValset filled in on new chain, manually filled in on restart. + InitialValSet []types1.ValidatorUpdate `protobuf:"bytes,8,rep,name=initial_val_set,json=initialValSet,proto3" json:"initial_val_set"` + // HeightToValsetUpdateId nil on new chain, filled on restart. + HeightToValsetUpdateId []HeightToValsetUpdateID `protobuf:"bytes,9,rep,name=height_to_valset_update_id,json=heightToValsetUpdateId,proto3" json:"height_to_valset_update_id"` + // OutstandingDowntimes nil on new chain, filled on restart. + OutstandingDowntimeSlashing []OutstandingDowntime `protobuf:"bytes,10,rep,name=outstanding_downtime_slashing,json=outstandingDowntimeSlashing,proto3" json:"outstanding_downtime_slashing"` + // PendingSlashRequests filled in on new chain, nil on restart. + PendingSlashRequests SlashRequests `protobuf:"bytes,11,opt,name=pending_slash_requests,json=pendingSlashRequests,proto3" json:"pending_slash_requests"` } func (m *GenesisState) Reset() { *m = GenesisState{} } @@ -129,7 +137,29 @@ func (m *GenesisState) GetInitialValSet() []types1.ValidatorUpdate { return nil } -// UnbondingSequence defines the genesis information for each unbonding packet sequence. +func (m *GenesisState) GetHeightToValsetUpdateId() []HeightToValsetUpdateID { + if m != nil { + return m.HeightToValsetUpdateId + } + return nil +} + +func (m *GenesisState) GetOutstandingDowntimeSlashing() []OutstandingDowntime { + if m != nil { + return m.OutstandingDowntimeSlashing + } + return nil +} + +func (m *GenesisState) GetPendingSlashRequests() SlashRequests { + if m != nil { + return m.PendingSlashRequests + } + return SlashRequests{} +} + +// UnbondingSequence defines the genesis information for each unbonding packet +// sequence. 
type MaturingVSCPacket struct { VscId uint64 `protobuf:"varint,1,opt,name=vscId,proto3" json:"vscId,omitempty"` MaturityTime uint64 `protobuf:"varint,2,opt,name=maturity_time,json=maturityTime,proto3" json:"maturity_time,omitempty"` @@ -182,9 +212,111 @@ func (m *MaturingVSCPacket) GetMaturityTime() uint64 { return 0 } +// HeightValsetUpdateID defines the genesis information for the mapping +// of each block height to a valset update id +type HeightToValsetUpdateID struct { + Height uint64 `protobuf:"varint,1,opt,name=height,proto3" json:"height,omitempty"` + ValsetUpdateId uint64 `protobuf:"varint,2,opt,name=valset_update_id,json=valsetUpdateId,proto3" json:"valset_update_id,omitempty"` +} + +func (m *HeightToValsetUpdateID) Reset() { *m = HeightToValsetUpdateID{} } +func (m *HeightToValsetUpdateID) String() string { return proto.CompactTextString(m) } +func (*HeightToValsetUpdateID) ProtoMessage() {} +func (*HeightToValsetUpdateID) Descriptor() ([]byte, []int) { + return fileDescriptor_2db73a6057a27482, []int{2} +} +func (m *HeightToValsetUpdateID) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *HeightToValsetUpdateID) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_HeightToValsetUpdateID.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *HeightToValsetUpdateID) XXX_Merge(src proto.Message) { + xxx_messageInfo_HeightToValsetUpdateID.Merge(m, src) +} +func (m *HeightToValsetUpdateID) XXX_Size() int { + return m.Size() +} +func (m *HeightToValsetUpdateID) XXX_DiscardUnknown() { + xxx_messageInfo_HeightToValsetUpdateID.DiscardUnknown(m) +} + +var xxx_messageInfo_HeightToValsetUpdateID proto.InternalMessageInfo + +func (m *HeightToValsetUpdateID) GetHeight() uint64 { + if m != nil { + return m.Height + } + return 0 +} + +func (m *HeightToValsetUpdateID) GetValsetUpdateId() uint64 { + if m != nil { + return m.ValsetUpdateId + } + return 0 +} + +// OutstandingDowntime defines the genesis information for each validator +// flagged with an outstanding downtime slashing. 
+type OutstandingDowntime struct { + ValidatorConsensusAddress string `protobuf:"bytes,1,opt,name=validator_consensus_address,json=validatorConsensusAddress,proto3" json:"validator_consensus_address,omitempty"` +} + +func (m *OutstandingDowntime) Reset() { *m = OutstandingDowntime{} } +func (m *OutstandingDowntime) String() string { return proto.CompactTextString(m) } +func (*OutstandingDowntime) ProtoMessage() {} +func (*OutstandingDowntime) Descriptor() ([]byte, []int) { + return fileDescriptor_2db73a6057a27482, []int{3} +} +func (m *OutstandingDowntime) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *OutstandingDowntime) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_OutstandingDowntime.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *OutstandingDowntime) XXX_Merge(src proto.Message) { + xxx_messageInfo_OutstandingDowntime.Merge(m, src) +} +func (m *OutstandingDowntime) XXX_Size() int { + return m.Size() +} +func (m *OutstandingDowntime) XXX_DiscardUnknown() { + xxx_messageInfo_OutstandingDowntime.DiscardUnknown(m) +} + +var xxx_messageInfo_OutstandingDowntime proto.InternalMessageInfo + +func (m *OutstandingDowntime) GetValidatorConsensusAddress() string { + if m != nil { + return m.ValidatorConsensusAddress + } + return "" +} + func init() { proto.RegisterType((*GenesisState)(nil), "interchain_security.ccv.consumer.v1.GenesisState") proto.RegisterType((*MaturingVSCPacket)(nil), "interchain_security.ccv.consumer.v1.MaturingVSCPacket") + proto.RegisterType((*HeightToValsetUpdateID)(nil), "interchain_security.ccv.consumer.v1.HeightToValsetUpdateID") + proto.RegisterType((*OutstandingDowntime)(nil), "interchain_security.ccv.consumer.v1.OutstandingDowntime") } func init() { @@ -192,41 +324,53 @@ func init() { } var fileDescriptor_2db73a6057a27482 = []byte{ - // 543 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x53, 0xcf, 0x6f, 0xd3, 0x30, - 0x14, 0x6e, 0x58, 0x37, 0x36, 0x6f, 0xd3, 0xb6, 0x6c, 0xa0, 0x68, 0x93, 0x42, 0xd9, 0x2e, 0x95, - 0x00, 0x5b, 0x2d, 0x12, 0x07, 0x8e, 0xeb, 0x01, 0xf5, 0xc0, 0x34, 0xa5, 0xa3, 0x07, 0x2e, 0x91, - 0xeb, 0x3c, 0xa5, 0x16, 0x89, 0x1d, 0xc5, 0x6e, 0x46, 0xff, 0x0b, 0xfe, 0xac, 0x1d, 0x77, 0xdc, - 0x09, 0xa1, 0xf6, 0x1f, 0x41, 0xb1, 0x93, 0xfe, 0x00, 0x24, 0x7a, 0xb3, 0x9f, 0xdf, 0xf7, 0xbe, - 0xef, 0x7d, 0xcf, 0x0f, 0x75, 0xb8, 0xd0, 0x90, 0xb3, 0x31, 0xe5, 0x22, 0x54, 0xc0, 0x26, 0x39, - 0xd7, 0x53, 0xc2, 0x58, 0x41, 0x98, 0x14, 0x6a, 0x92, 0x42, 0x4e, 0x8a, 0x0e, 0x89, 0x41, 0x80, - 0xe2, 0x0a, 0x67, 0xb9, 0xd4, 0xd2, 0xbd, 0xfa, 0x07, 0x04, 0x33, 0x56, 0xe0, 0x1a, 0x82, 0x8b, - 0xce, 0x39, 0xe1, 0x23, 0x46, 0x12, 0x1e, 0x8f, 0x35, 0x4b, 0x38, 0x08, 0xad, 0x88, 0x06, 0x11, - 0x41, 0x9e, 0x72, 0xa1, 0xcb, 0x92, 0xcb, 0x9b, 0xad, 0x7a, 0xfe, 0xba, 0x04, 0x30, 0x99, 0x03, - 0x61, 0x63, 0x2a, 0x04, 0x24, 0x65, 0x56, 0x75, 0xac, 0x52, 0xce, 0x62, 0x19, 0x4b, 0x73, 0x24, - 0xe5, 0xa9, 0x8a, 0x76, 0x37, 0xe9, 0x60, 0x21, 0xcd, 0x62, 0x2e, 0x56, 0xc4, 0xd0, 0x11, 0xe3, - 0x44, 0x4f, 0x33, 0xa8, 0xfa, 0xbb, 0x7c, 0x6a, 0xa2, 0x83, 0x4f, 0xb6, 0xe3, 0x81, 0xa6, 0x1a, - 0xdc, 0x3e, 0xda, 0xc9, 0x68, 0x4e, 0x53, 0xe5, 0x39, 0x2d, 0xa7, 0xbd, 0xdf, 0x7d, 0x83, 0x37, - 0x70, 0x00, 0xdf, 0x1a, 0xc8, 0x75, 0xf3, 0xe1, 0xe7, 0xab, 0x46, 0x50, 0x15, 0x70, 0xdf, 0x22, - 0x37, 0xcb, 0x65, 0xc1, 0x23, 0xc8, 0x43, 
0x6b, 0x4c, 0xc8, 0x23, 0xef, 0x59, 0xcb, 0x69, 0xef, - 0x05, 0xc7, 0xf5, 0x4b, 0xcf, 0x3c, 0xf4, 0x23, 0x17, 0xa3, 0xd3, 0x65, 0xb6, 0xb5, 0xa2, 0x4c, - 0xdf, 0x32, 0xe9, 0x27, 0x8b, 0x74, 0xfb, 0xd2, 0x8f, 0xdc, 0x0b, 0xb4, 0x27, 0xe0, 0x3e, 0x34, - 0xc2, 0xbc, 0x66, 0xcb, 0x69, 0xef, 0x06, 0xbb, 0x02, 0xee, 0x7b, 0xe5, 0xdd, 0x0d, 0xd1, 0x8b, - 0x3f, 0xa9, 0x55, 0xd9, 0x9e, 0xb7, 0x5d, 0x37, 0x35, 0x62, 0x78, 0x75, 0x62, 0x78, 0x65, 0x46, - 0x45, 0x07, 0x5b, 0x55, 0xc6, 0x91, 0xe0, 0x74, 0x5d, 0xaa, 0xb5, 0x69, 0x8c, 0xbc, 0x25, 0x81, - 0x14, 0x0a, 0x84, 0x9a, 0xa8, 0x8a, 0x63, 0xc7, 0x70, 0xe0, 0xff, 0x72, 0xd4, 0x30, 0x4b, 0xf3, - 0x72, 0x41, 0xb3, 0x16, 0x77, 0x63, 0x74, 0x9c, 0x52, 0x3d, 0xc9, 0xb9, 0x88, 0xc3, 0x8c, 0xb2, - 0x6f, 0xa0, 0x95, 0xf7, 0xbc, 0xb5, 0xd5, 0xde, 0xef, 0x7e, 0xd8, 0x68, 0x34, 0x9f, 0x2b, 0xf0, - 0x70, 0xd0, 0xbb, 0x35, 0xf0, 0x6a, 0x4a, 0x47, 0x75, 0x55, 0x1b, 0x55, 0xee, 0x0d, 0x3a, 0xe2, - 0x82, 0x6b, 0x4e, 0x93, 0xb0, 0xa0, 0x49, 0xa8, 0x40, 0x7b, 0xbb, 0x86, 0xa7, 0xb5, 0x2a, 0xbc, - 0xfc, 0x41, 0x78, 0x48, 0x13, 0x1e, 0x51, 0x2d, 0xf3, 0x2f, 0x59, 0x44, 0x35, 0x54, 0x15, 0x0f, - 0x2b, 0xf8, 0x90, 0x26, 0x03, 0xd0, 0x97, 0x37, 0xe8, 0xe4, 0x2f, 0x6e, 0xf7, 0x0c, 0x6d, 0x17, - 0x8a, 0xf5, 0x23, 0xf3, 0xbb, 0x9a, 0x81, 0xbd, 0xb8, 0x57, 0xe8, 0xd0, 0xaa, 0xd1, 0xd3, 0x50, - 0xf3, 0x14, 0xcc, 0x27, 0x69, 0x06, 0x07, 0x75, 0xf0, 0x8e, 0xa7, 0x70, 0x7d, 0xf7, 0x30, 0xf3, - 0x9d, 0xc7, 0x99, 0xef, 0xfc, 0x9a, 0xf9, 0xce, 0x8f, 0xb9, 0xdf, 0x78, 0x9c, 0xfb, 0x8d, 0xa7, - 0xb9, 0xdf, 0xf8, 0xfa, 0x31, 0xe6, 0x7a, 0x3c, 0x19, 0x61, 0x26, 0x53, 0xc2, 0xa4, 0x4a, 0xa5, - 0x22, 0x4b, 0x67, 0xde, 0x2d, 0xf6, 0xe4, 0xfb, 0xfa, 0xa6, 0x98, 0x35, 0x18, 0xed, 0x98, 0x3d, - 0x78, 0xff, 0x3b, 0x00, 0x00, 0xff, 0xff, 0xcd, 0x97, 0x1d, 0x59, 0x1c, 0x04, 0x00, 0x00, + // 729 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x54, 0xcf, 0x4f, 0x1b, 0x39, + 0x14, 0xce, 0x2c, 0x21, 0x10, 0x03, 0x0b, 0x18, 0x36, 0x9a, 0x25, 0xda, 0x6c, 0x36, 0x5c, 0x22, + 0x6d, 0x3b, 0xa3, 0xa4, 0x52, 0x55, 0xb5, 0x52, 0xa5, 0x02, 0x52, 0x9b, 0x43, 0x29, 0x9a, 0x40, + 0x0e, 0x5c, 0x46, 0x8e, 0xc7, 0x9a, 0xb1, 0x3a, 0x63, 0xa7, 0x63, 0xcf, 0x50, 0x0e, 0xbd, 0xf4, + 0x2f, 0xe8, 0x9f, 0xc5, 0xa1, 0x07, 0x8e, 0x3d, 0x55, 0x15, 0xfc, 0x23, 0xd5, 0xd8, 0xce, 0xaf, + 0x12, 0xa9, 0xb9, 0xd9, 0x7e, 0xef, 0xfb, 0xbe, 0xf7, 0xbe, 0x67, 0x1b, 0x74, 0x28, 0x93, 0x24, + 0xc5, 0x11, 0xa2, 0xcc, 0x17, 0x04, 0x67, 0x29, 0x95, 0xd7, 0x2e, 0xc6, 0xb9, 0x8b, 0x39, 0x13, + 0x59, 0x42, 0x52, 0x37, 0xef, 0xb8, 0x21, 0x61, 0x44, 0x50, 0xe1, 0x8c, 0x52, 0x2e, 0x39, 0x3c, + 0x5c, 0x00, 0x71, 0x30, 0xce, 0x9d, 0x31, 0xc4, 0xc9, 0x3b, 0x07, 0x2e, 0x1d, 0x62, 0x37, 0xa6, + 0x61, 0x24, 0x71, 0x4c, 0x09, 0x93, 0xc2, 0x95, 0x84, 0x05, 0x24, 0x4d, 0x28, 0x93, 0x05, 0xe5, + 0x74, 0xa7, 0x59, 0x0f, 0xfe, 0x2b, 0x00, 0x98, 0xa7, 0xc4, 0xc5, 0x11, 0x62, 0x8c, 0xc4, 0x45, + 0x96, 0x59, 0x9a, 0x94, 0xfd, 0x90, 0x87, 0x5c, 0x2d, 0xdd, 0x62, 0x65, 0x4e, 0xbb, 0xcb, 0x74, + 0x30, 0x29, 0x4d, 0x63, 0xea, 0x33, 0xc5, 0xa0, 0x21, 0xa6, 0xae, 0xbc, 0x1e, 0x11, 0xd3, 0x5f, + 0xeb, 0xeb, 0x1a, 0xd8, 0x7c, 0xad, 0x3b, 0xee, 0x4b, 0x24, 0x09, 0xec, 0x81, 0xca, 0x08, 0xa5, + 0x28, 0x11, 0xb6, 0xd5, 0xb4, 0xda, 0x1b, 0xdd, 0xff, 0x9d, 0x25, 0x1c, 0x70, 0xce, 0x14, 0xe4, + 0xa8, 0x7c, 0xf3, 0xfd, 0xdf, 0x92, 0x67, 0x08, 0xe0, 0x23, 0x00, 0x47, 0x29, 0xcf, 0x69, 0x40, + 0x52, 0x5f, 0x1b, 0xe3, 0xd3, 0xc0, 0xfe, 0xa3, 0x69, 0xb5, 0xab, 0xde, 0xce, 0x38, 0x72, 0xac, + 0x02, 0xbd, 0x00, 0x3a, 
0x60, 0x6f, 0x9a, 0xad, 0xad, 0x28, 0xd2, 0x57, 0x54, 0xfa, 0xee, 0x24, + 0x5d, 0x47, 0x7a, 0x01, 0xac, 0x83, 0x2a, 0x23, 0x57, 0xbe, 0x2a, 0xcc, 0x2e, 0x37, 0xad, 0xf6, + 0xba, 0xb7, 0xce, 0xc8, 0xd5, 0x71, 0xb1, 0x87, 0x3e, 0xf8, 0xeb, 0x57, 0x69, 0x51, 0xb4, 0x67, + 0xaf, 0x8e, 0x9b, 0x1a, 0x62, 0x67, 0x76, 0x62, 0xce, 0xcc, 0x8c, 0xf2, 0x8e, 0xa3, 0xab, 0x52, + 0x8e, 0x78, 0x7b, 0xf3, 0xa5, 0x6a, 0x9b, 0x22, 0x60, 0x4f, 0x05, 0x38, 0x13, 0x84, 0x89, 0x4c, + 0x18, 0x8d, 0x8a, 0xd2, 0x70, 0x7e, 0xab, 0x31, 0x86, 0x69, 0x99, 0xda, 0x44, 0x66, 0xee, 0x1c, + 0x86, 0x60, 0x27, 0x41, 0x32, 0x4b, 0x29, 0x0b, 0xfd, 0x11, 0xc2, 0xef, 0x89, 0x14, 0xf6, 0x5a, + 0x73, 0xa5, 0xbd, 0xd1, 0x7d, 0xba, 0xd4, 0x68, 0xde, 0x1a, 0xf0, 0xa0, 0x7f, 0x7c, 0xa6, 0xe0, + 0x66, 0x4a, 0xdb, 0x63, 0x56, 0x7d, 0x2a, 0xe0, 0x29, 0xd8, 0xa6, 0x8c, 0x4a, 0x8a, 0x62, 0x3f, + 0x47, 0xb1, 0x2f, 0x88, 0xb4, 0xd7, 0x95, 0x4e, 0x73, 0xb6, 0xf0, 0xe2, 0x06, 0x39, 0x03, 0x14, + 0xd3, 0x00, 0x49, 0x9e, 0x5e, 0x8c, 0x02, 0x24, 0x89, 0x61, 0xdc, 0x32, 0xf0, 0x01, 0x8a, 0xfb, + 0x44, 0xc2, 0x4f, 0xe0, 0x20, 0x22, 0x45, 0xfb, 0xbe, 0xe4, 0x05, 0xa3, 0x20, 0xd2, 0xcf, 0x54, + 0x7e, 0x31, 0xd7, 0xaa, 0xa2, 0x7e, 0xb1, 0x54, 0x0b, 0x6f, 0x14, 0xcd, 0x39, 0x1f, 0x28, 0x12, + 0xad, 0xd9, 0x3b, 0x31, 0xaa, 0xb5, 0x68, 0x51, 0x34, 0x80, 0x9f, 0x2d, 0xf0, 0x0f, 0xcf, 0xa4, + 0x90, 0x88, 0x05, 0x85, 0x77, 0x01, 0xbf, 0x62, 0x92, 0x26, 0xc4, 0x17, 0x31, 0x12, 0x11, 0x65, + 0xa1, 0x0d, 0x54, 0x09, 0xcf, 0x96, 0x2a, 0xe1, 0xdd, 0x94, 0xe9, 0xc4, 0x10, 0x19, 0xfd, 0x3a, + 0x7f, 0x18, 0xea, 0x1b, 0x09, 0xc8, 0x40, 0x6d, 0x44, 0xb4, 0xbe, 0x92, 0xf5, 0x53, 0xf2, 0x21, + 0x23, 0x42, 0x0a, 0x7b, 0x43, 0x5d, 0x92, 0xee, 0x52, 0xe2, 0x8a, 0xce, 0x33, 0x48, 0x23, 0xbb, + 0x6f, 0x78, 0xe7, 0x62, 0xad, 0x53, 0xb0, 0xfb, 0x60, 0xde, 0x70, 0x1f, 0xac, 0xe6, 0x02, 0xf7, + 0x02, 0xf5, 0xa2, 0xcb, 0x9e, 0xde, 0xc0, 0x43, 0xb0, 0xa5, 0x6f, 0x80, 0xbc, 0xf6, 0x8b, 0x9a, + 0xd5, 0xc3, 0x2c, 0x7b, 0x9b, 0xe3, 0xc3, 0x73, 0x9a, 0x90, 0xd6, 0x25, 0xa8, 0x2d, 0x36, 0x1f, + 0xd6, 0x40, 0x45, 0x1b, 0x6f, 0x58, 0xcd, 0x0e, 0xb6, 0xc1, 0xce, 0x83, 0x59, 0x6b, 0xe6, 0x3f, + 0xf3, 0xb9, 0x01, 0xb5, 0x2e, 0xc0, 0xde, 0x02, 0x57, 0xe1, 0x4b, 0x50, 0xcf, 0xc7, 0xd7, 0x6b, + 0xe6, 0x69, 0xa1, 0x20, 0x48, 0x89, 0xd0, 0xbf, 0x52, 0xd5, 0xfb, 0x7b, 0x92, 0x32, 0x79, 0x2d, + 0xaf, 0x74, 0xc2, 0xd1, 0xf9, 0xcd, 0x5d, 0xc3, 0xba, 0xbd, 0x6b, 0x58, 0x3f, 0xee, 0x1a, 0xd6, + 0x97, 0xfb, 0x46, 0xe9, 0xf6, 0xbe, 0x51, 0xfa, 0x76, 0xdf, 0x28, 0x5d, 0x3e, 0x0f, 0xa9, 0x8c, + 0xb2, 0xa1, 0x83, 0x79, 0xe2, 0x62, 0x2e, 0x12, 0x2e, 0xdc, 0xa9, 0xfb, 0x8f, 0x27, 0xdf, 0xe9, + 0xc7, 0xf9, 0x0f, 0x55, 0xfd, 0x96, 0xc3, 0x8a, 0xfa, 0x2e, 0x9f, 0xfc, 0x0c, 0x00, 0x00, 0xff, + 0xff, 0x09, 0x61, 0x41, 0x6b, 0x43, 0x06, 0x00, 0x00, } func (m *GenesisState) Marshal() (dAtA []byte, err error) { @@ -249,6 +393,44 @@ func (m *GenesisState) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + { + size, err := m.PendingSlashRequests.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenesis(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x5a + if len(m.OutstandingDowntimeSlashing) > 0 { + for iNdEx := len(m.OutstandingDowntimeSlashing) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.OutstandingDowntimeSlashing[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenesis(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x52 + } + } + if len(m.HeightToValsetUpdateId) > 0 { + for iNdEx := 
len(m.HeightToValsetUpdateId) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.HeightToValsetUpdateId[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenesis(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x4a + } + } if len(m.InitialValSet) > 0 { for iNdEx := len(m.InitialValSet) - 1; iNdEx >= 0; iNdEx-- { { @@ -371,6 +553,69 @@ func (m *MaturingVSCPacket) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *HeightToValsetUpdateID) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *HeightToValsetUpdateID) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *HeightToValsetUpdateID) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.ValsetUpdateId != 0 { + i = encodeVarintGenesis(dAtA, i, uint64(m.ValsetUpdateId)) + i-- + dAtA[i] = 0x10 + } + if m.Height != 0 { + i = encodeVarintGenesis(dAtA, i, uint64(m.Height)) + i-- + dAtA[i] = 0x8 + } + return len(dAtA) - i, nil +} + +func (m *OutstandingDowntime) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *OutstandingDowntime) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *OutstandingDowntime) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.ValidatorConsensusAddress) > 0 { + i -= len(m.ValidatorConsensusAddress) + copy(dAtA[i:], m.ValidatorConsensusAddress) + i = encodeVarintGenesis(dAtA, i, uint64(len(m.ValidatorConsensusAddress))) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + func encodeVarintGenesis(dAtA []byte, offset int, v uint64) int { offset -= sovGenesis(v) base := offset @@ -421,6 +666,20 @@ func (m *GenesisState) Size() (n int) { n += 1 + l + sovGenesis(uint64(l)) } } + if len(m.HeightToValsetUpdateId) > 0 { + for _, e := range m.HeightToValsetUpdateId { + l = e.Size() + n += 1 + l + sovGenesis(uint64(l)) + } + } + if len(m.OutstandingDowntimeSlashing) > 0 { + for _, e := range m.OutstandingDowntimeSlashing { + l = e.Size() + n += 1 + l + sovGenesis(uint64(l)) + } + } + l = m.PendingSlashRequests.Size() + n += 1 + l + sovGenesis(uint64(l)) return n } @@ -439,6 +698,34 @@ func (m *MaturingVSCPacket) Size() (n int) { return n } +func (m *HeightToValsetUpdateID) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Height != 0 { + n += 1 + sovGenesis(uint64(m.Height)) + } + if m.ValsetUpdateId != 0 { + n += 1 + sovGenesis(uint64(m.ValsetUpdateId)) + } + return n +} + +func (m *OutstandingDowntime) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.ValidatorConsensusAddress) + if l > 0 { + n += 1 + l + sovGenesis(uint64(l)) + } + return n +} + func sovGenesis(x uint64) (n int) { return (math_bits.Len64(x|1) + 6) / 7 } @@ -731,6 +1018,107 @@ func (m *GenesisState) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex + case 9: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field HeightToValsetUpdateId", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 
ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.HeightToValsetUpdateId = append(m.HeightToValsetUpdateId, HeightToValsetUpdateID{}) + if err := m.HeightToValsetUpdateId[len(m.HeightToValsetUpdateId)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 10: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field OutstandingDowntimeSlashing", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.OutstandingDowntimeSlashing = append(m.OutstandingDowntimeSlashing, OutstandingDowntime{}) + if err := m.OutstandingDowntimeSlashing[len(m.OutstandingDowntimeSlashing)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 11: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field PendingSlashRequests", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.PendingSlashRequests.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipGenesis(dAtA[iNdEx:]) @@ -840,6 +1228,176 @@ func (m *MaturingVSCPacket) Unmarshal(dAtA []byte) error { } return nil } +func (m *HeightToValsetUpdateID) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: HeightToValsetUpdateID: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: HeightToValsetUpdateID: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Height", wireType) + } + m.Height = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Height |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field ValsetUpdateId", wireType) + } + m.ValsetUpdateId = 0 + for shift := 
uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.ValsetUpdateId |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + default: + iNdEx = preIndex + skippy, err := skipGenesis(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenesis + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *OutstandingDowntime) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: OutstandingDowntime: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: OutstandingDowntime: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ValidatorConsensusAddress", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ValidatorConsensusAddress = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenesis(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenesis + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func skipGenesis(dAtA []byte) (n int, err error) { l := len(dAtA) iNdEx := 0 diff --git a/x/ccv/consumer/types/genesis_test.go b/x/ccv/consumer/types/genesis_test.go index a122619961..3408c81ccb 100644 --- a/x/ccv/consumer/types/genesis_test.go +++ b/x/ccv/consumer/types/genesis_test.go @@ -55,29 +55,29 @@ func TestValidateInitialGenesisState(t *testing.T) { }{ { "valid new consumer genesis state", - types.NewInitialGenesisState(cs, consensusState, valUpdates, params), + types.NewInitialGenesisState(cs, consensusState, valUpdates, types.SlashRequests{}, params), false, }, { "invalid new consumer genesis state: nil client state", - types.NewInitialGenesisState(nil, consensusState, valUpdates, params), + types.NewInitialGenesisState(nil, consensusState, valUpdates, types.SlashRequests{}, params), true, }, { "invalid new consumer genesis state: invalid client state", types.NewInitialGenesisState(&ibctmtypes.ClientState{ChainId: "badClientState"}, - consensusState, valUpdates, params), + consensusState, valUpdates, types.SlashRequests{}, params), true, }, { "invalid new consumer genesis state: nil consensus state", - types.NewInitialGenesisState(cs, nil, valUpdates, params), + 
types.NewInitialGenesisState(cs, nil, valUpdates, types.SlashRequests{}, params), true, }, { "invalid new consumer genesis state: invalid consensus state", types.NewInitialGenesisState(cs, &ibctmtypes.ConsensusState{Timestamp: time.Now()}, - valUpdates, params), + valUpdates, types.SlashRequests{}, params), true, }, { @@ -91,6 +91,9 @@ func TestValidateInitialGenesisState(t *testing.T) { consensusState, nil, valUpdates, + nil, + nil, + types.SlashRequests{}, }, true, }, @@ -105,6 +108,9 @@ func TestValidateInitialGenesisState(t *testing.T) { consensusState, nil, valUpdates, + nil, + nil, + types.SlashRequests{}, }, true, }, @@ -119,12 +125,15 @@ func TestValidateInitialGenesisState(t *testing.T) { consensusState, []types.MaturingVSCPacket{{}}, valUpdates, + nil, + nil, + types.SlashRequests{}, }, true, }, { "invalid new consumer genesis state: nil initial validator set", - types.NewInitialGenesisState(cs, consensusState, nil, params), + types.NewInitialGenesisState(cs, consensusState, nil, types.SlashRequests{}, params), true, }, { @@ -132,7 +141,7 @@ func TestValidateInitialGenesisState(t *testing.T) { types.NewInitialGenesisState( cs, ibctmtypes.NewConsensusState( time.Now(), commitmenttypes.NewMerkleRoot([]byte("apphash")), []byte("wrong_hash")), - valUpdates, params), + valUpdates, types.SlashRequests{}, params), true, }, } @@ -173,7 +182,7 @@ func TestValidateRestartGenesisState(t *testing.T) { }{ { "valid restart consumer genesis state: empty maturing packets", - types.NewRestartGenesisState("ccvclient", "ccvchannel", nil, valUpdates, params), + types.NewRestartGenesisState("ccvclient", "ccvchannel", nil, valUpdates, nil, nil, params), false, }, { @@ -182,31 +191,31 @@ func TestValidateRestartGenesisState(t *testing.T) { {1, uint64(time.Now().UnixNano())}, {3, uint64(time.Now().UnixNano())}, {5, uint64(time.Now().UnixNano())}, - }, valUpdates, params), + }, valUpdates, nil, nil, params), false, }, { "invalid restart consumer genesis state: channel id is empty", - types.NewRestartGenesisState("", "ccvchannel", nil, valUpdates, params), + types.NewRestartGenesisState("", "ccvchannel", nil, valUpdates, nil, nil, params), true, }, { "invalid restart consumer genesis state: channel id is empty", - types.NewRestartGenesisState("ccvclient", "", nil, valUpdates, params), + types.NewRestartGenesisState("ccvclient", "", nil, valUpdates, nil, nil, params), true, }, { "invalid restart consumer genesis state: maturing packet vscId is invalid", types.NewRestartGenesisState("ccvclient", "ccvchannel", []types.MaturingVSCPacket{ {0, uint64(time.Now().UnixNano())}, - }, valUpdates, params), + }, valUpdates, nil, nil, params), true, }, { "invalid restart consumer genesis state: maturing packet time is invalid", types.NewRestartGenesisState("ccvclient", "ccvchannel", []types.MaturingVSCPacket{ {1, 0}, - }, valUpdates, params), + }, valUpdates, nil, nil, params), true, }, { @@ -220,6 +229,9 @@ func TestValidateRestartGenesisState(t *testing.T) { nil, nil, valUpdates, + nil, + nil, + types.SlashRequests{}, }, true, }, @@ -234,12 +246,15 @@ func TestValidateRestartGenesisState(t *testing.T) { consensusState, nil, valUpdates, + nil, + nil, + types.SlashRequests{}, }, true, }, { "invalid restart consumer genesis state: nil initial validator set", - types.NewRestartGenesisState("ccvclient", "ccvchannel", nil, nil, params), + types.NewRestartGenesisState("ccvclient", "ccvchannel", nil, nil, nil, nil, params), true, }, } diff --git a/x/ccv/consumer/types/keys.go b/x/ccv/consumer/types/keys.go index 
0a037f2e7f..a94006840a 100644 --- a/x/ccv/consumer/types/keys.go +++ b/x/ccv/consumer/types/keys.go @@ -4,7 +4,6 @@ import ( "encoding/binary" sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/cosmos/cosmos-sdk/types/address" ) const ( @@ -122,8 +121,8 @@ func HeightValsetUpdateIDKey(height uint64) []byte { } // OutstandingDowntimeKey returns the key to a validators' outstanding downtime by consensus address -func OutstandingDowntimeKey(v sdk.ConsAddress) []byte { - return append([]byte{OutstandingDowntimeBytePrefix}, address.MustLengthPrefix(v.Bytes())...) +func OutstandingDowntimeKey(address sdk.ConsAddress) []byte { + return append([]byte{OutstandingDowntimeBytePrefix}, address.Bytes()...) } // CrossChainValidatorKey returns the key to a cross chain validator by consensus address diff --git a/x/ccv/democracy/distribution/doc.go b/x/ccv/democracy/distribution/doc.go new file mode 100644 index 0000000000..2905f74cd4 --- /dev/null +++ b/x/ccv/democracy/distribution/doc.go @@ -0,0 +1,9 @@ +/* +Package distribution defines a "wrapper" module around the Cosmos SDK's native +x/distribution module. It embeds the native module and overrides its BeginBlock +logic to allocate the collected fees to the democracy representatives, with no proposer reward. + +The consumer chain should utilize the x/ccv/democracy/distribution module to distribute +the consumer redistribution fees among its democracy representatives. +*/ +package distribution diff --git a/x/ccv/democracy/distribution/module.go b/x/ccv/democracy/distribution/module.go new file mode 100644 index 0000000000..4c535aad44 --- /dev/null +++ b/x/ccv/democracy/distribution/module.go @@ -0,0 +1,125 @@ +package distribution + +import ( + "time" + + "github.com/cosmos/cosmos-sdk/codec" + "github.com/cosmos/cosmos-sdk/types/module" + + "github.com/cosmos/cosmos-sdk/telemetry" + sdk "github.com/cosmos/cosmos-sdk/types" + distr "github.com/cosmos/cosmos-sdk/x/distribution" + "github.com/cosmos/cosmos-sdk/x/distribution/keeper" + stakingkeeper "github.com/cosmos/cosmos-sdk/x/staking/keeper" + stakingtypes "github.com/cosmos/cosmos-sdk/x/staking/types" + consumertypes "github.com/cosmos/interchain-security/x/ccv/consumer/types" + + distrtypes "github.com/cosmos/cosmos-sdk/x/distribution/types" + abci "github.com/tendermint/tendermint/abci/types" +) + +var ( + _ module.AppModule = AppModule{} + _ module.AppModuleBasic = AppModuleBasic{} + _ module.AppModuleSimulation = AppModule{} +) + +// AppModuleBasic embeds the Cosmos SDK's x/distribution AppModuleBasic. +type AppModuleBasic struct { + distr.AppModuleBasic +} + +// AppModule embeds the Cosmos SDK's x/distribution AppModule +type AppModule struct { + // embed the Cosmos SDK's x/distribution AppModule + distr.AppModule + + keeper keeper.Keeper + accountKeeper distrtypes.AccountKeeper + bankKeeper distrtypes.BankKeeper + stakingKeeper stakingkeeper.Keeper + + feeCollectorName string +} + +// NewAppModule creates a new AppModule object using the native x/distribution module +// AppModule constructor.
+func NewAppModule( + cdc codec.Codec, keeper keeper.Keeper, ak distrtypes.AccountKeeper, + bk distrtypes.BankKeeper, sk stakingkeeper.Keeper, feeCollectorName string, +) AppModule { + distrAppMod := distr.NewAppModule(cdc, keeper, ak, bk, sk) + return AppModule{ + AppModule: distrAppMod, + keeper: keeper, + accountKeeper: ak, + bankKeeper: bk, + stakingKeeper: sk, + feeCollectorName: feeCollectorName, + } +} + +// BeginBlock mirrors the functionality of the cosmos-sdk/distribution BeginBlocker, +// however it allocates no proposer reward +func (am AppModule) BeginBlock(ctx sdk.Context, req abci.RequestBeginBlock) { + defer telemetry.ModuleMeasureSince(distrtypes.ModuleName, time.Now(), telemetry.MetricKeyBeginBlocker) + + // TODO this is Tendermint-dependent + // ref https://github.com/cosmos/cosmos-sdk/issues/3095 + if ctx.BlockHeight() > 1 { + am.AllocateTokens(ctx) + } +} + +// AllocateTokens handles distribution of the collected fees +func (am AppModule) AllocateTokens( + ctx sdk.Context, +) { + + // fetch and clear the collected fees for distribution; since this is + // called in BeginBlock, collected fees will be from the previous block + // (and distributed to the current representatives) + feeCollector := am.accountKeeper.GetModuleAccount(ctx, consumertypes.ConsumerRedistributeName) + feesCollectedInt := am.bankKeeper.GetAllBalances(ctx, feeCollector.GetAddress()) + feesCollected := sdk.NewDecCoinsFromCoins(feesCollectedInt...) + + // transfer collected fees to the distribution module account + err := am.bankKeeper.SendCoinsFromModuleToModule(ctx, consumertypes.ConsumerRedistributeName, distrtypes.ModuleName, feesCollectedInt) + if err != nil { + panic(err) + } + + // temporary workaround to keep CanWithdrawInvariant happy + // general discussions here: https://github.com/cosmos/cosmos-sdk/issues/2906#issuecomment-441867634 + feePool := am.keeper.GetFeePool(ctx) + vs := am.stakingKeeper.GetValidatorSet() + totalBondedTokens := vs.TotalBondedTokens(ctx) + if totalBondedTokens.IsZero() { + feePool.CommunityPool = feePool.CommunityPool.Add(feesCollected...) + am.keeper.SetFeePool(ctx, feePool) + return + } + + // calculate the fraction allocated to representatives by subtracting the community tax. + // e.g. if community tax is 0.02, representatives fraction will be 0.98 (2% goes to the community pool and the rest to the representatives) + remaining := feesCollected + communityTax := am.keeper.GetCommunityTax(ctx) + representativesFraction := sdk.OneDec().Sub(communityTax) + + // allocate tokens proportionally to representatives voting power + vs.IterateBondedValidatorsByPower(ctx, func(_ int64, validator stakingtypes.ValidatorI) bool { + // we get this validator's percentage of the total power by dividing their tokens by the total bonded tokens + powerFraction := sdk.NewDecFromInt(validator.GetTokens()).QuoTruncate(sdk.NewDecFromInt(totalBondedTokens)) + // we truncate here again, which means that the reward will be slightly lower than it should be + reward := feesCollected.MulDecTruncate(representativesFraction).MulDecTruncate(powerFraction) + am.keeper.AllocateTokensToValidator(ctx, validator, reward) + remaining = remaining.Sub(reward) + + return false + }) + + // allocate community funding + // due to the 3 truncations above, the remainder sent to the community pool will be slightly more than it should be. This is OK + feePool.CommunityPool = feePool.CommunityPool.Add(remaining...)
+ am.keeper.SetFeePool(ctx, feePool) +} diff --git a/x/ccv/staking/doc.go b/x/ccv/democracy/staking/doc.go similarity index 85% rename from x/ccv/staking/doc.go rename to x/ccv/democracy/staking/doc.go index 71c779783a..a405c6fa2d 100644 --- a/x/ccv/staking/doc.go +++ b/x/ccv/democracy/staking/doc.go @@ -6,7 +6,7 @@ overrides two core methods, `InitGenesis` and `EndBlock`. Specifically, these methods perform no-ops and return no validator set updates, as validator sets are tracked by the consumer chain's x/ccv/consumer module. -The consumer chain should utilize the x/ccv/staking module to perform democratic +The consumer chain should utilize the x/ccv/democracy/staking module to perform democratic actions such as participating and voting within the chain's governance system. */ package staking diff --git a/x/ccv/staking/module.go b/x/ccv/democracy/staking/module.go similarity index 100% rename from x/ccv/staking/module.go rename to x/ccv/democracy/staking/module.go diff --git a/x/ccv/provider/client/proposal_handler.go b/x/ccv/provider/client/proposal_handler.go index f6199eaea3..345001c64a 100644 --- a/x/ccv/provider/client/proposal_handler.go +++ b/x/ccv/provider/client/proposal_handler.go @@ -21,21 +21,21 @@ import ( ) // ProposalHandler is the param change proposal handler. -var ProposalHandler = govclient.NewProposalHandler(NewCreateConsumerChainProposalTxCmd, ProposalRESTHandler) +var ProposalHandler = govclient.NewProposalHandler(SubmitConsumerAdditionPropTxCmd, ProposalRESTHandler) -// NewCreateConsumerChainProposalTxCmd returns a CLI command handler for creating -// a new consumer chain proposal governance transaction. -func NewCreateConsumerChainProposalTxCmd() *cobra.Command { +// SubmitConsumerAdditionPropTxCmd returns a CLI command handler for submitting +// a consumer addition proposal via a transaction. +func SubmitConsumerAdditionPropTxCmd() *cobra.Command { return &cobra.Command{ - Use: "create-consumer-chain [proposal-file]", + Use: "consumer-addition [proposal-file]", Args: cobra.ExactArgs(1), - Short: "Submit a consumer chain creation proposal", + Short: "Submit a consumer addition proposal", Long: ` -Submit a consumer chain creation proposal along with an initial deposit. +Submit a consumer addition proposal along with an initial deposit. The proposal details must be supplied via a JSON file. 
Example: -$ %s tx gov submit-proposal create-consumer-chain --from= +$ %s tx gov submit-proposal consumer-addition --from= Where proposal.json contains: @@ -59,12 +59,12 @@ Where proposal.json contains: return err } - proposal, err := ParseCreateConsumerChainProposalJSON(args[0]) + proposal, err := ParseConsumerAdditionProposalJSON(args[0]) if err != nil { return err } - content := types.NewCreateConsumerChainProposal( + content := types.NewConsumerAdditionProposal( proposal.Title, proposal.Description, proposal.ChainId, proposal.InitialHeight, proposal.GenesisHash, proposal.BinaryHash, proposal.SpawnTime) @@ -85,7 +85,7 @@ Where proposal.json contains: } } -type CreateConsumerChainProposalJSON struct { +type ConsumerAdditionProposalJSON struct { Title string `json:"title"` Description string `json:"description"` ChainId string `json:"chain_id"` @@ -96,7 +96,7 @@ type CreateConsumerChainProposalJSON struct { Deposit string `json:"deposit"` } -type CreateConsumerChainProposalReq struct { +type ConsumerAdditionProposalReq struct { BaseReq rest.BaseReq `json:"base_req"` Proposer sdk.AccAddress `json:"proposer"` @@ -110,8 +110,8 @@ type CreateConsumerChainProposalReq struct { Deposit sdk.Coins `json:"deposit"` } -func ParseCreateConsumerChainProposalJSON(proposalFile string) (CreateConsumerChainProposalJSON, error) { - proposal := CreateConsumerChainProposalJSON{} +func ParseConsumerAdditionProposalJSON(proposalFile string) (ConsumerAdditionProposalJSON, error) { + proposal := ConsumerAdditionProposalJSON{} contents, err := ioutil.ReadFile(filepath.Clean(proposalFile)) if err != nil { @@ -129,14 +129,14 @@ func ParseCreateConsumerChainProposalJSON(proposalFile string) (CreateConsumerCh // change REST handler with a given sub-route. func ProposalRESTHandler(clientCtx client.Context) govrest.ProposalRESTHandler { return govrest.ProposalRESTHandler{ - SubRoute: "create_consumer_chain", + SubRoute: "propose_consumer_addition", Handler: postProposalHandlerFn(clientCtx), } } func postProposalHandlerFn(clientCtx client.Context) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { - var req CreateConsumerChainProposalReq + var req ConsumerAdditionProposalReq if !rest.ReadRESTReq(w, r, clientCtx.LegacyAmino, &req) { return } @@ -146,7 +146,7 @@ func postProposalHandlerFn(clientCtx client.Context) http.HandlerFunc { return } - content := types.NewCreateConsumerChainProposal( + content := types.NewConsumerAdditionProposal( req.Title, req.Description, req.ChainId, req.InitialHeight, req.GenesisHash, req.BinaryHash, req.SpawnTime) diff --git a/x/ccv/provider/ibc_module.go b/x/ccv/provider/ibc_module.go index e573478197..8792236d14 100644 --- a/x/ccv/provider/ibc_module.go +++ b/x/ccv/provider/ibc_module.go @@ -16,6 +16,9 @@ import ( ) // OnChanOpenInit implements the IBCModule interface +// +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-coinit1 +// Spec Tag: [CCV-PCF-COINIT.1] func (am AppModule) OnChanOpenInit( ctx sdk.Context, order channeltypes.Order, @@ -30,6 +33,10 @@ func (am AppModule) OnChanOpenInit( } // OnChanOpenTry implements the IBCModule interface +// +// +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-cotry1 +// Spec tag: [CCV-PCF-COTRY.1] func (am AppModule) OnChanOpenTry( ctx sdk.Context, order channeltypes.Order, @@ -110,6 +117,9 @@ func validateCCVChannelParams( } // OnChanOpenAck implements the IBCModule interface +// +// See: 
https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-coack1 +// Spec tag: [CCV-PCF-COACK.1] func (am AppModule) OnChanOpenAck( ctx sdk.Context, portID, @@ -121,6 +131,9 @@ func (am AppModule) OnChanOpenAck( } // OnChanOpenConfirm implements the IBCModule interface +// +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-coconfirm1 +// Spec tag: [CCV-PCF-COCONFIRM.1] func (am AppModule) OnChanOpenConfirm( ctx sdk.Context, portID, @@ -153,7 +166,7 @@ func (am AppModule) OnChanCloseConfirm( } // OnRecvPacket implements the IBCModule interface. A successful acknowledgement -// is returned if the packet data is succesfully decoded and the receive application +// is returned if the packet data is successfully decoded and the receive application // logic returns without error. func (am AppModule) OnRecvPacket( ctx sdk.Context, diff --git a/x/ccv/provider/ibc_module_test.go b/x/ccv/provider/ibc_module_test.go new file mode 100644 index 0000000000..c2ebbe636f --- /dev/null +++ b/x/ccv/provider/ibc_module_test.go @@ -0,0 +1,336 @@ +package provider_test + +import ( + "testing" + + sdk "github.com/cosmos/cosmos-sdk/types" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" + capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" + conntypes "github.com/cosmos/ibc-go/v3/modules/core/03-connection/types" + channeltypes "github.com/cosmos/ibc-go/v3/modules/core/04-channel/types" + host "github.com/cosmos/ibc-go/v3/modules/core/24-host" + ibctmtypes "github.com/cosmos/ibc-go/v3/modules/light-clients/07-tendermint/types" + testkeeper "github.com/cosmos/interchain-security/testutil/keeper" + "github.com/cosmos/interchain-security/x/ccv/provider" + providerkeeper "github.com/cosmos/interchain-security/x/ccv/provider/keeper" + providertypes "github.com/cosmos/interchain-security/x/ccv/provider/types" + ccv "github.com/cosmos/interchain-security/x/ccv/types" + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/require" +) + +// TestOnChanOpenInit tests the provider's OnChanOpenInit method against spec. +// +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-coinit1 +// Spec Tag: [CCV-PCF-COINIT.1] +func TestOnChanOpenInit(t *testing.T) { + + providerKeeper, ctx, ctrl, _ := testkeeper.GetProviderKeeperAndCtx( + t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() + providerModule := provider.NewAppModule(&providerKeeper) + + // OnChanOpenInit must error for provider even with correct arguments + err := providerModule.OnChanOpenInit( + ctx, + channeltypes.ORDERED, + []string{"connection-1"}, + ccv.ProviderPortID, + "channel-1", + nil, + channeltypes.NewCounterparty(ccv.ConsumerPortID, "channel-1"), + ccv.Version, + ) + require.Error(t, err, "OnChanOpenInit must error on provider chain") +} + +// TestOnChanOpenTry validates the provider's OnChanOpenTry implementation against the spec. 
+// +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-cotry1 +// Spec tag: [CCV-PCF-COTRY.1] +func TestOnChanOpenTry(t *testing.T) { + + // Params for the ChanOpenTry method + type params struct { + ctx sdk.Context + order channeltypes.Order + connectionHops []string + portID string + channelID string + chanCap *capabilitytypes.Capability + counterparty channeltypes.Counterparty + counterpartyVersion string + } + + testCases := []struct { + name string + mutateParams func(*params, *providerkeeper.Keeper) + expPass bool + }{ + { + "success", func(*params, *providerkeeper.Keeper) {}, true, + }, + { + "invalid order", func(params *params, keeper *providerkeeper.Keeper) { + params.order = channeltypes.UNORDERED + }, false, + }, + { + "invalid port ID", func(params *params, keeper *providerkeeper.Keeper) { + params.portID = "bad port" + }, false, + }, + { + "invalid counter party port ID", func(params *params, keeper *providerkeeper.Keeper) { + params.counterparty.PortId = "bad port" + }, false, + }, + { + "invalid counter party version", func(params *params, keeper *providerkeeper.Keeper) { + params.counterpartyVersion = "invalidVersion" + }, false, + }, + { + "unexpected client ID mapped to chain ID", func(params *params, keeper *providerkeeper.Keeper) { + keeper.SetConsumerClientId( + params.ctx, + "consumerChainID", + "invalidClientID", + ) + }, false, + }, + { + "other CCV channel exists for this consumer chain", + func(params *params, keeper *providerkeeper.Keeper) { + keeper.SetChainToChannel( + params.ctx, + "consumerChainID", + "some existing channel ID", + ) + }, false, + }, + } + + for _, tc := range testCases { + + // Setup + providerKeeper, ctx, ctrl, mocks := testkeeper.GetProviderKeeperAndCtx( + t, testkeeper.NewInMemKeeperParams(t)) + providerModule := provider.NewAppModule(&providerKeeper) + + providerKeeper.SetPort(ctx, ccv.ProviderPortID) + providerKeeper.SetConsumerClientId(ctx, "consumerChainID", "clientIDToConsumer") + + // Instantiate valid params as default. Individual test cases mutate these as needed. 
+ params := params{ + ctx: ctx, + order: channeltypes.ORDERED, + connectionHops: []string{"connectionIDToConsumer"}, + portID: ccv.ProviderPortID, + channelID: "providerChannelID", + chanCap: &capabilitytypes.Capability{}, + counterparty: channeltypes.NewCounterparty(ccv.ConsumerPortID, "consumerChannelID"), + counterpartyVersion: ccv.Version, + } + + // Expected mock calls + moduleAcct := authtypes.ModuleAccount{BaseAccount: &authtypes.BaseAccount{}} + moduleAcct.BaseAccount.Address = authtypes.NewModuleAddress(authtypes.FeeCollectorName).String() + + // Number of calls is not asserted, since not all code paths are hit for failures + gomock.InOrder( + mocks.MockScopedKeeper.EXPECT().ClaimCapability( + params.ctx, params.chanCap, host.ChannelCapabilityPath(params.portID, params.channelID)).AnyTimes(), + mocks.MockConnectionKeeper.EXPECT().GetConnection(ctx, "connectionIDToConsumer").Return( + conntypes.ConnectionEnd{ClientId: "clientIDToConsumer"}, true, + ).AnyTimes(), + mocks.MockClientKeeper.EXPECT().GetClientState(ctx, "clientIDToConsumer").Return( + &ibctmtypes.ClientState{ChainId: "consumerChainID"}, true, + ).AnyTimes(), + mocks.MockAccountKeeper.EXPECT().GetModuleAccount(ctx, "").Return(&moduleAcct).AnyTimes(), + ) + + tc.mutateParams(¶ms, &providerKeeper) + + metadata, err := providerModule.OnChanOpenTry( + params.ctx, + params.order, + params.connectionHops, + params.portID, + params.channelID, + params.chanCap, + params.counterparty, + params.counterpartyVersion, + ) + + if tc.expPass { + require.NoError(t, err) + md := &providertypes.HandshakeMetadata{} + err = md.Unmarshal([]byte(metadata)) + require.NoError(t, err) + require.Equal(t, moduleAcct.BaseAccount.Address, md.ProviderFeePoolAddr, + "returned dist account metadata must match expected") + require.Equal(t, ccv.Version, md.Version, "returned ccv version metadata must match expected") + ctrl.Finish() + } else { + require.Error(t, err) + } + } +} + +// TestOnChanOpenAck tests the provider's OnChanOpenAck method against spec. +// +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-coack1 +// Spec tag: [CCV-PCF-COACK.1] +func TestOnChanOpenAck(t *testing.T) { + providerKeeper, ctx, ctrl, _ := testkeeper.GetProviderKeeperAndCtx( + t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() + providerModule := provider.NewAppModule(&providerKeeper) + + // OnChanOpenAck must error for provider even with correct arguments + err := providerModule.OnChanOpenAck( + ctx, + ccv.ProviderPortID, + "providerChannelID", + "consumerChannelID", + ccv.Version, + ) + require.Error(t, err, "OnChanOpenAck must error on provider chain") +} + +// TestOnChanOpenConfirm tests the provider's OnChanOpenConfirm method against the spec. +// +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-coconfirm1 +// Spec tag: [CCV-PCF-COCONFIRM.1] +// +// TODO: Validate spec requirement that duplicate channels attempting to become canonical CCV channel are closed. 
+// See: https://github.com/cosmos/interchain-security/issues/327 +func TestOnChanOpenConfirm(t *testing.T) { + + testCases := []struct { + name string + mockExpectations func(sdk.Context, testkeeper.MockedKeepers) []*gomock.Call + setDuplicateChannel bool + expPass bool + }{ + { + name: "channel not found", + mockExpectations: func(ctx sdk.Context, mocks testkeeper.MockedKeepers) []*gomock.Call { + return []*gomock.Call{ + mocks.MockChannelKeeper.EXPECT().GetChannel( + ctx, ccv.ProviderPortID, gomock.Any()).Return(channeltypes.Channel{}, + false, // Found is false + ).Times(1), + } + }, + expPass: false, + }, + { + name: "too many connection hops", + mockExpectations: func(ctx sdk.Context, mocks testkeeper.MockedKeepers) []*gomock.Call { + return []*gomock.Call{ + mocks.MockChannelKeeper.EXPECT().GetChannel( + ctx, ccv.ProviderPortID, gomock.Any()).Return(channeltypes.Channel{ + State: channeltypes.OPEN, + ConnectionHops: []string{"connectionID", "another"}, // Two hops is too many + }, false, + ).Times(1), + } + }, + expPass: false, + }, + { + name: "connection not found", + mockExpectations: func(ctx sdk.Context, mocks testkeeper.MockedKeepers) []*gomock.Call { + return []*gomock.Call{ + mocks.MockChannelKeeper.EXPECT().GetChannel( + ctx, ccv.ProviderPortID, gomock.Any()).Return(channeltypes.Channel{ + State: channeltypes.OPEN, + ConnectionHops: []string{"connectionID"}, + }, true, + ).Times(1), + mocks.MockConnectionKeeper.EXPECT().GetConnection(ctx, "connectionID").Return( + conntypes.ConnectionEnd{}, false, // Found is false + ).Times(1), + } + }, + expPass: false, + }, + { + name: "client state not found", + mockExpectations: func(ctx sdk.Context, mocks testkeeper.MockedKeepers) []*gomock.Call { + return []*gomock.Call{ + mocks.MockChannelKeeper.EXPECT().GetChannel(ctx, ccv.ProviderPortID, gomock.Any()).Return( + channeltypes.Channel{ + State: channeltypes.OPEN, + ConnectionHops: []string{"connectionID"}, + }, + true, + ).Times(1), + mocks.MockConnectionKeeper.EXPECT().GetConnection(ctx, "connectionID").Return( + conntypes.ConnectionEnd{ClientId: "clientID"}, true, + ).Times(1), + mocks.MockClientKeeper.EXPECT().GetClientState(ctx, "clientID").Return( + nil, false, // Found is false + ).Times(1), + } + }, + expPass: false, + }, + { + name: "CCV channel already exists, error returned, but dup channel is not closed", + mockExpectations: func(ctx sdk.Context, mocks testkeeper.MockedKeepers) []*gomock.Call { + // Error is returned after all expected mock calls are hit for SetConsumerChain + return testkeeper.GetMocksForSetConsumerChain(ctx, &mocks, "consumerChainID") + }, + setDuplicateChannel: true, // Only case where a duplicate channel is set up + expPass: false, + }, + { + name: "success", + mockExpectations: func(ctx sdk.Context, mocks testkeeper.MockedKeepers) []*gomock.Call { + // Full SetConsumerChain method should run without error, hitting all expected mocks + return testkeeper.GetMocksForSetConsumerChain(ctx, &mocks, "consumerChainID") + }, + expPass: true, + }, + } + + for _, tc := range testCases { + + providerKeeper, ctx, ctrl, mocks := testkeeper.GetProviderKeeperAndCtx( + t, testkeeper.NewInMemKeeperParams(t)) + + gomock.InOrder(tc.mockExpectations(ctx, mocks)...)
+ + if tc.setDuplicateChannel { + providerKeeper.SetChainToChannel(ctx, "consumerChainID", "existingChannelID") + } + + providerModule := provider.NewAppModule(&providerKeeper) + + err := providerModule.OnChanOpenConfirm(ctx, "providerPortID", "channelID") + + if tc.expPass { + + require.NoError(t, err) + // Validate channel mappings + channelID, found := providerKeeper.GetChainToChannel(ctx, "consumerChainID") + require.True(t, found) + require.Equal(t, "channelID", channelID) + + chainID, found := providerKeeper.GetChannelToChain(ctx, "channelID") + require.True(t, found) + require.Equal(t, "consumerChainID", chainID) + + height, found := providerKeeper.GetInitChainHeight(ctx, "consumerChainID") + require.True(t, found) + require.Equal(t, ctx.BlockHeight(), int64(height)) + + } else { + require.Error(t, err) + } + ctrl.Finish() + } +} diff --git a/x/ccv/provider/keeper/genesis.go b/x/ccv/provider/keeper/genesis.go index adba5471a8..3f91a0f02b 100644 --- a/x/ccv/provider/keeper/genesis.go +++ b/x/ccv/provider/keeper/genesis.go @@ -2,6 +2,7 @@ package keeper import ( "fmt" + "time" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/interchain-security/x/ccv/provider/types" @@ -14,7 +15,7 @@ func (k Keeper) InitGenesis(ctx sdk.Context, genState *types.GenesisState) { // Only try to bind to port if it is not already bound, since we may already own // port capability from capability InitGenesis if !k.IsBound(ctx, ccv.ProviderPortID) { - // transfer module binds to the transfer port on InitChain + // CCV module binds to the provider port on InitChain // and claims the returned capability err := k.BindPort(ctx, ccv.ProviderPortID) if err != nil { @@ -22,39 +23,146 @@ func (k Keeper) InitGenesis(ctx sdk.Context, genState *types.GenesisState) { } } + k.SetValidatorSetUpdateId(ctx, genState.ValsetUpdateId) + for _, v2h := range genState.ValsetUpdateIdToHeight { + k.SetValsetUpdateBlockHeight(ctx, v2h.ValsetUpdateId, v2h.Height) + } + + for _, cccp := range genState.ConsumerAdditionProposals { + // prevent implicit memory aliasing + cccp := cccp + if err := k.SetPendingConsumerAdditionProp(ctx, &cccp); err != nil { + panic(fmt.Errorf("pending create consumer chain proposal could not be persisted: %w", err)) + } + } + for _, sccp := range genState.ConsumerRemovalProposals { + k.SetPendingConsumerRemovalProp(ctx, sccp.ChainId, sccp.StopTime) + } + for _, ubdOp := range genState.UnbondingOps { + if err := k.SetUnbondingOp(ctx, ubdOp); err != nil { + panic(fmt.Errorf("unbonding op could not be persisted: %w", err)) + } + } + + if genState.MatureUnbondingOps != nil { + if err := k.AppendMaturedUnbondingOps(ctx, genState.MatureUnbondingOps.Ids); err != nil { + panic(err) + } + } + // Set initial state for each consumer chain - for _, cc := range genState.ConsumerStates { - k.SetChainToChannel(ctx, cc.ChainId, cc.ChannelId) - k.SetChannelToChain(ctx, cc.ChannelId, cc.ChainId) + for _, cs := range genState.ConsumerStates { + chainID := cs.ChainId + k.SetConsumerClientId(ctx, chainID, cs.ClientId) + if err := k.SetConsumerGenesis(ctx, chainID, cs.ConsumerGenesis); err != nil { + panic(fmt.Errorf("consumer chain genesis could not be persisted: %w", err)) + } + if cs.LockUnbondingOnTimeout { + k.SetLockUnbondingOnTimeout(ctx, chainID) + } + // check if the CCV channel was established + if cs.ChannelId != "" { + k.SetChannelToChain(ctx, cs.ChannelId, chainID) + k.SetChainToChannel(ctx, chainID, cs.ChannelId) + k.SetInitChainHeight(ctx, chainID, cs.InitialHeight) + + k.SetSlashAcks(ctx, cs.ChainId, 
cs.SlashDowntimeAck) + for _, ubdOpIndex := range cs.UnbondingOpsIndex { + k.SetUnbondingOpIndex(ctx, chainID, ubdOpIndex.ValsetUpdateId, ubdOpIndex.UnbondingOpIndex) + } + } else { + for _, vsc := range cs.PendingValsetChanges { + k.AppendPendingVSC(ctx, chainID, vsc) + } + } } k.SetParams(ctx, genState.Params) } func (k Keeper) ExportGenesis(ctx sdk.Context) *types.GenesisState { - store := ctx.KVStore(k.storeKey) - iterator := sdk.KVStorePrefixIterator(store, []byte{types.ChannelToChainBytePrefix}) - defer iterator.Close() - - if !iterator.Valid() { - return types.DefaultGenesisState() - } - var consumerStates []types.ConsumerState + // export states for each consumer chains + k.IterateConsumerChains(ctx, func(ctx sdk.Context, chainID, clientID string) bool { + gen, found := k.GetConsumerGenesis(ctx, chainID) + if !found { + panic(fmt.Errorf("cannot find genesis for consumer chain %s with client %s", chainID, clientID)) + } - for ; iterator.Valid(); iterator.Next() { - // channelID is extracted from bytes in key following the single byte prefix - channelID := string(iterator.Key()[1:]) - chainID := string(iterator.Value()) + // initial consumer chain states + cs := types.ConsumerState{ + ChainId: chainID, + ClientId: clientID, + ConsumerGenesis: gen, + LockUnbondingOnTimeout: k.GetLockUnbondingOnTimeout(ctx, chainID), + } - cc := types.ConsumerState{ - ChainId: chainID, - ChannelId: channelID, + // try to find channel id for the current consumer chain + channelId, found := k.GetChainToChannel(ctx, chainID) + if found { + cs.ChannelId = channelId + cs.InitialHeight, found = k.GetInitChainHeight(ctx, chainID) + if !found { + panic(fmt.Errorf("cannot find genesis for consumer chain %s with client %s", chainID, clientID)) + } + cs.SlashDowntimeAck = k.GetSlashAcks(ctx, chainID) + k.IterateOverUnbondingOpIndex(ctx, chainID, func(vscID uint64, ubdIndex []uint64) bool { + cs.UnbondingOpsIndex = append(cs.UnbondingOpsIndex, + types.UnbondingOpIndex{ValsetUpdateId: vscID, UnbondingOpIndex: ubdIndex}, + ) + return true + }) + } else { + if pendingVSC, found := k.GetPendingVSCs(ctx, chainID); found { + cs.PendingValsetChanges = pendingVSC + } } - consumerStates = append(consumerStates, cc) + + consumerStates = append(consumerStates, cs) + return true + }) + + // export provider chain states + vscID := k.GetValidatorSetUpdateId(ctx) + vscIDToHeights := []types.ValsetUpdateIdToHeight{} + k.IterateValsetUpdateBlockHeight(ctx, func(vscID, height uint64) bool { + vscIDToHeights = append(vscIDToHeights, types.ValsetUpdateIdToHeight{ValsetUpdateId: vscID, Height: height}) + return true + }) + + ubdOps := []ccv.UnbondingOp{} + k.IterateOverUnbondingOps(ctx, func(id uint64, ubdOp ccv.UnbondingOp) bool { + ubdOps = append(ubdOps, ubdOp) + return true + }) + + matureUbdOps, err := k.GetMaturedUnbondingOps(ctx) + if err != nil { + panic(err) } + addProps := []types.ConsumerAdditionProposal{} + k.IteratePendingConsumerAdditionProps(ctx, func(_ time.Time, prop types.ConsumerAdditionProposal) bool { + addProps = append(addProps, prop) + return true + }) + + remProps := []types.ConsumerRemovalProposal{} + k.IteratePendingConsumerRemovalProps(ctx, func(_ time.Time, prop types.ConsumerRemovalProposal) bool { + remProps = append(remProps, prop) + return true + }) + params := k.GetParams(ctx) - return types.NewGenesisState(consumerStates, params) + return types.NewGenesisState( + vscID, + vscIDToHeights, + consumerStates, + ubdOps, + &ccv.MaturedUnbondingOps{Ids: matureUbdOps}, + addProps, + remProps, + params, + ) } 
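For reference, the rewritten ExportGenesis above assembles its output with the new eight-argument NewGenesisState constructor. The following is a minimal, illustrative sketch (not part of the patch itself) of building an otherwise empty provider genesis with default parameters; it assumes only the constructor signature and types shown in this diff (providertypes.NewGenesisState, ccv.MaturedUnbondingOps, providertypes.DefaultParams), and the package/function names are placeholders.

package example

import (
	providertypes "github.com/cosmos/interchain-security/x/ccv/provider/types"
	ccv "github.com/cosmos/interchain-security/x/ccv/types"
)

// buildEmptyProviderGenesis mirrors the argument order used by ExportGenesis:
// valset update id, vscID-to-height mappings, consumer states, unbonding ops,
// matured unbonding ops, pending addition props, pending removal props, params.
func buildEmptyProviderGenesis() *providertypes.GenesisState {
	return providertypes.NewGenesisState(
		0,                          // valset update id
		nil,                        // valset update id -> block height mappings
		nil,                        // per consumer chain states
		nil,                        // unbonding ops
		&ccv.MaturedUnbondingOps{}, // no matured unbonding op ids yet
		nil,                        // pending consumer addition proposals
		nil,                        // pending consumer removal proposals
		providertypes.DefaultParams(),
	)
}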
diff --git a/x/ccv/provider/keeper/genesis_test.go b/x/ccv/provider/keeper/genesis_test.go new file mode 100644 index 0000000000..2ef06517d3 --- /dev/null +++ b/x/ccv/provider/keeper/genesis_test.go @@ -0,0 +1,153 @@ +package keeper_test + +import ( + "testing" + "time" + + sdk "github.com/cosmos/cosmos-sdk/types" + host "github.com/cosmos/ibc-go/v3/modules/core/24-host" + testkeeper "github.com/cosmos/interchain-security/testutil/keeper" + + consumertypes "github.com/cosmos/interchain-security/x/ccv/consumer/types" + "github.com/cosmos/interchain-security/x/ccv/provider/keeper" + "github.com/cosmos/interchain-security/x/ccv/provider/types" + providertypes "github.com/cosmos/interchain-security/x/ccv/provider/types" + ccv "github.com/cosmos/interchain-security/x/ccv/types" + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/require" +) + +func TestIniAndExportGenesis(t *testing.T) { + // create a provider chain genesis populated with two consumer chains + cChainIDs := []string{"c0", "c1"} + expClientID := "client" + oneHourFromNow := time.Now().UTC().Add(time.Hour) + initHeight, vscID := uint64(5), uint64(1) + ubdIndex := []uint64{0, 1, 2} + params := providertypes.DefaultParams() + + // create genesis struct + pGenesis := providertypes.NewGenesisState(vscID, + []providertypes.ValsetUpdateIdToHeight{{ValsetUpdateId: vscID, Height: initHeight}}, + []providertypes.ConsumerState{ + providertypes.NewConsumerStates( + cChainIDs[0], + expClientID, + "channel", + initHeight, + true, + *consumertypes.DefaultGenesisState(), + []providertypes.UnbondingOpIndex{ + {ValsetUpdateId: vscID, UnbondingOpIndex: ubdIndex}, + }, + nil, + []string{"slashedValidatorConsAddress"}, + ), + providertypes.NewConsumerStates( + cChainIDs[1], + expClientID, + "", + 0, + false, + *consumertypes.DefaultGenesisState(), + nil, + []ccv.ValidatorSetChangePacketData{{ValsetUpdateId: vscID}}, + nil, + ), + }, + []ccv.UnbondingOp{{ + Id: vscID, + UnbondingConsumerChains: []string{cChainIDs[0]}, + }}, + &ccv.MaturedUnbondingOps{Ids: ubdIndex}, + []providertypes.ConsumerAdditionProposal{types.ConsumerAdditionProposal{ + ChainId: cChainIDs[0], + SpawnTime: oneHourFromNow, + }}, + []providertypes.ConsumerRemovalProposal{types.ConsumerRemovalProposal{ + ChainId: cChainIDs[0], + StopTime: oneHourFromNow, + }}, + params, + ) + + // Instantiate in-mem provider keeper with mocks + pk, ctx, ctrl, mocks := testkeeper.GetProviderKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() + + gomock.InOrder( + mocks.MockScopedKeeper.EXPECT().GetCapability( + ctx, host.PortPath(ccv.ProviderPortID), + ).Return(nil, true).Times(1), + ) + + // init provider chain + pk.InitGenesis(ctx, pGenesis) + + // check local provider chain states + ubdOps, found := pk.GetUnbondingOp(ctx, vscID) + require.True(t, found) + require.Equal(t, pGenesis.UnbondingOps[0], ubdOps) + matureUbdOps, err := pk.GetMaturedUnbondingOps(ctx) + require.NoError(t, err) + require.Equal(t, ubdIndex, matureUbdOps) + chainID, found := pk.GetChannelToChain(ctx, pGenesis.ConsumerStates[0].ChannelId) + require.True(t, found) + require.Equal(t, cChainIDs[0], chainID) + require.Equal(t, vscID, pk.GetValidatorSetUpdateId(ctx)) + height, found := pk.GetValsetUpdateBlockHeight(ctx, vscID) + require.True(t, found) + require.Equal(t, initHeight, height) + addProp, found := pk.GetPendingConsumerAdditionProp(ctx, oneHourFromNow, cChainIDs[0]) + require.True(t, found) + require.Equal(t, pGenesis.ConsumerAdditionProposals[0], addProp) + require.True(t, 
pk.GetPendingConsumerRemovalProp(ctx, cChainIDs[0], oneHourFromNow)) + require.Equal(t, pGenesis.Params, pk.GetParams(ctx)) + + // check provider chain's consumer chain states + assertConsumerChainStates(ctx, t, pk, pGenesis.ConsumerStates...) + + // check the exported genesis + require.Equal(t, pGenesis, pk.ExportGenesis(ctx)) + +} + +func assertConsumerChainStates(ctx sdk.Context, t *testing.T, pk keeper.Keeper, consumerStates ...providertypes.ConsumerState) { + for _, cs := range consumerStates { + chainID := cs.ChainId + gen, found := pk.GetConsumerGenesis(ctx, chainID) + require.True(t, found) + require.Equal(t, *consumertypes.DefaultGenesisState(), gen) + + clientID, found := pk.GetConsumerClientId(ctx, chainID) + require.True(t, found) + require.Equal(t, cs.ClientId, clientID) + + if expChan := cs.ChannelId; expChan != "" { + gotChan, found := pk.GetChainToChannel(ctx, chainID) + require.True(t, found) + require.Equal(t, expChan, gotChan) + } + + if cs.InitialHeight != 0 { + _, found = pk.GetInitChainHeight(ctx, chainID) + require.True(t, found) + } + + require.Equal(t, cs.LockUnbondingOnTimeout, pk.GetLockUnbondingOnTimeout(ctx, chainID)) + + if expVSC := cs.GetPendingValsetChanges(); expVSC != nil { + gotVSC, found := pk.GetPendingVSCs(ctx, chainID) + require.True(t, found) + require.Equal(t, expVSC, gotVSC) + } + + for _, ubdOpIdx := range cs.UnbondingOpsIndex { + ubdIndex, found := pk.GetUnbondingOpIndex(ctx, chainID, ubdOpIdx.ValsetUpdateId) + require.True(t, found) + require.Equal(t, ubdOpIdx.UnbondingOpIndex, ubdIndex) + } + + require.Equal(t, cs.SlashDowntimeAck, pk.GetSlashAcks(ctx, chainID)) + } +} diff --git a/x/ccv/provider/keeper/keeper.go b/x/ccv/provider/keeper/keeper.go index 52980b321e..18d40b7fea 100644 --- a/x/ccv/provider/keeper/keeper.go +++ b/x/ccv/provider/keeper/keeper.go @@ -77,26 +77,26 @@ func (k Keeper) Logger(ctx sdk.Context) log.Logger { return ctx.Logger().With("module", "x/"+host.ModuleName+"-"+types.ModuleName) } -// IsBound checks if the transfer module is already bound to the desired port +// IsBound checks if the CCV module is already bound to the desired port func (k Keeper) IsBound(ctx sdk.Context, portID string) bool { _, ok := k.scopedKeeper.GetCapability(ctx, host.PortPath(portID)) return ok } -// BindPort defines a wrapper function for the ort Keeper's function in +// BindPort defines a wrapper function for the port Keeper's function in // order to expose it to module's InitGenesis function func (k Keeper) BindPort(ctx sdk.Context, portID string) error { cap := k.portKeeper.BindPort(ctx, portID) return k.ClaimCapability(ctx, cap, host.PortPath(portID)) } -// GetPort returns the portID for the transfer module. Used in ExportGenesis +// GetPort returns the portID for the CCV module. Used in ExportGenesis func (k Keeper) GetPort(ctx sdk.Context) string { store := ctx.KVStore(k.storeKey) return string(store.Get(types.PortKey())) } -// SetPort sets the portID for the transfer module. Used in InitGenesis +// SetPort sets the portID for the CCV module. Used in InitGenesis func (k Keeper) SetPort(ctx sdk.Context, portID string) { store := ctx.KVStore(k.storeKey) store.Set(types.PortKey(), []byte(portID)) @@ -135,10 +135,10 @@ func (k Keeper) DeleteChainToChannel(ctx sdk.Context, chainID string) { store.Delete(types.ChainToChannelKey(chainID)) } -// IterateConsumerChains iterates over all of the consumer chains that the provider module controls. 
-// It calls the provided callback function which takes in a chainID and returns +// IterateConsumerChains iterates over all of the consumer chains that the provider module controls +// It calls the provided callback function which takes in a chainID and client ID to return // a stop boolean which will stop the iteration. -func (k Keeper) IterateConsumerChains(ctx sdk.Context, cb func(ctx sdk.Context, chainID string) (stop bool)) { +func (k Keeper) IterateConsumerChains(ctx sdk.Context, cb func(ctx sdk.Context, chainID, clientID string) (stop bool)) { store := ctx.KVStore(k.storeKey) iterator := sdk.KVStorePrefixIterator(store, []byte{types.ChainToClientBytePrefix}) defer iterator.Close() @@ -150,9 +150,9 @@ func (k Keeper) IterateConsumerChains(ctx sdk.Context, cb func(ctx sdk.Context, for ; iterator.Valid(); iterator.Next() { // remove 1 byte prefix from key to retrieve chainID chainID := string(iterator.Key()[1:]) + clientID := string(iterator.Value()) - stop := cb(ctx, chainID) - if stop { + if !cb(ctx, chainID, clientID) { return } } @@ -228,6 +228,11 @@ func (k Keeper) GetConsumerGenesis(ctx sdk.Context, chainID string) (consumertyp return data, true } +func (k Keeper) DeleteConsumerGenesis(ctx sdk.Context, chainID string) { + store := ctx.KVStore(k.storeKey) + store.Delete(types.ConsumerGenesisKey(chainID)) +} + // VerifyConsumerChain verifies that the chain trying to connect on the channel handshake // is the expected consumer chain. func (k Keeper) VerifyConsumerChain(ctx sdk.Context, channelID string, connectionHops []string) error { @@ -266,14 +271,14 @@ func (k Keeper) SetConsumerChain(ctx sdk.Context, channelID string) error { return sdkerrors.Wrap(channeltypes.ErrTooManyConnectionHops, "must have direct connection to consumer chain") } connectionID := channel.ConnectionHops[0] - chainID, tmClient, err := k.getUnderlyingClient(ctx, connectionID) + _, tmClient, err := k.getUnderlyingClient(ctx, connectionID) if err != nil { return err } // Verify that there isn't already a CCV channel for the consumer chain - // If there is, then close the channel. 
- if prevChannel, ok := k.GetChannelToChain(ctx, chainID); ok { - return sdkerrors.Wrapf(ccv.ErrDuplicateChannel, "CCV channel with ID: %s already created for consumer chain %s", prevChannel, chainID) + chainID := tmClient.ChainId + if prevChannelID, ok := k.GetChainToChannel(ctx, chainID); ok { + return sdkerrors.Wrapf(ccv.ErrDuplicateChannel, "CCV channel with ID: %s already created for consumer chain %s", prevChannelID, chainID) } // the CCV channel is established: @@ -312,13 +317,35 @@ func (k Keeper) DeleteUnbondingOp(ctx sdk.Context, id uint64) { store.Delete(types.UnbondingOpKey(id)) } +func (k Keeper) IterateOverUnbondingOps(ctx sdk.Context, cb func(id uint64, ubdOp ccv.UnbondingOp) bool) { + store := ctx.KVStore(k.storeKey) + iterator := sdk.KVStorePrefixIterator(store, []byte{types.UnbondingOpBytePrefix}) + + defer iterator.Close() + for ; iterator.Valid(); iterator.Next() { + id := binary.BigEndian.Uint64(iterator.Key()[1:]) + bz := iterator.Value() + if bz == nil { + panic(fmt.Errorf("unbonding operation is nil for id %d", id)) + } + ubdOp := types.MustUnmarshalUnbondingOp(k.cdc, bz) + + if !cb(id, ubdOp) { + break + } + } +} + // This index allows retreiving UnbondingDelegationEntries by chainID and valsetUpdateID func (k Keeper) SetUnbondingOpIndex(ctx sdk.Context, chainID string, valsetUpdateID uint64, IDs []uint64) { store := ctx.KVStore(k.storeKey) - bz, err := json.Marshal(IDs) + index := ccv.UnbondingOpsIndex{ + Ids: IDs, + } + bz, err := index.Marshal() if err != nil { - panic("Failed to JSON marshal") + panic("Failed to marshal UnbondingOpsIndex") } store.Set(types.UnbondingOpIndexKey(chainID, valsetUpdateID), bz) @@ -340,13 +367,12 @@ func (k Keeper) IterateOverUnbondingOpIndex(ctx sdk.Context, chainID string, cb } vscID = binary.BigEndian.Uint64(vscBytes) - var ids []uint64 - err = json.Unmarshal(iterator.Value(), &ids) - if err != nil { + var index ccv.UnbondingOpsIndex + if err = index.Unmarshal(iterator.Value()); err != nil { panic("Failed to unmarshal JSON") } - if !cb(vscID, ids) { + if !cb(vscID, index.GetIds()) { return } } @@ -361,13 +387,12 @@ func (k Keeper) GetUnbondingOpIndex(ctx sdk.Context, chainID string, valsetUpdat return []uint64{}, false } - var ids []uint64 - err := json.Unmarshal(bz, &ids) - if err != nil { - panic("Failed to JSON unmarshal") + var idx ccv.UnbondingOpsIndex + if err := idx.Unmarshal(bz); err != nil { + panic("Failed to unmarshal UnbondingOpsIndex") } - return ids, true + return idx.GetIds(), true } // This index allows retreiving UnbondingDelegationEntries by chainID and valsetUpdateID @@ -401,11 +426,12 @@ func (k Keeper) GetMaturedUnbondingOps(ctx sdk.Context) (ids []uint64, err error if bz == nil { return nil, nil } - err = json.Unmarshal(bz, &ids) - if err != nil { + + var ops ccv.MaturedUnbondingOps + if err := ops.Unmarshal(bz); err != nil { return nil, err } - return ids, nil + return ops.GetIds(), nil } // AppendMaturedUnbondingOps adds a list of ids to the list of matured unbonding operation ids @@ -417,11 +443,13 @@ func (k Keeper) AppendMaturedUnbondingOps(ctx sdk.Context, ids []uint64) error { if err != nil { return err } - // append works also on a nil list - existingIds = append(existingIds, ids...) 
+ + maturedOps := ccv.MaturedUnbondingOps{ + Ids: append(existingIds, ids...), + } store := ctx.KVStore(k.storeKey) - bz, err := json.Marshal(existingIds) + bz, err := maturedOps.Marshal() if err != nil { return err } @@ -440,21 +468,27 @@ func (k Keeper) EmptyMaturedUnbondingOps(ctx sdk.Context) ([]uint64, error) { return ids, nil } -func (k Keeper) getUnderlyingClient(ctx sdk.Context, connectionID string) (string, *ibctmtypes.ClientState, error) { - // Retrieve the underlying client state. +// Retrieves the underlying client state corresponding to a connection ID. +func (k Keeper) getUnderlyingClient(ctx sdk.Context, connectionID string) ( + clientID string, tmClient *ibctmtypes.ClientState, err error) { + conn, ok := k.connectionKeeper.GetConnection(ctx, connectionID) if !ok { - return "", nil, sdkerrors.Wrapf(conntypes.ErrConnectionNotFound, "connection not found for connection ID: %s", connectionID) + return "", nil, sdkerrors.Wrapf(conntypes.ErrConnectionNotFound, + "connection not found for connection ID: %s", connectionID) } - client, ok := k.clientKeeper.GetClientState(ctx, conn.ClientId) + clientID = conn.ClientId + clientState, ok := k.clientKeeper.GetClientState(ctx, clientID) if !ok { - return "", nil, sdkerrors.Wrapf(clienttypes.ErrClientNotFound, "client not found for client ID: %s", conn.ClientId) + return "", nil, sdkerrors.Wrapf(clienttypes.ErrClientNotFound, + "client not found for client ID: %s", conn.ClientId) } - tmClient, ok := client.(*ibctmtypes.ClientState) + tmClient, ok = clientState.(*ibctmtypes.ClientState) if !ok { - return "", nil, sdkerrors.Wrapf(clienttypes.ErrInvalidClientType, "invalid client type. expected %s, got %s", ibcexported.Tendermint, client.ClientType()) + return "", nil, sdkerrors.Wrapf(clienttypes.ErrInvalidClientType, + "invalid client type. 
expected %s, got %s", ibcexported.Tendermint, clientState.ClientType()) } - return conn.ClientId, tmClient, nil + return clientID, tmClient, nil } // chanCloseInit defines a wrapper function for the channel Keeper's function @@ -521,7 +555,7 @@ func (k *Keeper) Hooks() StakingHooks { func (h StakingHooks) AfterUnbondingInitiated(ctx sdk.Context, ID uint64) { var consumerChainIDS []string - h.k.IterateConsumerChains(ctx, func(ctx sdk.Context, chainID string) (stop bool) { + h.k.IterateConsumerChains(ctx, func(ctx sdk.Context, chainID, clientID string) (stop bool) { consumerChainIDS = append(consumerChainIDS, chainID) return false }) @@ -571,6 +605,23 @@ func (k Keeper) GetValsetUpdateBlockHeight(ctx sdk.Context, valsetUpdateId uint6 return binary.BigEndian.Uint64(bz), true } +// IterateSlashAcks iterates through the slash acks set in the store +func (k Keeper) IterateValsetUpdateBlockHeight(ctx sdk.Context, cb func(valsetUpdateId, height uint64) bool) { + store := ctx.KVStore(k.storeKey) + iterator := sdk.KVStorePrefixIterator(store, []byte{types.ValsetUpdateBlockHeightBytePrefix}) + + defer iterator.Close() + for ; iterator.Valid(); iterator.Next() { + + valsetUpdateId := binary.BigEndian.Uint64(iterator.Key()[1:]) + height := binary.BigEndian.Uint64(iterator.Value()) + + if !cb(valsetUpdateId, height) { + return + } + } +} + // DeleteValsetUpdateBlockHeight deletes the block height value for a given vaset update id func (k Keeper) DeleteValsetUpdateBlockHeight(ctx sdk.Context, valsetUpdateId uint64) { store := ctx.KVStore(k.storeKey) @@ -580,12 +631,15 @@ func (k Keeper) DeleteValsetUpdateBlockHeight(ctx sdk.Context, valsetUpdateId ui // SetSlashAcks sets the slash acks under the given chain ID func (k Keeper) SetSlashAcks(ctx sdk.Context, chainID string, acks []string) { store := ctx.KVStore(k.storeKey) - buf := &bytes.Buffer{} - err := json.NewEncoder(buf).Encode(acks) + + sa := types.SlashAcks{ + Addresses: acks, + } + bz, err := sa.Marshal() if err != nil { - panic("failed to encode json") + panic("failed to marshal SlashAcks") } - store.Set(types.SlashAcksKey(chainID), buf.Bytes()) + store.Set(types.SlashAcksKey(chainID), bz) } // GetSlashAcks returns the slash acks stored under the given chain ID @@ -595,15 +649,12 @@ func (k Keeper) GetSlashAcks(ctx sdk.Context, chainID string) []string { if bz == nil { return nil } - var acks []string - buf := bytes.NewBuffer(bz) - - err := json.NewDecoder(buf).Decode(&acks) - if err != nil { + var acks types.SlashAcks + if err := acks.Unmarshal(bz); err != nil { panic(fmt.Errorf("failed to decode json: %w", err)) } - return acks + return acks.GetAddresses() } // EmptySlashAcks empties and returns the slash acks for a given chain ID @@ -627,15 +678,13 @@ func (k Keeper) IterateSlashAcks(ctx sdk.Context, cb func(chainID string, acks [ chainID := string(iterator.Key()[1:]) - var data []string - buf := bytes.NewBuffer(iterator.Value()) - - err := json.NewDecoder(buf).Decode(&data) + var sa types.SlashAcks + err := sa.Unmarshal(iterator.Value()) if err != nil { - panic(fmt.Errorf("failed to decode json: %w", err)) + panic(fmt.Errorf("failed to unmarshal SlashAcks: %w", err)) } - if !cb(chainID, data) { + if !cb(chainID, sa.GetAddresses()) { return } } diff --git a/x/ccv/provider/keeper/keeper_test.go b/x/ccv/provider/keeper/keeper_test.go index 6bd62e2b8d..c201f53e33 100644 --- a/x/ccv/provider/keeper/keeper_test.go +++ b/x/ccv/provider/keeper/keeper_test.go @@ -3,7 +3,6 @@ package keeper_test import ( "testing" - capabilitykeeper 
"github.com/cosmos/cosmos-sdk/x/capability/keeper" evidencetypes "github.com/cosmos/cosmos-sdk/x/evidence/types" "github.com/golang/mock/gomock" @@ -24,7 +23,8 @@ import ( // TestValsetUpdateBlockHeight tests the getter, setter, and deletion methods for valset updates mapped to block height func TestValsetUpdateBlockHeight(t *testing.T) { - providerKeeper, ctx := testkeeper.GetProviderKeeperAndCtx(t) + providerKeeper, ctx, ctrl, _ := testkeeper.GetProviderKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() blockHeight, found := providerKeeper.GetValsetUpdateBlockHeight(ctx, uint64(0)) require.False(t, found) @@ -49,7 +49,8 @@ func TestValsetUpdateBlockHeight(t *testing.T) { // TestSlashAcks tests the getter, setter, iteration, and deletion methods for stored slash acknowledgements func TestSlashAcks(t *testing.T) { - providerKeeper, ctx := testkeeper.GetProviderKeeperAndCtx(t) + providerKeeper, ctx, ctrl, _ := testkeeper.GetProviderKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() var chainsAcks [][]string @@ -91,7 +92,8 @@ func TestSlashAcks(t *testing.T) { // TestAppendSlashAck tests the append method for stored slash acknowledgements func TestAppendSlashAck(t *testing.T) { - providerKeeper, ctx := testkeeper.GetProviderKeeperAndCtx(t) + providerKeeper, ctx, ctrl, _ := testkeeper.GetProviderKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() p := []string{"alice", "bob", "charlie"} chains := []string{"c1", "c2"} @@ -110,7 +112,8 @@ func TestAppendSlashAck(t *testing.T) { // TestPendingVSCs tests the getter, appending, and deletion methods for stored pending VSCs func TestPendingVSCs(t *testing.T) { - providerKeeper, ctx := testkeeper.GetProviderKeeperAndCtx(t) + providerKeeper, ctx, ctrl, _ := testkeeper.GetProviderKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() chainID := "consumer" @@ -164,7 +167,8 @@ func TestPendingVSCs(t *testing.T) { // TestInitHeight tests the getter and setter methods for the stored block heights (on provider) when a given consumer chain was started func TestInitHeight(t *testing.T) { - providerKeeper, ctx := testkeeper.GetProviderKeeperAndCtx(t) + providerKeeper, ctx, ctrl, _ := testkeeper.GetProviderKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() tc := []struct { chainID string @@ -186,13 +190,12 @@ func TestInitHeight(t *testing.T) { // TestHandleSlashPacketDoubleSigning tests the handling of a double-signing related slash packet, with mocks and unit tests func TestHandleSlashPacketDoubleSigning(t *testing.T) { - ctrl := gomock.NewController(t) - defer ctrl.Finish() chainId := "consumer" infractionHeight := int64(5) - cdc, storeKey, paramsSubspace, ctx := testkeeper.SetupInMemKeeper(t) + keeperParams := testkeeper.NewInMemKeeperParams(t) + ctx := keeperParams.Ctx slashPacket := ccv.NewSlashPacketData( abci.Validator{Address: ed25519.GenPrivKey().PubKey().Address(), @@ -201,8 +204,11 @@ func TestHandleSlashPacketDoubleSigning(t *testing.T) { stakingtypes.DoubleSign, ) - mockStakingKeeper := testkeeper.NewMockStakingKeeper(ctrl) - mockSlashingKeeper := testkeeper.NewMockSlashingKeeper(ctrl) + ctrl := gomock.NewController(t) + defer ctrl.Finish() + mocks := testkeeper.NewMockedKeepers(ctrl) + mockSlashingKeeper := mocks.MockSlashingKeeper + mockStakingKeeper := mocks.MockStakingKeeper // Setup expected mock calls gomock.InOrder( @@ -235,19 +241,7 @@ func TestHandleSlashPacketDoubleSigning(t *testing.T) { 
evidencetypes.DoubleSignJailEndTime).Times(1), ) - providerKeeper := testkeeper.GetProviderKeeperWithMocks( - cdc, - storeKey, - paramsSubspace, - capabilitykeeper.ScopedKeeper{}, - testkeeper.NewMockChannelKeeper(ctrl), - testkeeper.NewMockPortKeeper(ctrl), - testkeeper.NewMockConnectionKeeper(ctrl), - testkeeper.NewMockClientKeeper(ctrl), - mockStakingKeeper, - mockSlashingKeeper, - testkeeper.NewMockAccountKeeper(ctrl), - ) + providerKeeper := testkeeper.NewInMemProviderKeeper(keeperParams, mocks) providerKeeper.SetInitChainHeight(ctx, chainId, uint64(infractionHeight)) @@ -257,7 +251,10 @@ func TestHandleSlashPacketDoubleSigning(t *testing.T) { } func TestIterateOverUnbondingOpIndex(t *testing.T) { - providerKeeper, ctx := testkeeper.GetProviderKeeperAndCtx(t) + + providerKeeper, ctx, ctrl, _ := testkeeper.GetProviderKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() + chainID := "6" // mock an unbonding index @@ -280,7 +277,9 @@ func TestIterateOverUnbondingOpIndex(t *testing.T) { } func TestMaturedUnbondingOps(t *testing.T) { - providerKeeper, ctx := testkeeper.GetProviderKeeperAndCtx(t) + + providerKeeper, ctx, ctrl, _ := testkeeper.GetProviderKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() ids, err := providerKeeper.GetMaturedUnbondingOps(ctx) require.NoError(t, err) diff --git a/x/ccv/provider/keeper/params_test.go b/x/ccv/provider/keeper/params_test.go index b67c856398..6a8298ad6f 100644 --- a/x/ccv/provider/keeper/params_test.go +++ b/x/ccv/provider/keeper/params_test.go @@ -4,8 +4,6 @@ import ( "testing" "time" - capabilitykeeper "github.com/cosmos/cosmos-sdk/x/capability/keeper" - paramstypes "github.com/cosmos/cosmos-sdk/x/params/types" clienttypes "github.com/cosmos/ibc-go/v3/modules/core/02-client/types" commitmenttypes "github.com/cosmos/ibc-go/v3/modules/core/23-commitment/types" ibctmtypes "github.com/cosmos/ibc-go/v3/modules/light-clients/07-tendermint/types" @@ -14,33 +12,15 @@ import ( "github.com/stretchr/testify/require" ) +// TestParams tests the default params of the keeper, and getting/setting new params. 
func TestParams(t *testing.T) { defaultParams := types.DefaultParams() - // Constuct our own params subspace - cdc, storeKey, paramsSubspace, ctx := testkeeper.SetupInMemKeeper(t) - keyTable := paramstypes.NewKeyTable(paramstypes.NewParamSetPair(types.KeyTemplateClient, &ibctmtypes.ClientState{}, func(value interface{}) error { return nil })) - paramsSubspace = paramsSubspace.WithKeyTable(keyTable) - - expectedClientState := - ibctmtypes.NewClientState("", ibctmtypes.DefaultTrustLevel, 0, 0, - time.Second*10, clienttypes.Height{}, commitmenttypes.GetSDKSpecs(), []string{"upgrade", "upgradedIBCState"}, true, true) - - paramsSubspace.Set(ctx, types.KeyTemplateClient, expectedClientState) - - providerKeeper := testkeeper.GetProviderKeeperWithMocks( - cdc, - storeKey, - paramsSubspace, - capabilitykeeper.ScopedKeeper{}, - &testkeeper.MockChannelKeeper{}, - &testkeeper.MockPortKeeper{}, - &testkeeper.MockConnectionKeeper{}, - &testkeeper.MockClientKeeper{}, - &testkeeper.MockStakingKeeper{}, - &testkeeper.MockSlashingKeeper{}, - &testkeeper.MockAccountKeeper{}, - ) + // Construct an in-mem keeper with a populated template client state + keeperParams := testkeeper.NewInMemKeeperParams(t) + keeperParams.SetTemplateClientState(nil) + providerKeeper, ctx, ctrl, _ := testkeeper.GetProviderKeeperAndCtx(t, keeperParams) + defer ctrl.Finish() params := providerKeeper.GetParams(ctx) require.Equal(t, defaultParams, params) diff --git a/x/ccv/provider/keeper/proposal.go b/x/ccv/provider/keeper/proposal.go index 2597770af9..a37c823483 100644 --- a/x/ccv/provider/keeper/proposal.go +++ b/x/ccv/provider/keeper/proposal.go @@ -20,37 +20,100 @@ import ( consumertypes "github.com/cosmos/interchain-security/x/ccv/consumer/types" ) -// CreateConsumerChainProposal will receive the consumer chain's client state from the proposal. +// HandleConsumerAdditionProposal will receive the consumer chain's client state from the proposal. // If the spawn time has already passed, then set the consumer chain. Otherwise store the client // as a pending client, and set once spawn time has passed. -func (k Keeper) CreateConsumerChainProposal(ctx sdk.Context, p *types.CreateConsumerChainProposal) error { +// +// Note: This method implements SpawnConsumerChainProposalHandler in spec. +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-spccprop1 +// Spec tag: [CCV-PCF-SPCCPROP.1] +func (k Keeper) HandleConsumerAdditionProposal(ctx sdk.Context, p *types.ConsumerAdditionProposal) error { if !ctx.BlockTime().Before(p.SpawnTime) { // lockUbdOnTimeout is set to be false, regardless of what the proposal says, until we can specify and test issues around this use case more thoroughly return k.CreateConsumerClient(ctx, p.ChainId, p.InitialHeight, false) } - err := k.SetPendingCreateProposal(ctx, p) + err := k.SetPendingConsumerAdditionProp(ctx, p) + if err != nil { + return err + } + + return nil +} + +// CreateConsumerClient will create the CCV client for the given consumer chain. The CCV channel must be built +// on top of the CCV client to ensure connection with the right consumer chain. 
+// +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-crclient1 +// Spec tag: [CCV-PCF-CRCLIENT.1] +func (k Keeper) CreateConsumerClient(ctx sdk.Context, chainID string, initialHeight clienttypes.Height, lockUbdOnTimeout bool) error { + // check that a client for this chain does not exist + if _, found := k.GetConsumerClientId(ctx, chainID); found { + // drop the proposal + return nil + } + + // Use the unbonding period on the provider to compute the unbonding period on the consumer + unbondingPeriod := utils.ComputeConsumerUnbondingPeriod(k.stakingKeeper.UnbondingTime(ctx)) + + // Create client state by getting template client from parameters and filling in zeroed fields from proposal. + clientState := k.GetTemplateClient(ctx) + clientState.ChainId = chainID + clientState.LatestHeight = initialHeight + clientState.TrustingPeriod = unbondingPeriod / utils.TrustingPeriodFraction + clientState.UnbondingPeriod = unbondingPeriod + + // TODO: Allow for current validators to set different keys + consensusState := ibctmtypes.NewConsensusState( + ctx.BlockTime(), + commitmenttypes.NewMerkleRoot([]byte(ibctmtypes.SentinelRoot)), + ctx.BlockHeader().NextValidatorsHash, + ) + + clientID, err := k.clientKeeper.CreateClient(ctx, clientState, consensusState) + if err != nil { + return err + } + k.SetConsumerClientId(ctx, chainID, clientID) + + consumerGen, err := k.MakeConsumerGenesis(ctx) + if err != nil { + return err + } + err = k.SetConsumerGenesis(ctx, chainID, consumerGen) if err != nil { return err } + // store LockUnbondingOnTimeout flag + if lockUbdOnTimeout { + k.SetLockUnbondingOnTimeout(ctx, chainID) + } return nil } -// StopConsumerChainProposal stops a consumer chain and released the outstanding unbonding operations. +// HandleConsumerRemovalProposal stops a consumer chain and released the outstanding unbonding operations. // If the stop time hasn't already passed, it stores the proposal as a pending proposal. -func (k Keeper) StopConsumerChainProposal(ctx sdk.Context, p *types.StopConsumerChainProposal) error { +// +// This method implements StopConsumerChainProposalHandler from spec. +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-stccprop1 +// Spec tag: [CCV-PCF-STCCPROP.1] +func (k Keeper) HandleConsumerRemovalProposal(ctx sdk.Context, p *types.ConsumerRemovalProposal) error { if !ctx.BlockTime().Before(p.StopTime) { return k.StopConsumerChain(ctx, p.ChainId, false, true) } - k.SetPendingStopProposal(ctx, p.ChainId, p.StopTime) + k.SetPendingConsumerRemovalProp(ctx, p.ChainId, p.StopTime) return nil } // StopConsumerChain cleans up the states for the given consumer chain ID and, if the given lockUbd is false, // it completes the outstanding unbonding operations lock by the consumer chain. +// +// This method implements StopConsumerChain from spec. 
+// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-stcc1 +// Spec tag: [CCV-PCF-STCC.1] func (k Keeper) StopConsumerChain(ctx sdk.Context, chainID string, lockUbd, closeChan bool) (err error) { // check that a client for chainID exists if _, found := k.GetConsumerClientId(ctx, chainID); !found { @@ -60,6 +123,7 @@ func (k Keeper) StopConsumerChain(ctx sdk.Context, chainID string, lockUbd, clos // clean up states k.DeleteConsumerClientId(ctx, chainID) + k.DeleteConsumerGenesis(ctx, chainID) k.DeleteLockUnbondingOnTimeout(ctx, chainID) // close channel and delete the mappings between chain ID and channel ID @@ -71,11 +135,12 @@ func (k Keeper) StopConsumerChain(ctx sdk.Context, chainID string, lockUbd, clos k.DeleteChannelToChain(ctx, channelID) } - // TODO remove pending VSC packets once https://github.com/cosmos/interchain-security/issues/27 is fixed k.DeleteInitChainHeight(ctx, chainID) k.EmptySlashAcks(ctx, chainID) + k.EmptyPendingVSC(ctx, chainID) // release unbonding operations if they aren't locked + var vscIDs []uint64 if !lockUbd { // iterate over the consumer chain's unbonding operation VSC ids k.IterateOverUnbondingOpIndex(ctx, chainID, func(vscID uint64, ids []uint64) bool { @@ -105,63 +170,25 @@ func (k Keeper) StopConsumerChain(ctx sdk.Context, chainID string, lockUbd, clos if err := k.AppendMaturedUnbondingOps(ctx, maturedIds); err != nil { panic(fmt.Errorf("mature unbonding ops could not be appended: %w", err)) } - // clean up index - k.DeleteUnbondingOpIndex(ctx, chainID, vscID) + + vscIDs = append(vscIDs, vscID) return true }) } - if err != nil { - return err - } - - return nil -} - -// CreateConsumerClient will create the CCV client for the given consumer chain. The CCV channel must be built -// on top of the CCV client to ensure connection with the right consumer chain. -func (k Keeper) CreateConsumerClient(ctx sdk.Context, chainID string, initialHeight clienttypes.Height, lockUbdOnTimeout bool) error { - // check that a client for this chain does not exist - if _, found := k.GetConsumerClientId(ctx, chainID); found { - // drop the proposal - return nil - } - // Use the unbonding period on the provider to - // compute the unbonding period on the consumer - unbondingTime := utils.ComputeConsumerUnbondingPeriod(k.stakingKeeper.UnbondingTime(ctx)) - - // create clientstate by getting template client from parameters and filling in zeroed fields from proposal. 
- clientState := k.GetTemplateClient(ctx) - clientState.ChainId = chainID - clientState.LatestHeight = initialHeight - clientState.TrustingPeriod = unbondingTime / utils.TrustingPeriodFraction - clientState.UnbondingPeriod = unbondingTime - - // TODO: Allow for current validators to set different keys - consensusState := ibctmtypes.NewConsensusState(ctx.BlockTime(), commitmenttypes.NewMerkleRoot([]byte(ibctmtypes.SentinelRoot)), ctx.BlockHeader().NextValidatorsHash) - clientID, err := k.clientKeeper.CreateClient(ctx, clientState, consensusState) if err != nil { return err } - k.SetConsumerClientId(ctx, chainID, clientID) - consumerGen, err := k.MakeConsumerGenesis(ctx) - if err != nil { - return err + // clean up indexes + for _, id := range vscIDs { + k.DeleteUnbondingOpIndex(ctx, chainID, id) } - err = k.SetConsumerGenesis(ctx, chainID, consumerGen) - if err != nil { - return err - } - - // store LockUnbondingOnTimeout flag - if lockUbdOnTimeout { - k.SetLockUnbondingOnTimeout(ctx, chainID) - } return nil } +// MakeConsumerGenesis constructs a consumer genesis state. func (k Keeper) MakeConsumerGenesis(ctx sdk.Context) (gen consumertypes.GenesisState, err error) { unbondingTime := k.stakingKeeper.UnbondingTime(ctx) height := clienttypes.GetSelfHeight(ctx) @@ -220,40 +247,44 @@ func (k Keeper) MakeConsumerGenesis(ctx sdk.Context) (gen consumertypes.GenesisS return gen, nil } -// SetPendingCreateProposal stores a pending proposal to create a consumer chain client -func (k Keeper) SetPendingCreateProposal(ctx sdk.Context, clientInfo *types.CreateConsumerChainProposal) error { +// SetPendingConsumerAdditionProp stores a pending proposal to create a consumer chain client +func (k Keeper) SetPendingConsumerAdditionProp(ctx sdk.Context, clientInfo *types.ConsumerAdditionProposal) error { store := ctx.KVStore(k.storeKey) bz, err := k.cdc.Marshal(clientInfo) if err != nil { return err } - store.Set(types.PendingCreateProposalKey(clientInfo.SpawnTime, clientInfo.ChainId), bz) + store.Set(types.PendingCAPKey(clientInfo.SpawnTime, clientInfo.ChainId), bz) return nil } -// GetPendingCreateProposal retrieves a pending proposal to create a consumer chain client (by spawn time and chain id) -func (k Keeper) GetPendingCreateProposal(ctx sdk.Context, spawnTime time.Time, chainID string) types.CreateConsumerChainProposal { +// GetPendingConsumerAdditionProp retrieves a pending proposal to create a consumer chain client (by spawn time and chain id) +func (k Keeper) GetPendingConsumerAdditionProp(ctx sdk.Context, spawnTime time.Time, + chainID string) (prop types.ConsumerAdditionProposal, found bool) { store := ctx.KVStore(k.storeKey) - bz := store.Get(types.PendingCreateProposalKey(spawnTime, chainID)) + bz := store.Get(types.PendingCAPKey(spawnTime, chainID)) if len(bz) == 0 { - return types.CreateConsumerChainProposal{} + return prop, false } - var clientInfo types.CreateConsumerChainProposal - k.cdc.MustUnmarshal(bz, &clientInfo) + k.cdc.MustUnmarshal(bz, &prop) - return clientInfo + return prop, true } -func (k Keeper) PendingCreateProposalIterator(ctx sdk.Context) sdk.Iterator { +// PendingConsumerAdditionPropIterator returns an iterator for iterating through pending consumer addition proposals +func (k Keeper) PendingConsumerAdditionPropIterator(ctx sdk.Context) sdk.Iterator { store := ctx.KVStore(k.storeKey) - return sdk.KVStorePrefixIterator(store, []byte{types.PendingCreateProposalBytePrefix}) + return sdk.KVStorePrefixIterator(store, []byte{types.PendingCAPBytePrefix}) } -// 
IteratePendingCreateProposal iterates over the pending proposals to create consumer chain clients in order -// and creates the consumer client if the spawn time has passed. -func (k Keeper) IteratePendingCreateProposal(ctx sdk.Context) { - propsToExecute := k.CreateProposalsToExecute(ctx) +// BeginBlockInit iterates over the pending consumer addition proposals in order, and creates +// clients for props in which the spawn time has passed. Executed proposals are deleted. +// +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-bblock-init1 +// Spec tag:[CCV-PCF-BBLOCK-INIT.1] +func (k Keeper) BeginBlockInit(ctx sdk.Context) { + propsToExecute := k.ConsumerAdditionPropsToExecute(ctx) for _, prop := range propsToExecute { // lockUbdOnTimeout is set to be false, regardless of what the proposal says, until we can specify and test issues around this use case more thoroughly @@ -263,86 +294,102 @@ func (k Keeper) IteratePendingCreateProposal(ctx sdk.Context) { } } // delete the executed proposals - k.DeletePendingCreateProposal(ctx, propsToExecute...) + k.DeletePendingConsumerAdditionProps(ctx, propsToExecute...) } -// CreateProposalsToExecute iterates over the pending proposals and returns an ordered list of proposals to be executed, -// ie. consumer clients to be created. A prop is included in the returned list if its proposed spawn time has passed. +// ConsumerAdditionPropsToExecute iterates over the pending consumer addition proposals +// and returns an ordered list of proposals to be executed, ie. consumer clients to be created. +// A prop is included in the returned list if its proposed spawn time has passed. // -// Note: this method is split out from IteratePendingCreateProposal to be easily unit tested. -func (k Keeper) CreateProposalsToExecute(ctx sdk.Context) []types.CreateConsumerChainProposal { +// Note: this method is split out from BeginBlockInit to be easily unit tested. +func (k Keeper) ConsumerAdditionPropsToExecute(ctx sdk.Context) []types.ConsumerAdditionProposal { // store the (to be) executed proposals in order - propsToExecute := []types.CreateConsumerChainProposal{} + propsToExecute := []types.ConsumerAdditionProposal{} - iterator := k.PendingCreateProposalIterator(ctx) + iterator := k.PendingConsumerAdditionPropIterator(ctx) defer iterator.Close() - if !iterator.Valid() { - return propsToExecute - } + k.IteratePendingConsumerAdditionProps(ctx, func(spawnTime time.Time, prop types.ConsumerAdditionProposal) bool { + if !ctx.BlockTime().Before(spawnTime) { + propsToExecute = append(propsToExecute, prop) + return true + } + return false + }) + + return propsToExecute +} + +func (k Keeper) IteratePendingConsumerAdditionProps(ctx sdk.Context, cb func(spawnTime time.Time, prop types.ConsumerAdditionProposal) bool) { + iterator := k.PendingConsumerAdditionPropIterator(ctx) + defer iterator.Close() for ; iterator.Valid(); iterator.Next() { key := iterator.Key() - spawnTime, _, err := types.ParsePendingCreateProposalKey(key) + spawnTime, _, err := types.ParsePendingCAPKey(key) if err != nil { panic(fmt.Errorf("failed to parse pending client key: %w", err)) } - var prop types.CreateConsumerChainProposal + var prop types.ConsumerAdditionProposal k.cdc.MustUnmarshal(iterator.Value(), &prop) - if !ctx.BlockTime().Before(spawnTime) { - propsToExecute = append(propsToExecute, prop) - } else { - // No more proposals to check, since they're stored/ordered by timestamp. 
- break + if !cb(spawnTime, prop) { + return } } - return propsToExecute } -// DeletePendingCreateProposal deletes the given create consumer proposals -func (k Keeper) DeletePendingCreateProposal(ctx sdk.Context, proposals ...types.CreateConsumerChainProposal) { +// DeletePendingConsumerAdditionProps deletes the given consumer addition proposals. +// This method should be called once the proposal has been acted upon. +func (k Keeper) DeletePendingConsumerAdditionProps(ctx sdk.Context, proposals ...types.ConsumerAdditionProposal) { store := ctx.KVStore(k.storeKey) for _, p := range proposals { - store.Delete(types.PendingCreateProposalKey(p.SpawnTime, p.ChainId)) + store.Delete(types.PendingCAPKey(p.SpawnTime, p.ChainId)) } } -// SetPendingStopProposal sets the consumer chain ID for the given timestamp -func (k Keeper) SetPendingStopProposal(ctx sdk.Context, chainID string, timestamp time.Time) { +// SetPendingConsumerRemovalProp stores a pending proposal to remove and stop a consumer chain +func (k Keeper) SetPendingConsumerRemovalProp(ctx sdk.Context, chainID string, timestamp time.Time) { store := ctx.KVStore(k.storeKey) - store.Set(types.PendingStopProposalKey(timestamp, chainID), []byte{}) + store.Set(types.PendingCRPKey(timestamp, chainID), []byte{}) } -// GetPendingStopProposal returns a boolean if a pending stop proposal exists for the given consumer chain ID and the timestamp -func (k Keeper) GetPendingStopProposal(ctx sdk.Context, chainID string, timestamp time.Time) bool { +// GetPendingConsumerRemovalProp returns a boolean if a pending consumer removal proposal +// exists for the given consumer chain ID and timestamp +func (k Keeper) GetPendingConsumerRemovalProp(ctx sdk.Context, chainID string, timestamp time.Time) bool { store := ctx.KVStore(k.storeKey) - bz := store.Get(types.PendingStopProposalKey(timestamp, chainID)) + bz := store.Get(types.PendingCRPKey(timestamp, chainID)) return bz != nil } -// DeletePendingStopProposals deletes the given stop proposals -func (k Keeper) DeletePendingStopProposals(ctx sdk.Context, proposals ...types.StopConsumerChainProposal) { +// DeletePendingConsumerRemovalProps deletes the given pending consumer removal proposals. +// This method should be called once the proposal has been acted upon. +func (k Keeper) DeletePendingConsumerRemovalProps(ctx sdk.Context, proposals ...types.ConsumerRemovalProposal) { store := ctx.KVStore(k.storeKey) for _, p := range proposals { - store.Delete(types.PendingStopProposalKey(p.StopTime, p.ChainId)) + store.Delete(types.PendingCRPKey(p.StopTime, p.ChainId)) } } -func (k Keeper) PendingStopProposalIterator(ctx sdk.Context) sdk.Iterator { +// PendingConsumerRemovalPropIterator returns an iterator for iterating through pending consumer removal proposals +func (k Keeper) PendingConsumerRemovalPropIterator(ctx sdk.Context) sdk.Iterator { store := ctx.KVStore(k.storeKey) - return sdk.KVStorePrefixIterator(store, []byte{types.PendingStopProposalBytePrefix}) + return sdk.KVStorePrefixIterator(store, []byte{types.PendingCRPBytePrefix}) } -// IteratePendingStopProposal iterates over the pending stop proposals in order and stop the chain if the stop time has passed, -// otherwise it will break out of loop and return. -func (k Keeper) IteratePendingStopProposal(ctx sdk.Context) { - propsToExecute := k.StopProposalsToExecute(ctx) +// BeginBlockCCR iterates over the pending consumer removal proposals +// in order and stop/removes the chain if the stop time has passed, +// otherwise it will break out of loop and return. 
Executed proposals are deleted. +// +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-bblock-ccr1 +// Spec tag: [CCV-PCF-BBLOCK-CCR.1] +func (k Keeper) BeginBlockCCR(ctx sdk.Context) { + propsToExecute := k.ConsumerRemovalPropsToExecute(ctx) for _, prop := range propsToExecute { err := k.StopConsumerChain(ctx, prop.ChainId, false, true) @@ -351,42 +398,49 @@ func (k Keeper) IteratePendingStopProposal(ctx sdk.Context) { } } // delete the executed proposals - k.DeletePendingStopProposals(ctx, propsToExecute...) + k.DeletePendingConsumerRemovalProps(ctx, propsToExecute...) } -// StopProposalsToExecute iterates over the pending stop proposals and returns an ordered list of stop proposals to be executed, -// ie. consumer chains to stop. A prop is included in the returned list if its proposed stop time has passed. +// ConsumerRemovalPropsToExecute iterates over the pending consumer removal proposals +// and returns an ordered list of consumer removal proposals to be executed, +// ie. consumer chains to be stopped and removed from the provider chain. +// A prop is included in the returned list if its proposed stop time has passed. // -// Note: this method is split out from IteratePendingCreateProposal to be easily unit tested. -func (k Keeper) StopProposalsToExecute(ctx sdk.Context) []types.StopConsumerChainProposal { +// Note: this method is split out from BeginBlockCCR to be easily unit tested. +func (k Keeper) ConsumerRemovalPropsToExecute(ctx sdk.Context) []types.ConsumerRemovalProposal { - // store the (to be) executed stop proposals in order - propsToExecute := []types.StopConsumerChainProposal{} + // store the (to be) executed consumer removal proposals in order + propsToExecute := []types.ConsumerRemovalProposal{} - iterator := k.PendingStopProposalIterator(ctx) - defer iterator.Close() + k.IteratePendingConsumerRemovalProps(ctx, func(stopTime time.Time, prop types.ConsumerRemovalProposal) bool { + if !ctx.BlockTime().Before(stopTime) { + propsToExecute = append(propsToExecute, prop) + return true + } else { + // No more proposals to check, since they're stored/ordered by timestamp. + return false + } + }) - if !iterator.Valid() { - return propsToExecute - } + return propsToExecute +} + +func (k Keeper) IteratePendingConsumerRemovalProps(ctx sdk.Context, cb func(stopTime time.Time, prop types.ConsumerRemovalProposal) bool) { + iterator := k.PendingConsumerRemovalPropIterator(ctx) + defer iterator.Close() for ; iterator.Valid(); iterator.Next() { key := iterator.Key() - stopTime, chainID, err := types.ParsePendingStopProposalKey(key) + stopTime, chainID, err := types.ParsePendingCRPKey(key) if err != nil { - panic(fmt.Errorf("failed to parse pending stop proposal key: %w", err)) + panic(fmt.Errorf("failed to parse pending consumer removal proposal key: %w", err)) } - if !ctx.BlockTime().Before(stopTime) { - propsToExecute = append(propsToExecute, - types.StopConsumerChainProposal{ChainId: chainID, StopTime: stopTime}) - } else { - // No more proposals to check, since they're stored/ordered by timestamp. 
- break + if !cb(stopTime, types.ConsumerRemovalProposal{ChainId: chainID, StopTime: stopTime}) { + return } } - return propsToExecute } // CloseChannel closes the channel for the given channel ID on the condition diff --git a/x/ccv/provider/keeper/proposal_test.go b/x/ccv/provider/keeper/proposal_test.go index f62f246458..1e309e737b 100644 --- a/x/ccv/provider/keeper/proposal_test.go +++ b/x/ccv/provider/keeper/proposal_test.go @@ -1,207 +1,774 @@ package keeper_test import ( + "encoding/json" "testing" "time" + _go "github.com/confio/ics23/go" + sdk "github.com/cosmos/cosmos-sdk/types" + clienttypes "github.com/cosmos/ibc-go/v3/modules/core/02-client/types" + ibctmtypes "github.com/cosmos/ibc-go/v3/modules/light-clients/07-tendermint/types" + "github.com/golang/mock/gomock" + abci "github.com/tendermint/tendermint/abci/types" + "github.com/stretchr/testify/require" testkeeper "github.com/cosmos/interchain-security/testutil/keeper" + consumertypes "github.com/cosmos/interchain-security/x/ccv/consumer/types" + providerkeeper "github.com/cosmos/interchain-security/x/ccv/provider/keeper" "github.com/cosmos/interchain-security/x/ccv/provider/types" + providertypes "github.com/cosmos/interchain-security/x/ccv/provider/types" ) -func TestPendingStopProposalDeletion(t *testing.T) { +// +// Initialization sub-protocol related tests of proposal.go +// + +// Tests the HandleConsumerAdditionProposal method against the SpawnConsumerChainProposalHandler spec. +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-spccprop1 +// Spec tag: [CCV-PCF-SPCCPROP.1] +func TestHandleConsumerAdditionProposal(t *testing.T) { + + type testCase struct { + description string + prop *providertypes.ConsumerAdditionProposal + // Time when prop is handled + blockTime time.Time + // Whether it's expected that the spawn time has passed and client should be created + expCreatedClient bool + } + + // Snapshot times asserted in tests + now := time.Now().UTC() + hourFromNow := now.Add(time.Hour).UTC() + + tests := []testCase{ + { + description: "ctx block time is after proposal's spawn time, expected that client is created", + prop: providertypes.NewConsumerAdditionProposal( + "title", + "description", + "chainID", + clienttypes.NewHeight(2, 3), + []byte("gen_hash"), + []byte("bin_hash"), + now, // Spawn time + ).(*providertypes.ConsumerAdditionProposal), + blockTime: hourFromNow, + expCreatedClient: true, + }, + { + description: `ctx block time is before proposal's spawn time, + expected that no client is created and the proposal is persisted as pending`, + prop: providertypes.NewConsumerAdditionProposal( + "title", + "description", + "chainID", + clienttypes.NewHeight(2, 3), + []byte("gen_hash"), + []byte("bin_hash"), + hourFromNow, // Spawn time + ).(*types.ConsumerAdditionProposal), + blockTime: now, + expCreatedClient: false, + }, + } + + for _, tc := range tests { + // Common setup + keeperParams := testkeeper.NewInMemKeeperParams(t) + keeperParams.SetTemplateClientState(nil) + providerKeeper, ctx, ctrl, mocks := testkeeper.GetProviderKeeperAndCtx(t, keeperParams) + ctx = ctx.WithBlockTime(tc.blockTime) + + if tc.expCreatedClient { + // Mock calls are only asserted if we expect a client to be created. + gomock.InOrder( + testkeeper.GetMocksForCreateConsumerClient(ctx, &mocks, "chainID", clienttypes.NewHeight(2, 3))..., + ) + } + + tc.prop.LockUnbondingOnTimeout = false // Full functionality not implemented yet. 
+ + err := providerKeeper.HandleConsumerAdditionProposal(ctx, tc.prop) + require.NoError(t, err) + + if tc.expCreatedClient { + testCreatedConsumerClient(t, ctx, providerKeeper, tc.prop.ChainId, "clientID") + } else { + // check that stored pending prop is exactly the same as the initially instantiated prop + gotProposal, found := providerKeeper.GetPendingConsumerAdditionProp(ctx, tc.prop.SpawnTime, tc.prop.ChainId) + require.True(t, found) + require.Equal(t, *tc.prop, gotProposal) + // double check that a client for this chain does not exist + _, found = providerKeeper.GetConsumerClientId(ctx, tc.prop.ChainId) + require.False(t, found) + } + ctrl.Finish() + } +} + +// Tests the CreateConsumerClient method against the spec, +// with more granularity than what's covered in TestHandleCreateConsumerChainProposal. +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-crclient1 +// Spec tag: [CCV-PCF-CRCLIENT.1] +func TestCreateConsumerClient(t *testing.T) { + + type testCase struct { + description string + // Any state-mutating setup on keeper and expected mock calls, specific to this test case + setup func(*providerkeeper.Keeper, sdk.Context, *testkeeper.MockedKeepers) + // Whether a client should be created + expClientCreated bool + } + tests := []testCase{ + { + description: "No state mutation, new client should be created", + setup: func(providerKeeper *providerkeeper.Keeper, ctx sdk.Context, mocks *testkeeper.MockedKeepers) { + + // Valid client creation is asserted with mock expectations here + gomock.InOrder( + testkeeper.GetMocksForCreateConsumerClient(ctx, mocks, "chainID", clienttypes.NewHeight(4, 5))..., + ) + }, + expClientCreated: true, + }, + { + description: "client for this chain already exists, new one is not created", + setup: func(providerKeeper *providerkeeper.Keeper, ctx sdk.Context, mocks *testkeeper.MockedKeepers) { + + providerKeeper.SetConsumerClientId(ctx, "chainID", "clientID") + + // Expect none of the client creation related calls to happen + mocks.MockStakingKeeper.EXPECT().UnbondingTime(gomock.Any()).Times(0) + mocks.MockClientKeeper.EXPECT().CreateClient(gomock.Any(), gomock.Any(), gomock.Any()).Times(0) + mocks.MockClientKeeper.EXPECT().GetSelfConsensusState(gomock.Any(), gomock.Any()).Times(0) + mocks.MockStakingKeeper.EXPECT().IterateLastValidatorPowers(gomock.Any(), gomock.Any()).Times(0) + + }, + expClientCreated: false, + }, + } + + for _, tc := range tests { + // Common setup + keeperParams := testkeeper.NewInMemKeeperParams(t) + keeperParams.SetTemplateClientState(nil) + providerKeeper, ctx, ctrl, mocks := testkeeper.GetProviderKeeperAndCtx(t, keeperParams) + + // Test specific setup + tc.setup(&providerKeeper, ctx, &mocks) + + // Call method with same arbitrary values as defined above in mock expectations. + err := providerKeeper.CreateConsumerClient( + ctx, "chainID", clienttypes.NewHeight(4, 5), false) // LockUbdOnTimeout always false for now + + require.NoError(t, err) + + if tc.expClientCreated { + testCreatedConsumerClient(t, ctx, providerKeeper, "chainID", "clientID") + } + + // Assert mock calls from setup functions + ctrl.Finish() + } +} + +// Executes test assertions for a created consumer client. +// +// Note: Separated from TestCreateConsumerClient to also be called from TestCreateConsumerChainProposal. +func testCreatedConsumerClient(t *testing.T, + ctx sdk.Context, providerKeeper providerkeeper.Keeper, expectedChainID string, expectedClientID string) { + + // ClientID should be stored. 
+ clientId, found := providerKeeper.GetConsumerClientId(ctx, expectedChainID) + require.True(t, found, "consumer client not found") + require.Equal(t, expectedClientID, clientId) + + // Lock unbonding on timeout flag always false for now. + lockUbdOnTimeout := providerKeeper.GetLockUnbondingOnTimeout(ctx, expectedChainID) + require.False(t, lockUbdOnTimeout) + + // Only assert that consumer genesis was set, + // more granular tests on consumer genesis should be defined in TestMakeConsumerGenesis + _, ok := providerKeeper.GetConsumerGenesis(ctx, expectedChainID) + require.True(t, ok) +} + +// TestPendingConsumerAdditionPropDeletion tests the getting/setting +// and deletion keeper methods for pending consumer addition props +func TestPendingConsumerAdditionPropDeletion(t *testing.T) { testCases := []struct { - types.StopConsumerChainProposal + types.ConsumerAdditionProposal ExpDeleted bool }{ { - StopConsumerChainProposal: types.StopConsumerChainProposal{ChainId: "8", StopTime: time.Now().UTC()}, - ExpDeleted: true, + ConsumerAdditionProposal: types.ConsumerAdditionProposal{ChainId: "0", SpawnTime: time.Now().UTC()}, + ExpDeleted: true, }, { - StopConsumerChainProposal: types.StopConsumerChainProposal{ChainId: "9", StopTime: time.Now().UTC().Add(time.Hour)}, - ExpDeleted: false, + ConsumerAdditionProposal: types.ConsumerAdditionProposal{ChainId: "1", SpawnTime: time.Now().UTC().Add(time.Hour)}, + ExpDeleted: false, }, } - providerKeeper, ctx := testkeeper.GetProviderKeeperAndCtx(t) + providerKeeper, ctx, ctrl, _ := testkeeper.GetProviderKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() for _, tc := range testCases { - providerKeeper.SetPendingStopProposal(ctx, tc.ChainId, tc.StopTime) + err := providerKeeper.SetPendingConsumerAdditionProp(ctx, &tc.ConsumerAdditionProposal) + require.NoError(t, err) } ctx = ctx.WithBlockTime(time.Now().UTC()) - propsToExecute := providerKeeper.StopProposalsToExecute(ctx) - // Delete stop proposals, same as what would be done by IteratePendingStopProposal - providerKeeper.DeletePendingStopProposals(ctx, propsToExecute...) + propsToExecute := providerKeeper.ConsumerAdditionPropsToExecute(ctx) + // Delete consumer addition proposals, same as what would be done by BeginBlockInit + providerKeeper.DeletePendingConsumerAdditionProps(ctx, propsToExecute...) 
numDeleted := 0 for _, tc := range testCases { - res := providerKeeper.GetPendingStopProposal(ctx, tc.ChainId, tc.StopTime) + res, found := providerKeeper.GetPendingConsumerAdditionProp(ctx, tc.SpawnTime, tc.ChainId) if !tc.ExpDeleted { - require.NotEmpty(t, res, "stop proposal was deleted: %s %s", tc.ChainId, tc.StopTime.String()) + require.True(t, found) + require.NotEmpty(t, res, "consumer addition proposal was deleted: %s %s", tc.ChainId, tc.SpawnTime.String()) continue } - require.Empty(t, res, "stop proposal was not deleted %s %s", tc.ChainId, tc.StopTime.String()) + require.Empty(t, res, "consumer addition proposal was not deleted %s %s", tc.ChainId, tc.SpawnTime.String()) require.Equal(t, propsToExecute[numDeleted].ChainId, tc.ChainId) numDeleted += 1 } } -// Tests that pending stop proposals are accessed in order by timestamp via the iterator -func TestPendingStopProposalsOrder(t *testing.T) { +// TestPendingConsumerAdditionPropOrder tests that pending consumer addition proposals +// are accessed in order by timestamp via the iterator +func TestPendingConsumerAdditionPropOrder(t *testing.T) { now := time.Now().UTC() // props with unique chain ids and spawn times - sampleProp1 := types.StopConsumerChainProposal{ChainId: "1", StopTime: now} - sampleProp2 := types.StopConsumerChainProposal{ChainId: "2", StopTime: now.Add(1 * time.Hour)} - sampleProp3 := types.StopConsumerChainProposal{ChainId: "3", StopTime: now.Add(2 * time.Hour)} - sampleProp4 := types.StopConsumerChainProposal{ChainId: "4", StopTime: now.Add(3 * time.Hour)} - sampleProp5 := types.StopConsumerChainProposal{ChainId: "5", StopTime: now.Add(4 * time.Hour)} + sampleProp1 := types.ConsumerAdditionProposal{ChainId: "1", SpawnTime: now} + sampleProp2 := types.ConsumerAdditionProposal{ChainId: "2", SpawnTime: now.Add(1 * time.Hour)} + sampleProp3 := types.ConsumerAdditionProposal{ChainId: "3", SpawnTime: now.Add(2 * time.Hour)} + sampleProp4 := types.ConsumerAdditionProposal{ChainId: "4", SpawnTime: now.Add(3 * time.Hour)} + sampleProp5 := types.ConsumerAdditionProposal{ChainId: "5", SpawnTime: now.Add(4 * time.Hour)} testCases := []struct { - propSubmitOrder []types.StopConsumerChainProposal + propSubmitOrder []types.ConsumerAdditionProposal accessTime time.Time - expectedOrderedProps []types.StopConsumerChainProposal + expectedOrderedProps []types.ConsumerAdditionProposal }{ { - propSubmitOrder: []types.StopConsumerChainProposal{ + propSubmitOrder: []types.ConsumerAdditionProposal{ sampleProp1, sampleProp2, sampleProp3, sampleProp4, sampleProp5, }, accessTime: now.Add(30 * time.Minute), - expectedOrderedProps: []types.StopConsumerChainProposal{ + expectedOrderedProps: []types.ConsumerAdditionProposal{ sampleProp1, }, }, { - propSubmitOrder: []types.StopConsumerChainProposal{ + propSubmitOrder: []types.ConsumerAdditionProposal{ sampleProp3, sampleProp2, sampleProp1, sampleProp5, sampleProp4, }, accessTime: now.Add(3 * time.Hour).Add(30 * time.Minute), - expectedOrderedProps: []types.StopConsumerChainProposal{ + expectedOrderedProps: []types.ConsumerAdditionProposal{ sampleProp1, sampleProp2, sampleProp3, sampleProp4, }, }, { - propSubmitOrder: []types.StopConsumerChainProposal{ + propSubmitOrder: []types.ConsumerAdditionProposal{ sampleProp5, sampleProp4, sampleProp3, sampleProp2, sampleProp1, }, accessTime: now.Add(5 * time.Hour), - expectedOrderedProps: []types.StopConsumerChainProposal{ + expectedOrderedProps: []types.ConsumerAdditionProposal{ sampleProp1, sampleProp2, sampleProp3, sampleProp4, sampleProp5, }, }, } for 
_, tc := range testCases { - providerKeeper, ctx := testkeeper.GetProviderKeeperAndCtx(t) + providerKeeper, ctx, ctrl, _ := testkeeper.GetProviderKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() + ctx = ctx.WithBlockTime(tc.accessTime) for _, prop := range tc.propSubmitOrder { - providerKeeper.SetPendingStopProposal(ctx, prop.ChainId, prop.StopTime) + err := providerKeeper.SetPendingConsumerAdditionProp(ctx, &prop) + require.NoError(t, err) } - propsToExecute := providerKeeper.StopProposalsToExecute(ctx) + propsToExecute := providerKeeper.ConsumerAdditionPropsToExecute(ctx) require.Equal(t, tc.expectedOrderedProps, propsToExecute) } } -func TestPendingCreateProposalsDeletion(t *testing.T) { +// +// Consumer Chain Removal sub-protocol related tests of proposal.go +// + +// TestHandleConsumerRemovalProposal tests HandleConsumerRemovalProposal against its corresponding spec method. +// +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-stccprop1 +// Spec tag: [CCV-PCF-STCCPROP.1] +func TestHandleConsumerRemovalProposal(t *testing.T) { + + type testCase struct { + description string + // Consumer removal proposal to handle + prop *types.ConsumerRemovalProposal + // Time when prop is handled + blockTime time.Time + // Whether consumer chain should have been stopped + expStop bool + } + + // Snapshot times asserted in tests + now := time.Now().UTC() + hourFromNow := now.Add(time.Hour).UTC() + + tests := []testCase{ + { + description: "valid proposal: stop time reached", + prop: providertypes.NewConsumerRemovalProposal( + "title", + "description", + "chainID", + now, + ).(*providertypes.ConsumerRemovalProposal), + blockTime: hourFromNow, // After stop time. + expStop: true, + }, + { + description: "valid proposal: stop time has not yet been reached", + prop: providertypes.NewConsumerRemovalProposal( + "title", + "description", + "chainID", + hourFromNow, + ).(*providertypes.ConsumerRemovalProposal), + blockTime: now, // Before proposal's stop time + expStop: false, + }, + } + + for _, tc := range tests { + + // Common setup + keeperParams := testkeeper.NewInMemKeeperParams(t) + keeperParams.SetTemplateClientState(nil) + providerKeeper, ctx, ctrl, mocks := testkeeper.GetProviderKeeperAndCtx(t, keeperParams) + ctx = ctx.WithBlockTime(tc.blockTime) + + // Mock expectations and setup for stopping the consumer chain, if applicable + if tc.expStop { + testkeeper.SetupForStoppingConsumerChain(t, ctx, &providerKeeper, mocks) + } + // Note: when expStop is false, no mocks are setup, + // meaning no external keeper methods are allowed to be called. + + err := providerKeeper.HandleConsumerRemovalProposal(ctx, tc.prop) + require.NoError(t, err) + + if tc.expStop { + // Expect no pending proposal to exist + found := providerKeeper.GetPendingConsumerRemovalProp(ctx, tc.prop.ChainId, tc.prop.StopTime) + require.False(t, found) + + testConsumerStateIsCleaned(t, ctx, providerKeeper, tc.prop.ChainId, "channelID") + } else { + // Proposal should be stored as pending + found := providerKeeper.GetPendingConsumerRemovalProp(ctx, tc.prop.ChainId, tc.prop.StopTime) + require.True(t, found) + } + + // Assert mock calls from setup function + ctrl.Finish() + } +} + +// Tests the StopConsumerChain method against the spec, +// with more granularity than what's covered in TestHandleConsumerRemovalProposal, or e2e tests. 
+// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-stcc1 +// Spec tag: [CCV-PCF-STCC.1] +func TestStopConsumerChain(t *testing.T) { + type testCase struct { + description string + // State-mutating setup specific to this test case + setup func(sdk.Context, *providerkeeper.Keeper, testkeeper.MockedKeepers) + // Whether we should expect the method to return an error + expErr bool + } + + tests := []testCase{ + { + description: "fail due to an invalid unbonding index", + setup: func(ctx sdk.Context, providerKeeper *providerkeeper.Keeper, mocks testkeeper.MockedKeepers) { + // set invalid unbonding op index + providerKeeper.SetUnbondingOpIndex(ctx, "chainID", 0, []uint64{0}) + + // StopConsumerChain should return error, but state is still cleaned (asserted with mocks). + testkeeper.SetupForStoppingConsumerChain(t, ctx, providerKeeper, mocks) + }, + expErr: true, + }, + { + description: "proposal dropped, client doesn't exist", + setup: func(ctx sdk.Context, providerKeeper *providerkeeper.Keeper, mocks testkeeper.MockedKeepers) { + // No mocks, meaning no external keeper methods are allowed to be called. + }, + expErr: false, + }, + { + description: "valid stop of consumer chain, all mock calls hit", + setup: func(ctx sdk.Context, providerKeeper *providerkeeper.Keeper, mocks testkeeper.MockedKeepers) { + testkeeper.SetupForStoppingConsumerChain(t, ctx, providerKeeper, mocks) + }, + expErr: false, + }, + } + + for _, tc := range tests { + + // Common setup + keeperParams := testkeeper.NewInMemKeeperParams(t) + keeperParams.SetTemplateClientState(nil) + providerKeeper, ctx, ctrl, mocks := testkeeper.GetProviderKeeperAndCtx(t, keeperParams) + + // Setup specific to test case + tc.setup(ctx, &providerKeeper, mocks) + + err := providerKeeper.StopConsumerChain(ctx, "chainID", false, true) + + if tc.expErr { + require.Error(t, err) + } else { + require.NoError(t, err) + } + + testConsumerStateIsCleaned(t, ctx, providerKeeper, "chainID", "channelID") + + ctrl.Finish() + } +} + +// testConsumerStateIsCleaned executes test assertions for a stopped consumer chain's state being cleaned. 
+func testConsumerStateIsCleaned(t *testing.T, ctx sdk.Context, providerKeeper providerkeeper.Keeper, + expectedChainID string, expectedChannelID string) { + + _, found := providerKeeper.GetConsumerClientId(ctx, expectedChainID) + require.False(t, found) + found = providerKeeper.GetLockUnbondingOnTimeout(ctx, expectedChainID) + require.False(t, found) + _, found = providerKeeper.GetChainToChannel(ctx, expectedChainID) + require.False(t, found) + _, found = providerKeeper.GetChannelToChain(ctx, expectedChannelID) + require.False(t, found) + _, found = providerKeeper.GetInitChainHeight(ctx, expectedChainID) + require.False(t, found) + acks := providerKeeper.GetSlashAcks(ctx, expectedChainID) + require.Empty(t, acks) +} + +// TestPendingConsumerRemovalPropDeletion tests the getting/setting +// and deletion methods for pending consumer removal props +func TestPendingConsumerRemovalPropDeletion(t *testing.T) { testCases := []struct { - types.CreateConsumerChainProposal + types.ConsumerRemovalProposal ExpDeleted bool }{ { - CreateConsumerChainProposal: types.CreateConsumerChainProposal{ChainId: "0", SpawnTime: time.Now().UTC()}, - ExpDeleted: true, + ConsumerRemovalProposal: types.ConsumerRemovalProposal{ChainId: "8", StopTime: time.Now().UTC()}, + ExpDeleted: true, }, { - CreateConsumerChainProposal: types.CreateConsumerChainProposal{ChainId: "1", SpawnTime: time.Now().UTC().Add(time.Hour)}, - ExpDeleted: false, + ConsumerRemovalProposal: types.ConsumerRemovalProposal{ChainId: "9", StopTime: time.Now().UTC().Add(time.Hour)}, + ExpDeleted: false, }, } - providerKeeper, ctx := testkeeper.GetProviderKeeperAndCtx(t) + providerKeeper, ctx, ctrl, _ := testkeeper.GetProviderKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() for _, tc := range testCases { - err := providerKeeper.SetPendingCreateProposal(ctx, &tc.CreateConsumerChainProposal) - require.NoError(t, err) + providerKeeper.SetPendingConsumerRemovalProp(ctx, tc.ChainId, tc.StopTime) } ctx = ctx.WithBlockTime(time.Now().UTC()) - propsToExecute := providerKeeper.CreateProposalsToExecute(ctx) - // Delete create proposals, same as what would be done by IteratePendingCreateProposal - providerKeeper.DeletePendingCreateProposal(ctx, propsToExecute...) + propsToExecute := providerKeeper.ConsumerRemovalPropsToExecute(ctx) + // Delete consumer removal proposals, same as what would be done by BeginBlockCCR + providerKeeper.DeletePendingConsumerRemovalProps(ctx, propsToExecute...) 
numDeleted := 0 for _, tc := range testCases { - res := providerKeeper.GetPendingCreateProposal(ctx, tc.SpawnTime, tc.ChainId) + res := providerKeeper.GetPendingConsumerRemovalProp(ctx, tc.ChainId, tc.StopTime) if !tc.ExpDeleted { - require.NotEmpty(t, res, "create proposal was deleted: %s %s", tc.ChainId, tc.SpawnTime.String()) + require.NotEmpty(t, res, "consumer removal prop was deleted: %s %s", tc.ChainId, tc.StopTime.String()) continue } - require.Empty(t, res, "create proposal was not deleted %s %s", tc.ChainId, tc.SpawnTime.String()) + require.Empty(t, res, "consumer removal prop was not deleted %s %s", tc.ChainId, tc.StopTime.String()) require.Equal(t, propsToExecute[numDeleted].ChainId, tc.ChainId) numDeleted += 1 } } -// Tests that pending create proposals are accessed in order by timestamp via the iterator -func TestPendingCreateProposalsOrder(t *testing.T) { +// Tests that pending consumer removal proposals are accessed in order by timestamp via the iterator +func TestPendingConsumerRemovalPropOrder(t *testing.T) { now := time.Now().UTC() // props with unique chain ids and spawn times - sampleProp1 := types.CreateConsumerChainProposal{ChainId: "1", SpawnTime: now} - sampleProp2 := types.CreateConsumerChainProposal{ChainId: "2", SpawnTime: now.Add(1 * time.Hour)} - sampleProp3 := types.CreateConsumerChainProposal{ChainId: "3", SpawnTime: now.Add(2 * time.Hour)} - sampleProp4 := types.CreateConsumerChainProposal{ChainId: "4", SpawnTime: now.Add(3 * time.Hour)} - sampleProp5 := types.CreateConsumerChainProposal{ChainId: "5", SpawnTime: now.Add(4 * time.Hour)} + sampleProp1 := types.ConsumerRemovalProposal{ChainId: "1", StopTime: now} + sampleProp2 := types.ConsumerRemovalProposal{ChainId: "2", StopTime: now.Add(1 * time.Hour)} + sampleProp3 := types.ConsumerRemovalProposal{ChainId: "3", StopTime: now.Add(2 * time.Hour)} + sampleProp4 := types.ConsumerRemovalProposal{ChainId: "4", StopTime: now.Add(3 * time.Hour)} + sampleProp5 := types.ConsumerRemovalProposal{ChainId: "5", StopTime: now.Add(4 * time.Hour)} testCases := []struct { - propSubmitOrder []types.CreateConsumerChainProposal + propSubmitOrder []types.ConsumerRemovalProposal accessTime time.Time - expectedOrderedProps []types.CreateConsumerChainProposal + expectedOrderedProps []types.ConsumerRemovalProposal }{ { - propSubmitOrder: []types.CreateConsumerChainProposal{ + propSubmitOrder: []types.ConsumerRemovalProposal{ sampleProp1, sampleProp2, sampleProp3, sampleProp4, sampleProp5, }, accessTime: now.Add(30 * time.Minute), - expectedOrderedProps: []types.CreateConsumerChainProposal{ + expectedOrderedProps: []types.ConsumerRemovalProposal{ sampleProp1, }, }, { - propSubmitOrder: []types.CreateConsumerChainProposal{ + propSubmitOrder: []types.ConsumerRemovalProposal{ sampleProp3, sampleProp2, sampleProp1, sampleProp5, sampleProp4, }, accessTime: now.Add(3 * time.Hour).Add(30 * time.Minute), - expectedOrderedProps: []types.CreateConsumerChainProposal{ + expectedOrderedProps: []types.ConsumerRemovalProposal{ sampleProp1, sampleProp2, sampleProp3, sampleProp4, }, }, { - propSubmitOrder: []types.CreateConsumerChainProposal{ + propSubmitOrder: []types.ConsumerRemovalProposal{ sampleProp5, sampleProp4, sampleProp3, sampleProp2, sampleProp1, }, accessTime: now.Add(5 * time.Hour), - expectedOrderedProps: []types.CreateConsumerChainProposal{ + expectedOrderedProps: []types.ConsumerRemovalProposal{ sampleProp1, sampleProp2, sampleProp3, sampleProp4, sampleProp5, }, }, } for _, tc := range testCases { - providerKeeper, ctx := 
testkeeper.GetProviderKeeperAndCtx(t) + providerKeeper, ctx, ctrl, _ := testkeeper.GetProviderKeeperAndCtx(t, testkeeper.NewInMemKeeperParams(t)) + defer ctrl.Finish() ctx = ctx.WithBlockTime(tc.accessTime) for _, prop := range tc.propSubmitOrder { - err := providerKeeper.SetPendingCreateProposal(ctx, &prop) - require.NoError(t, err) + providerKeeper.SetPendingConsumerRemovalProp(ctx, prop.ChainId, prop.StopTime) } - propsToExecute := providerKeeper.CreateProposalsToExecute(ctx) + propsToExecute := providerKeeper.ConsumerRemovalPropsToExecute(ctx) require.Equal(t, tc.expectedOrderedProps, propsToExecute) } } + +// TestMakeConsumerGenesis tests the MakeConsumerGenesis keeper method +// +// Note: the initial intention of this test wasn't very clear, it was migrated with best effort +func TestMakeConsumerGenesis(t *testing.T) { + + keeperParams := testkeeper.NewInMemKeeperParams(t) + keeperParams.SetTemplateClientState( + &ibctmtypes.ClientState{ + TrustLevel: ibctmtypes.DefaultTrustLevel, + MaxClockDrift: 10000000000, + ProofSpecs: []*_go.ProofSpec{ + { + LeafSpec: &_go.LeafOp{ + Hash: _go.HashOp_SHA256, + PrehashKey: _go.HashOp_NO_HASH, + PrehashValue: _go.HashOp_SHA256, + Length: _go.LengthOp_VAR_PROTO, + Prefix: []byte{0x00}, + }, + InnerSpec: &_go.InnerSpec{ + ChildOrder: []int32{0, 1}, + ChildSize: 33, + MinPrefixLength: 4, + MaxPrefixLength: 12, + Hash: _go.HashOp_SHA256, + }, + MaxDepth: 0, + MinDepth: 0, + }, + { + LeafSpec: &_go.LeafOp{ + Hash: _go.HashOp_SHA256, + PrehashKey: _go.HashOp_NO_HASH, + PrehashValue: _go.HashOp_SHA256, + Length: _go.LengthOp_VAR_PROTO, + Prefix: []byte{0x00}, + }, + InnerSpec: &_go.InnerSpec{ + ChildOrder: []int32{0, 1}, + ChildSize: 32, + MinPrefixLength: 1, + MaxPrefixLength: 1, + Hash: _go.HashOp_SHA256, + }, + MaxDepth: 0, + }, + }, + UpgradePath: []string{"upgrade", "upgradedIBCState"}, + AllowUpdateAfterExpiry: true, + AllowUpdateAfterMisbehaviour: true, + }, + ) + providerKeeper, ctx, ctrl, mocks := testkeeper.GetProviderKeeperAndCtx(t, keeperParams) + defer ctrl.Finish() + + // + // Other setup not covered by custom template client state + // + ctx = ctx.WithChainID("testchain1") // chainID is obtained from ctx + ctx = ctx.WithBlockHeight(5) // RevisionHeight obtained from ctx + gomock.InOrder(testkeeper.GetMocksForMakeConsumerGenesis(ctx, &mocks, 1814400000000000)...) 
+ + actualGenesis, err := providerKeeper.MakeConsumerGenesis(ctx) + require.NoError(t, err) + + jsonString := `{"params":{"enabled":true, "blocks_per_distribution_transmission":1000, "lock_unbonding_on_timeout": false},"new_chain":true,"provider_client_state":{"chain_id":"testchain1","trust_level":{"numerator":1,"denominator":3},"trusting_period":907200000000000,"unbonding_period":1814400000000000,"max_clock_drift":10000000000,"frozen_height":{},"latest_height":{"revision_height":5},"proof_specs":[{"leaf_spec":{"hash":1,"prehash_value":1,"length":1,"prefix":"AA=="},"inner_spec":{"child_order":[0,1],"child_size":33,"min_prefix_length":4,"max_prefix_length":12,"hash":1}},{"leaf_spec":{"hash":1,"prehash_value":1,"length":1,"prefix":"AA=="},"inner_spec":{"child_order":[0,1],"child_size":32,"min_prefix_length":1,"max_prefix_length":1,"hash":1}}],"upgrade_path":["upgrade","upgradedIBCState"],"allow_update_after_expiry":true,"allow_update_after_misbehaviour":true},"provider_consensus_state":{"timestamp":"2020-01-02T00:00:10Z","root":{"hash":"LpGpeyQVLUo9HpdsgJr12NP2eCICspcULiWa5u9udOA="},"next_validators_hash":"E30CE736441FB9101FADDAF7E578ABBE6DFDB67207112350A9A904D554E1F5BE"},"unbonding_sequences":null,"initial_val_set":[{"pub_key":{"type":"tendermint/PubKeyEd25519","value":"dcASx5/LIKZqagJWN0frOlFtcvz91frYmj/zmoZRWro="},"power":1}]}` + + var expectedGenesis consumertypes.GenesisState + err = json.Unmarshal([]byte(jsonString), &expectedGenesis) + require.NoError(t, err) + + // Zeroing out different fields that are challenging to mock + actualGenesis.InitialValSet = []abci.ValidatorUpdate{} + expectedGenesis.InitialValSet = []abci.ValidatorUpdate{} + actualGenesis.ProviderConsensusState = &ibctmtypes.ConsensusState{} + expectedGenesis.ProviderConsensusState = &ibctmtypes.ConsensusState{} + + require.Equal(t, actualGenesis, expectedGenesis, "consumer chain genesis created incorrectly") +} + +// TestBeginBlockInit directly tests BeginBlockInit against the spec using helpers defined above. 
+// +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-bblock-init1 +// Spec tag:[CCV-PCF-BBLOCK-INIT.1] +func TestBeginBlockInit(t *testing.T) { + + now := time.Now().UTC() + + keeperParams := testkeeper.NewInMemKeeperParams(t) + keeperParams.SetTemplateClientState(nil) + providerKeeper, ctx, ctrl, mocks := testkeeper.GetProviderKeeperAndCtx(t, keeperParams) + defer ctrl.Finish() + ctx = ctx.WithBlockTime(now) + + pendingProps := []*providertypes.ConsumerAdditionProposal{ + providertypes.NewConsumerAdditionProposal( + "title", "description", "chain1", clienttypes.NewHeight(3, 4), []byte{}, []byte{}, + now.Add(-time.Hour).UTC()).(*providertypes.ConsumerAdditionProposal), + providertypes.NewConsumerAdditionProposal( + "title", "description", "chain2", clienttypes.NewHeight(3, 4), []byte{}, []byte{}, + now.UTC()).(*providertypes.ConsumerAdditionProposal), + providertypes.NewConsumerAdditionProposal( + "title", "description", "chain3", clienttypes.NewHeight(3, 4), []byte{}, []byte{}, + now.Add(time.Hour).UTC()).(*providertypes.ConsumerAdditionProposal), + } + + gomock.InOrder( + // Expect client creation for the 1st and second proposals (spawn time already passed) + append(testkeeper.GetMocksForCreateConsumerClient(ctx, &mocks, "chain1", clienttypes.NewHeight(3, 4)), + testkeeper.GetMocksForCreateConsumerClient(ctx, &mocks, "chain2", clienttypes.NewHeight(3, 4))...)..., + ) + + for _, prop := range pendingProps { + err := providerKeeper.SetPendingConsumerAdditionProp(ctx, prop) + require.NoError(t, err) + } + + providerKeeper.BeginBlockInit(ctx) + + // Only the 3rd (final) proposal is still stored as pending + _, found := providerKeeper.GetPendingConsumerAdditionProp( + ctx, pendingProps[0].SpawnTime, pendingProps[0].ChainId) + require.False(t, found) + _, found = providerKeeper.GetPendingConsumerAdditionProp( + ctx, pendingProps[1].SpawnTime, pendingProps[1].ChainId) + require.False(t, found) + _, found = providerKeeper.GetPendingConsumerAdditionProp( + ctx, pendingProps[2].SpawnTime, pendingProps[2].ChainId) + require.True(t, found) +} + +// TestBeginBlockCCR tests BeginBlockCCR against the spec. +// +// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-bblock-ccr1 +// Spec tag: [CCV-PCF-BBLOCK-CCR.1] +func TestBeginBlockCCR(t *testing.T) { + now := time.Now().UTC() + + keeperParams := testkeeper.NewInMemKeeperParams(t) + keeperParams.SetTemplateClientState(nil) + providerKeeper, ctx, ctrl, mocks := testkeeper.GetProviderKeeperAndCtx(t, keeperParams) + defer ctrl.Finish() + ctx = ctx.WithBlockTime(now) + + pendingProps := []*providertypes.ConsumerRemovalProposal{ + providertypes.NewConsumerRemovalProposal( + "title", "description", "chain1", now.Add(-time.Hour).UTC(), + ).(*providertypes.ConsumerRemovalProposal), + providertypes.NewConsumerRemovalProposal( + "title", "description", "chain2", now, + ).(*providertypes.ConsumerRemovalProposal), + providertypes.NewConsumerRemovalProposal( + "title", "description", "chain3", now.Add(time.Hour).UTC(), + ).(*providertypes.ConsumerRemovalProposal), + } + + // + // Mock expectations + // + expectations := []*gomock.Call{} + for _, prop := range pendingProps { + // A consumer chain is setup corresponding to each prop, making these mocks necessary + expectations = append(expectations, testkeeper.GetMocksForCreateConsumerClient(ctx, &mocks, + prop.ChainId, clienttypes.NewHeight(2, 3))...) 
+ expectations = append(expectations, testkeeper.GetMocksForSetConsumerChain(ctx, &mocks, prop.ChainId)...) + } + // Only first two consumer chains should be stopped + expectations = append(expectations, testkeeper.GetMocksForStopConsumerChain(ctx, &mocks)...) + expectations = append(expectations, testkeeper.GetMocksForStopConsumerChain(ctx, &mocks)...) + + gomock.InOrder(expectations...) + + // + // Remaining setup + // + for _, prop := range pendingProps { + // Setup a valid consumer chain for each prop + err := providerKeeper.CreateConsumerClient(ctx, prop.ChainId, clienttypes.NewHeight(2, 3), false) + require.NoError(t, err) + err = providerKeeper.SetConsumerChain(ctx, "channelID") + require.NoError(t, err) + + // Set removal props for all consumer chains + providerKeeper.SetPendingConsumerRemovalProp(ctx, prop.ChainId, prop.StopTime) + } + + // + // Test execution + // + providerKeeper.BeginBlockCCR(ctx) + + // Only the 3rd (final) proposal is still stored as pending + found := providerKeeper.GetPendingConsumerRemovalProp( + ctx, pendingProps[0].ChainId, pendingProps[0].StopTime) + require.False(t, found) + found = providerKeeper.GetPendingConsumerRemovalProp( + ctx, pendingProps[1].ChainId, pendingProps[1].StopTime) + require.False(t, found) + found = providerKeeper.GetPendingConsumerRemovalProp( + ctx, pendingProps[2].ChainId, pendingProps[2].StopTime) + require.True(t, found) +} diff --git a/x/ccv/provider/keeper/relay.go b/x/ccv/provider/keeper/relay.go index 283c3ad0c1..19938e0284 100644 --- a/x/ccv/provider/keeper/relay.go +++ b/x/ccv/provider/keeper/relay.go @@ -122,7 +122,7 @@ func (k Keeper) SendValidatorUpdates(ctx sdk.Context) { valUpdateID := k.GetValidatorSetUpdateId(ctx) // get the validator updates from the staking module valUpdates := k.stakingKeeper.GetValidatorUpdates(ctx) - k.IterateConsumerChains(ctx, func(ctx sdk.Context, chainID string) (stop bool) { + k.IterateConsumerChains(ctx, func(ctx sdk.Context, chainID, clientID string) (stop bool) { // check whether there is an established CCV channel to this consumer chain if channelID, found := k.GetChainToChannel(ctx, chainID); found { // Send pending VSC packets to consumer chain diff --git a/x/ccv/provider/module.go b/x/ccv/provider/module.go index 39f11b850f..8b6721f3d4 100644 --- a/x/ccv/provider/module.go +++ b/x/ccv/provider/module.go @@ -128,12 +128,15 @@ func (am AppModule) RegisterServices(cfg module.Configurator) { providertypes.RegisterQueryServer(cfg.QueryServer(), am.keeper) } -// InitGenesis performs genesis initialization for the provider module. It returns -// no validator updates. +// InitGenesis performs genesis initialization for the provider module. It returns no validator updates. 
+// Note: This method along with ValidateGenesis satisfies the CCV spec: +// https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-initg1 func (am AppModule) InitGenesis(ctx sdk.Context, cdc codec.JSONCodec, data json.RawMessage) []abci.ValidatorUpdate { var genesisState providertypes.GenesisState cdc.MustUnmarshalJSON(data, &genesisState) + am.keeper.InitGenesis(ctx, &genesisState) + // initialize validator update id // TODO: Include in genesis and initialize from genesis value am.keeper.SetValidatorSetUpdateId(ctx, 1) @@ -152,10 +155,10 @@ func (AppModule) ConsensusVersion() uint64 { return 1 } // BeginBlock implements the AppModule interface func (am AppModule) BeginBlock(ctx sdk.Context, req abci.RequestBeginBlock) { - // Check if there are any consumer chains that are due to be started - am.keeper.IteratePendingCreateProposal(ctx) - // Check if there are any consumer chains that are due to be stopped - am.keeper.IteratePendingStopProposal(ctx) + // Create clients to consumer chains that are due to be spawned via pending consumer addition proposals + am.keeper.BeginBlockInit(ctx) + // Stop and remove state for any consumer chains that are due to be stopped via pending consumer removal proposals + am.keeper.BeginBlockCCR(ctx) } // EndBlock implements the AppModule interface diff --git a/x/ccv/provider/module_test.go b/x/ccv/provider/module_test.go new file mode 100644 index 0000000000..c8ab78de94 --- /dev/null +++ b/x/ccv/provider/module_test.go @@ -0,0 +1,165 @@ +package provider_test + +import ( + "testing" + + capabilitytypes "github.com/cosmos/cosmos-sdk/x/capability/types" + host "github.com/cosmos/ibc-go/v3/modules/core/24-host" + testkeeper "github.com/cosmos/interchain-security/testutil/keeper" + "github.com/cosmos/interchain-security/x/ccv/provider" + "github.com/cosmos/interchain-security/x/ccv/provider/types" + ccv "github.com/cosmos/interchain-security/x/ccv/types" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/golang/mock/gomock" +) + +// Tests the provider's InitGenesis implementation against the spec. 
+// See: https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/methods.md#ccv-pcf-initg1 +// Spec tag: [CCV-PCF-INITG.1] +// +// Note: Genesis validation for the provider is tested in TestValidateGenesisState +func TestInitGenesis(t *testing.T) { + + type testCase struct { + name string + // Whether port capability is already bound to the CCV provider module + isBound bool + // Provider's storage of consumer state to test against + consumerStates []types.ConsumerState + // Error returned from ClaimCapability during port binding, default: nil + errFromClaimCap error + // Whether method call should panic, default: false + expPanic bool + } + + tests := []testCase{ + { + name: "already bound port, no consumer states", + isBound: true, + consumerStates: []types.ConsumerState{}, + }, + { + name: "no bound port, no consumer states", + isBound: false, + consumerStates: []types.ConsumerState{}, + }, + { + name: "no bound port, multiple consumer states", + isBound: false, + consumerStates: []types.ConsumerState{ + { + ChainId: "chainId1", + ChannelId: "channelIdToChain1", + }, + { + ChainId: "chainId2", + ChannelId: "channelIdToChain2", + }, + { + ChainId: "chainId3", + ChannelId: "channelIdToChain3", + }, + }, + }, + { + name: "already bound port, one consumer state", + isBound: true, + consumerStates: []types.ConsumerState{ + { + ChainId: "chainId77", + ChannelId: "channelIdToChain77", + }, + }, + }, + { + name: "capability not owned, method should panic", + isBound: false, + consumerStates: []types.ConsumerState{ + { + ChainId: "chainId77", + ChannelId: "channelIdToChain77", + }, + }, + errFromClaimCap: capabilitytypes.ErrCapabilityNotOwned, + expPanic: true, + }, + } + + for _, tc := range tests { + // + // Setup + // + keeperParams := testkeeper.NewInMemKeeperParams(t) + providerKeeper, ctx, ctrl, mocks := testkeeper.GetProviderKeeperAndCtx(t, keeperParams) + + appModule := provider.NewAppModule(&providerKeeper) + genState := types.NewGenesisState( + providerKeeper.GetValidatorSetUpdateId(ctx), + nil, + tc.consumerStates, + nil, + nil, + nil, + nil, types.DefaultParams(), + ) + + cdc := keeperParams.Cdc + jsonBytes := cdc.MustMarshalJSON(genState) + + // + // Assert mocked logic before method executes + // + orderedCalls := []*gomock.Call{ + mocks.MockScopedKeeper.EXPECT().GetCapability( + ctx, host.PortPath(ccv.ProviderPortID), + ).Return( + &capabilitytypes.Capability{}, + tc.isBound, // Capability is returned successfully if port capability is already bound to this module. + ), + } + + // If port capability is not already bound, port will be bound and capability claimed. + if !tc.isBound { + dummyCap := &capabilitytypes.Capability{} + + orderedCalls = append(orderedCalls, + mocks.MockPortKeeper.EXPECT().BindPort(ctx, ccv.ProviderPortID).Return(dummyCap), + mocks.MockScopedKeeper.EXPECT().ClaimCapability( + ctx, dummyCap, host.PortPath(ccv.ProviderPortID)).Return(tc.errFromClaimCap), + ) + } + + gomock.InOrder(orderedCalls...) 
+ + // + // Execute method, then assert expected results + // + if tc.expPanic { + require.Panics(t, assert.PanicTestFunc(func() { + appModule.InitGenesis(ctx, cdc, jsonBytes) + }), tc.name) + continue // Nothing else to verify + } + + valUpdates := appModule.InitGenesis(ctx, cdc, jsonBytes) + + numStatesCounted := 0 + for _, state := range tc.consumerStates { + numStatesCounted += 1 + channelID, found := providerKeeper.GetChainToChannel(ctx, state.ChainId) + require.True(t, found) + require.Equal(t, state.ChannelId, channelID) + + chainID, found := providerKeeper.GetChannelToChain(ctx, state.ChannelId) + require.True(t, found) + require.Equal(t, state.ChainId, chainID) + } + require.Equal(t, len(tc.consumerStates), numStatesCounted) + + require.Empty(t, valUpdates, "InitGenesis should return no validator updates") + + ctrl.Finish() + } +} diff --git a/x/ccv/provider/proposal_handler.go b/x/ccv/provider/proposal_handler.go index fb8491ce3d..55c97a26c1 100644 --- a/x/ccv/provider/proposal_handler.go +++ b/x/ccv/provider/proposal_handler.go @@ -8,14 +8,14 @@ import ( "github.com/cosmos/interchain-security/x/ccv/provider/types" ) -// NewConsumerChainProposalHandler defines the CCV provider proposal handler +// NewConsumerChainProposalHandler defines the handler for consumer addition and consumer removal proposals. func NewConsumerChainProposalHandler(k keeper.Keeper) govtypes.Handler { return func(ctx sdk.Context, content govtypes.Content) error { switch c := content.(type) { - case *types.CreateConsumerChainProposal: - return k.CreateConsumerChainProposal(ctx, c) - case *types.StopConsumerChainProposal: - return k.StopConsumerChainProposal(ctx, c) + case *types.ConsumerAdditionProposal: + return k.HandleConsumerAdditionProposal(ctx, c) + case *types.ConsumerRemovalProposal: + return k.HandleConsumerRemovalProposal(ctx, c) default: return sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, "unrecognized ccv proposal content type: %T", c) } diff --git a/x/ccv/provider/proposal_handler_test.go b/x/ccv/provider/proposal_handler_test.go new file mode 100644 index 0000000000..92c806a1ed --- /dev/null +++ b/x/ccv/provider/proposal_handler_test.go @@ -0,0 +1,90 @@ +package provider_test + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + distributiontypes "github.com/cosmos/cosmos-sdk/x/distribution/types" + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/require" + + clienttypes "github.com/cosmos/ibc-go/v3/modules/core/02-client/types" + + "testing" + "time" + + govtypes "github.com/cosmos/cosmos-sdk/x/gov/types" + testkeeper "github.com/cosmos/interchain-security/testutil/keeper" + "github.com/cosmos/interchain-security/x/ccv/provider" + "github.com/cosmos/interchain-security/x/ccv/provider/types" +) + +// TestConsumerChainProposalHandler tests the highest level handler for proposals concerning both +// creating and stopping consumer chains. 
+func TestConsumerChainProposalHandler(t *testing.T) { + + // Snapshot times asserted in tests + now := time.Now().UTC() + hourFromNow := now.Add(time.Hour).UTC() + + testCases := []struct { + name string + content govtypes.Content + blockTime time.Time + expValidConsumerAddition bool + expValidConsumerRemoval bool + }{ + { + name: "valid consumer addition proposal", + content: types.NewConsumerAdditionProposal( + "title", "description", "chainID", + clienttypes.NewHeight(2, 3), []byte("gen_hash"), []byte("bin_hash"), now), + blockTime: hourFromNow, // ctx blocktime is after proposal's spawn time + expValidConsumerAddition: true, + }, + { + name: "valid consumer removal proposal", + content: types.NewConsumerRemovalProposal( + "title", "description", "chainID", now), + blockTime: hourFromNow, + expValidConsumerRemoval: true, + }, + { + name: "nil proposal", + content: nil, + blockTime: hourFromNow, + }, + { + name: "unsupported proposal type", + content: distributiontypes.NewCommunityPoolSpendProposal( + "title", "desc", []byte{}, + sdk.NewCoins(sdk.NewCoin("communityfunds", sdk.NewInt(10)))), + }, + } + + for _, tc := range testCases { + + // Setup + keeperParams := testkeeper.NewInMemKeeperParams(t) + keeperParams.SetTemplateClientState(nil) + providerKeeper, ctx, ctrl, mocks := testkeeper.GetProviderKeeperAndCtx(t, keeperParams) + ctx = ctx.WithBlockTime(tc.blockTime) + + // Mock expectations depending on expected outcome + if tc.expValidConsumerAddition { + gomock.InOrder(testkeeper.GetMocksForCreateConsumerClient(ctx, &mocks, "chainID", clienttypes.NewHeight(2, 3))...) + } + if tc.expValidConsumerRemoval { + testkeeper.SetupForStoppingConsumerChain(t, ctx, &providerKeeper, mocks) + } + + // Execution + proposalHandler := provider.NewConsumerChainProposalHandler(providerKeeper) + err := proposalHandler(ctx, tc.content) + + if tc.expValidConsumerAddition || tc.expValidConsumerRemoval { + require.NoError(t, err) + } else { + require.Error(t, err) + } + ctrl.Finish() + } +} diff --git a/x/ccv/provider/types/codec.go b/x/ccv/provider/types/codec.go index 143cd391a0..25250f68c1 100644 --- a/x/ccv/provider/types/codec.go +++ b/x/ccv/provider/types/codec.go @@ -16,7 +16,7 @@ func RegisterLegacyAminoCodec(cdc *codec.LegacyAmino) { func RegisterInterfaces(registry codectypes.InterfaceRegistry) { registry.RegisterImplementations( (*govtypes.Content)(nil), - &CreateConsumerChainProposal{}, + &ConsumerAdditionProposal{}, ) } diff --git a/x/ccv/provider/types/consumer.go b/x/ccv/provider/types/consumer.go new file mode 100644 index 0000000000..4376678cc2 --- /dev/null +++ b/x/ccv/provider/types/consumer.go @@ -0,0 +1,30 @@ +package types + +import ( + consumertypes "github.com/cosmos/interchain-security/x/ccv/consumer/types" + ccv "github.com/cosmos/interchain-security/x/ccv/types" +) + +func NewConsumerStates( + chainID, + clientID, + channelID string, + initialHeight uint64, + lockUbdTimeout bool, + genesis consumertypes.GenesisState, + unbondingOpsIndexes []UnbondingOpIndex, + pendingValsetChanges []ccv.ValidatorSetChangePacketData, + slashDowntimeAck []string, +) ConsumerState { + return ConsumerState{ + ChainId: chainID, + ClientId: clientID, + ChannelId: channelID, + InitialHeight: initialHeight, + LockUnbondingOnTimeout: true, + UnbondingOpsIndex: unbondingOpsIndexes, + PendingValsetChanges: pendingValsetChanges, + ConsumerGenesis: genesis, + SlashDowntimeAck: slashDowntimeAck, + } +} diff --git a/x/ccv/provider/types/errors.go b/x/ccv/provider/types/errors.go index d438f0262c..decd79474b 
100644 --- a/x/ccv/provider/types/errors.go +++ b/x/ccv/provider/types/errors.go @@ -6,8 +6,8 @@ import ( // Provider sentinel errors var ( - ErrInvalidCreateProposal = sdkerrors.Register(ModuleName, 1, "invalid create consumer chain proposal") - ErrInvalidStopProposal = sdkerrors.Register(ModuleName, 2, "invalid stop consumer chain proposal") - ErrUnknownConsumerChainId = sdkerrors.Register(ModuleName, 3, "no consumer chain with this chain id") - ErrUnknownConsumerChannelId = sdkerrors.Register(ModuleName, 4, "no consumer chain with this channel id") + ErrInvalidConsumerAdditionProposal = sdkerrors.Register(ModuleName, 1, "invalid consumer addition proposal") + ErrInvalidConsumerRemovalProp = sdkerrors.Register(ModuleName, 2, "invalid consumer removal proposal") + ErrUnknownConsumerChainId = sdkerrors.Register(ModuleName, 3, "no consumer chain with this chain id") + ErrUnknownConsumerChannelId = sdkerrors.Register(ModuleName, 4, "no consumer chain with this channel id") ) diff --git a/x/ccv/provider/types/genesis.go b/x/ccv/provider/types/genesis.go index 1cedfe0e0c..a3eab5c29a 100644 --- a/x/ccv/provider/types/genesis.go +++ b/x/ccv/provider/types/genesis.go @@ -5,12 +5,29 @@ import ( sdkerrors "github.com/cosmos/cosmos-sdk/types/errors" host "github.com/cosmos/ibc-go/v3/modules/core/24-host" + ccv "github.com/cosmos/interchain-security/x/ccv/types" + types "github.com/cosmos/interchain-security/x/ccv/types" ) -func NewGenesisState(consumerStates []ConsumerState, params Params) *GenesisState { +func NewGenesisState( + vscID uint64, + vscIdToHeights []ValsetUpdateIdToHeight, + consumerStates []ConsumerState, + unbondingOps []types.UnbondingOp, + matureUbdOps *ccv.MaturedUnbondingOps, + additionProposals []ConsumerAdditionProposal, + removalProposals []ConsumerRemovalProposal, + params Params, +) *GenesisState { return &GenesisState{ - ConsumerStates: consumerStates, - Params: params, + ValsetUpdateId: vscID, + ValsetUpdateIdToHeight: vscIdToHeights, + ConsumerStates: consumerStates, + UnbondingOps: unbondingOps, + MatureUnbondingOps: matureUbdOps, + ConsumerAdditionProposals: additionProposals, + ConsumerRemovalProposals: removalProposals, + Params: params, } } diff --git a/x/ccv/provider/types/genesis.pb.go b/x/ccv/provider/types/genesis.pb.go index 180e5788d9..9283d01754 100644 --- a/x/ccv/provider/types/genesis.pb.go +++ b/x/ccv/provider/types/genesis.pb.go @@ -5,7 +5,8 @@ package types import ( fmt "fmt" - _ "github.com/cosmos/interchain-security/x/ccv/types" + types1 "github.com/cosmos/interchain-security/x/ccv/consumer/types" + types "github.com/cosmos/interchain-security/x/ccv/types" _ "github.com/gogo/protobuf/gogoproto" proto "github.com/gogo/protobuf/proto" io "io" @@ -26,8 +27,22 @@ const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package // GenesisState defines the CCV provider chain genesis state type GenesisState struct { - ConsumerStates []ConsumerState `protobuf:"bytes,1,rep,name=consumer_states,json=consumerStates,proto3" json:"consumer_states" yaml:"consumer_states"` - Params Params `protobuf:"bytes,2,opt,name=params,proto3" json:"params"` + // empty for a completely new chain + ValsetUpdateId uint64 `protobuf:"varint,1,opt,name=valset_update_id,json=valsetUpdateId,proto3" json:"valset_update_id,omitempty"` + // empty for a completely new chain + ConsumerStates []ConsumerState `protobuf:"bytes,2,rep,name=consumer_states,json=consumerStates,proto3" json:"consumer_states" yaml:"consumer_states"` + // UnbondingOps defines the consumer chains that are 
still unbonding + // empty for a completely new chain + UnbondingOps []types.UnbondingOp `protobuf:"bytes,3,rep,name=unbonding_ops,json=unbondingOps,proto3" json:"unbonding_ops"` + // empty for a completely new chain + MatureUnbondingOps *types.MaturedUnbondingOps `protobuf:"bytes,4,opt,name=mature_unbonding_ops,json=matureUnbondingOps,proto3" json:"mature_unbonding_ops,omitempty"` + // empty for a completely new chain + ValsetUpdateIdToHeight []ValsetUpdateIdToHeight `protobuf:"bytes,5,rep,name=valset_update_id_to_height,json=valsetUpdateIdToHeight,proto3" json:"valset_update_id_to_height"` + // empty for a completely new chain + ConsumerAdditionProposals []ConsumerAdditionProposal `protobuf:"bytes,6,rep,name=consumer_addition_proposals,json=consumerAdditionProposals,proto3" json:"consumer_addition_proposals"` + // empty for a completely new chain + ConsumerRemovalProposals []ConsumerRemovalProposal `protobuf:"bytes,7,rep,name=consumer_removal_proposals,json=consumerRemovalProposals,proto3" json:"consumer_removal_proposals"` + Params Params `protobuf:"bytes,8,opt,name=params,proto3" json:"params"` } func (m *GenesisState) Reset() { *m = GenesisState{} } @@ -63,6 +78,13 @@ func (m *GenesisState) XXX_DiscardUnknown() { var xxx_messageInfo_GenesisState proto.InternalMessageInfo +func (m *GenesisState) GetValsetUpdateId() uint64 { + if m != nil { + return m.ValsetUpdateId + } + return 0 +} + func (m *GenesisState) GetConsumerStates() []ConsumerState { if m != nil { return m.ConsumerStates @@ -70,6 +92,41 @@ func (m *GenesisState) GetConsumerStates() []ConsumerState { return nil } +func (m *GenesisState) GetUnbondingOps() []types.UnbondingOp { + if m != nil { + return m.UnbondingOps + } + return nil +} + +func (m *GenesisState) GetMatureUnbondingOps() *types.MaturedUnbondingOps { + if m != nil { + return m.MatureUnbondingOps + } + return nil +} + +func (m *GenesisState) GetValsetUpdateIdToHeight() []ValsetUpdateIdToHeight { + if m != nil { + return m.ValsetUpdateIdToHeight + } + return nil +} + +func (m *GenesisState) GetConsumerAdditionProposals() []ConsumerAdditionProposal { + if m != nil { + return m.ConsumerAdditionProposals + } + return nil +} + +func (m *GenesisState) GetConsumerRemovalProposals() []ConsumerRemovalProposal { + if m != nil { + return m.ConsumerRemovalProposals + } + return nil +} + func (m *GenesisState) GetParams() Params { if m != nil { return m.Params @@ -77,10 +134,25 @@ func (m *GenesisState) GetParams() Params { return Params{} } -// ConsumerState defines the state that the provider chain stores for each consumer chain +// consumer chain type ConsumerState struct { + // The provider's identifier for this consumer chain. ChainId string `protobuf:"bytes,1,opt,name=chain_id,json=chainId,proto3" json:"chain_id,omitempty"` + // The provider's channel identifier to this consumer chain. 
ChannelId string `protobuf:"bytes,2,opt,name=channel_id,json=channelId,proto3" json:"channel_id,omitempty"` + // ClientID defines the IBC client ID for the consumer chain + ClientId string `protobuf:"bytes,3,opt,name=client_id,json=clientId,proto3" json:"client_id,omitempty"` + InitialHeight uint64 `protobuf:"varint,4,opt,name=initial_height,json=initialHeight,proto3" json:"initial_height,omitempty"` + // LockUnbondingOnTimeout defines whether the unbonding funds should be released for this + // chain in case of a IBC channel timeout + LockUnbondingOnTimeout bool `protobuf:"varint,5,opt,name=lock_unbonding_on_timeout,json=lockUnbondingOnTimeout,proto3" json:"lock_unbonding_on_timeout,omitempty"` + // ConsumerGenesis defines the initial consumer chain genesis states + ConsumerGenesis types1.GenesisState `protobuf:"bytes,6,opt,name=consumer_genesis,json=consumerGenesis,proto3" json:"consumer_genesis"` + // PendingValsetChanges defines the pending validator set changes for the consumer chain + PendingValsetChanges []types.ValidatorSetChangePacketData `protobuf:"bytes,7,rep,name=pending_valset_changes,json=pendingValsetChanges,proto3" json:"pending_valset_changes"` + SlashDowntimeAck []string `protobuf:"bytes,8,rep,name=slash_downtime_ack,json=slashDowntimeAck,proto3" json:"slash_downtime_ack,omitempty"` + // UnbondingOpsIndex defines the unbonding operations on the consumer chain + UnbondingOpsIndex []UnbondingOpIndex `protobuf:"bytes,9,rep,name=unbonding_ops_index,json=unbondingOpsIndex,proto3" json:"unbonding_ops_index"` } func (m *ConsumerState) Reset() { *m = ConsumerState{} } @@ -130,9 +202,168 @@ func (m *ConsumerState) GetChannelId() string { return "" } +func (m *ConsumerState) GetClientId() string { + if m != nil { + return m.ClientId + } + return "" +} + +func (m *ConsumerState) GetInitialHeight() uint64 { + if m != nil { + return m.InitialHeight + } + return 0 +} + +func (m *ConsumerState) GetLockUnbondingOnTimeout() bool { + if m != nil { + return m.LockUnbondingOnTimeout + } + return false +} + +func (m *ConsumerState) GetConsumerGenesis() types1.GenesisState { + if m != nil { + return m.ConsumerGenesis + } + return types1.GenesisState{} +} + +func (m *ConsumerState) GetPendingValsetChanges() []types.ValidatorSetChangePacketData { + if m != nil { + return m.PendingValsetChanges + } + return nil +} + +func (m *ConsumerState) GetSlashDowntimeAck() []string { + if m != nil { + return m.SlashDowntimeAck + } + return nil +} + +func (m *ConsumerState) GetUnbondingOpsIndex() []UnbondingOpIndex { + if m != nil { + return m.UnbondingOpsIndex + } + return nil +} + +// UnbondingOpIndex defines the genesis information for each unbonding operations index +// referenced by chain id and valset udpate id +type UnbondingOpIndex struct { + ValsetUpdateId uint64 `protobuf:"varint,1,opt,name=valset_update_id,json=valsetUpdateId,proto3" json:"valset_update_id,omitempty"` + UnbondingOpIndex []uint64 `protobuf:"varint,2,rep,packed,name=unbonding_op_index,json=unbondingOpIndex,proto3" json:"unbonding_op_index,omitempty"` +} + +func (m *UnbondingOpIndex) Reset() { *m = UnbondingOpIndex{} } +func (m *UnbondingOpIndex) String() string { return proto.CompactTextString(m) } +func (*UnbondingOpIndex) ProtoMessage() {} +func (*UnbondingOpIndex) Descriptor() ([]byte, []int) { + return fileDescriptor_48411d9c7900d48e, []int{2} +} +func (m *UnbondingOpIndex) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *UnbondingOpIndex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if 
deterministic { + return xxx_messageInfo_UnbondingOpIndex.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *UnbondingOpIndex) XXX_Merge(src proto.Message) { + xxx_messageInfo_UnbondingOpIndex.Merge(m, src) +} +func (m *UnbondingOpIndex) XXX_Size() int { + return m.Size() +} +func (m *UnbondingOpIndex) XXX_DiscardUnknown() { + xxx_messageInfo_UnbondingOpIndex.DiscardUnknown(m) +} + +var xxx_messageInfo_UnbondingOpIndex proto.InternalMessageInfo + +func (m *UnbondingOpIndex) GetValsetUpdateId() uint64 { + if m != nil { + return m.ValsetUpdateId + } + return 0 +} + +func (m *UnbondingOpIndex) GetUnbondingOpIndex() []uint64 { + if m != nil { + return m.UnbondingOpIndex + } + return nil +} + +// ValsetUpdateIdToHeight defines the genesis information for the mapping +// of each valset udpate id to a block height +type ValsetUpdateIdToHeight struct { + ValsetUpdateId uint64 `protobuf:"varint,1,opt,name=valset_update_id,json=valsetUpdateId,proto3" json:"valset_update_id,omitempty"` + Height uint64 `protobuf:"varint,2,opt,name=height,proto3" json:"height,omitempty"` +} + +func (m *ValsetUpdateIdToHeight) Reset() { *m = ValsetUpdateIdToHeight{} } +func (m *ValsetUpdateIdToHeight) String() string { return proto.CompactTextString(m) } +func (*ValsetUpdateIdToHeight) ProtoMessage() {} +func (*ValsetUpdateIdToHeight) Descriptor() ([]byte, []int) { + return fileDescriptor_48411d9c7900d48e, []int{3} +} +func (m *ValsetUpdateIdToHeight) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *ValsetUpdateIdToHeight) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_ValsetUpdateIdToHeight.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *ValsetUpdateIdToHeight) XXX_Merge(src proto.Message) { + xxx_messageInfo_ValsetUpdateIdToHeight.Merge(m, src) +} +func (m *ValsetUpdateIdToHeight) XXX_Size() int { + return m.Size() +} +func (m *ValsetUpdateIdToHeight) XXX_DiscardUnknown() { + xxx_messageInfo_ValsetUpdateIdToHeight.DiscardUnknown(m) +} + +var xxx_messageInfo_ValsetUpdateIdToHeight proto.InternalMessageInfo + +func (m *ValsetUpdateIdToHeight) GetValsetUpdateId() uint64 { + if m != nil { + return m.ValsetUpdateId + } + return 0 +} + +func (m *ValsetUpdateIdToHeight) GetHeight() uint64 { + if m != nil { + return m.Height + } + return 0 +} + func init() { proto.RegisterType((*GenesisState)(nil), "interchain_security.ccv.provider.v1.GenesisState") proto.RegisterType((*ConsumerState)(nil), "interchain_security.ccv.provider.v1.ConsumerState") + proto.RegisterType((*UnbondingOpIndex)(nil), "interchain_security.ccv.provider.v1.UnbondingOpIndex") + proto.RegisterType((*ValsetUpdateIdToHeight)(nil), "interchain_security.ccv.provider.v1.ValsetUpdateIdToHeight") } func init() { @@ -140,29 +371,57 @@ func init() { } var fileDescriptor_48411d9c7900d48e = []byte{ - // 340 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x91, 0xcf, 0x4a, 0xf3, 0x40, - 0x14, 0xc5, 0x33, 0xfd, 0x3e, 0xaa, 0x9d, 0xfa, 0x07, 0x82, 0x48, 0x2d, 0x38, 0x2d, 0xd1, 0x45, - 0x41, 0x9c, 0x21, 0x71, 0xd7, 0x65, 0x5d, 0x48, 0x76, 0x52, 0x5d, 0xb9, 0x29, 0xe9, 0x64, 0x48, - 0x07, 0x9a, 0x4c, 0x98, 0x99, 0x06, 0x8b, 0x2f, 0xe1, 0x63, 0x75, 0xd9, 0x95, 0xb8, 0x2a, 0xd2, - 0xbe, 0x81, 0x4f, 
0x20, 0x99, 0xc6, 0x6a, 0x45, 0x21, 0xbb, 0x99, 0x7b, 0xef, 0xef, 0x9c, 0x03, - 0x07, 0xba, 0x3c, 0xd1, 0x4c, 0xd2, 0x51, 0xc0, 0x93, 0x81, 0x62, 0x74, 0x22, 0xb9, 0x9e, 0x12, - 0x4a, 0x33, 0x92, 0x4a, 0x91, 0xf1, 0x90, 0x49, 0x92, 0xb9, 0x24, 0x62, 0x09, 0x53, 0x5c, 0xe1, - 0x54, 0x0a, 0x2d, 0xec, 0xb3, 0x5f, 0x10, 0x4c, 0x69, 0x86, 0x3f, 0x11, 0x9c, 0xb9, 0xcd, 0xa3, - 0x48, 0x44, 0xc2, 0xdc, 0x93, 0xfc, 0xb5, 0x46, 0x9b, 0xe7, 0x7f, 0xb9, 0x65, 0x2e, 0x29, 0x14, - 0xb4, 0x68, 0x7a, 0x65, 0x32, 0x6d, 0xcc, 0x0c, 0xe3, 0xbc, 0x00, 0xb8, 0x77, 0xb3, 0x8e, 0x79, - 0xa7, 0x03, 0xcd, 0xec, 0x27, 0x78, 0x48, 0x45, 0xa2, 0x26, 0x31, 0x93, 0x03, 0x95, 0x4f, 0x54, - 0x03, 0xb4, 0xff, 0x75, 0xea, 0x9e, 0x87, 0x4b, 0xe4, 0xc7, 0xd7, 0x05, 0x6b, 0xc4, 0x7a, 0x68, - 0xb6, 0x68, 0x59, 0xef, 0x8b, 0xd6, 0xf1, 0x34, 0x88, 0xc7, 0x5d, 0xe7, 0x87, 0xb0, 0xd3, 0x3f, - 0xa0, 0xdf, 0xcf, 0x95, 0xed, 0xc3, 0x6a, 0x1a, 0xc8, 0x20, 0x56, 0x8d, 0x4a, 0x1b, 0x74, 0xea, - 0xde, 0x45, 0x29, 0xcf, 0x5b, 0x83, 0xf4, 0xfe, 0xe7, 0x66, 0xfd, 0x42, 0xc0, 0xf1, 0xe1, 0xfe, - 0x56, 0x16, 0xfb, 0x04, 0xee, 0xae, 0x75, 0x78, 0xd8, 0x00, 0x6d, 0xd0, 0xa9, 0xf5, 0x77, 0xcc, - 0xdf, 0x0f, 0xed, 0x53, 0x08, 0xe9, 0x28, 0x48, 0x12, 0x36, 0xce, 0x97, 0x15, 0xb3, 0xac, 0x15, - 0x13, 0x3f, 0xec, 0xdd, 0xcf, 0x96, 0x08, 0xcc, 0x97, 0x08, 0xbc, 0x2d, 0x11, 0x78, 0x5e, 0x21, - 0x6b, 0xbe, 0x42, 0xd6, 0xeb, 0x0a, 0x59, 0x0f, 0xdd, 0x88, 0xeb, 0xd1, 0x64, 0x88, 0xa9, 0x88, - 0x09, 0x15, 0x2a, 0x16, 0x8a, 0x7c, 0x05, 0xbe, 0xdc, 0x74, 0xf0, 0xb8, 0xdd, 0x82, 0x9e, 0xa6, - 0x4c, 0x0d, 0xab, 0xa6, 0x80, 0xab, 0x8f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x41, 0x40, 0x06, 0x36, - 0x4a, 0x02, 0x00, 0x00, + // 794 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x55, 0x5d, 0x6f, 0xf3, 0x34, + 0x14, 0x6e, 0xd6, 0xd2, 0xb7, 0xf5, 0xfb, 0x6e, 0x14, 0x33, 0x55, 0x59, 0x5f, 0xd1, 0x55, 0x05, + 0x44, 0x25, 0x46, 0xa2, 0x14, 0x21, 0xc1, 0x80, 0x8b, 0x7d, 0x48, 0xd0, 0x0b, 0xc4, 0x94, 0x7d, + 0x5c, 0xec, 0x26, 0x72, 0x1d, 0xab, 0x35, 0x4d, 0xec, 0x28, 0x76, 0xc2, 0x26, 0x84, 0x84, 0xc4, + 0x1f, 0xe0, 0x0f, 0x21, 0x6e, 0x77, 0xb9, 0x4b, 0xae, 0x26, 0xb4, 0xfd, 0x03, 0x7e, 0x01, 0x8a, + 0xe3, 0x76, 0x69, 0xd5, 0x8e, 0xf6, 0x2e, 0x39, 0x8f, 0x9f, 0xe7, 0x3c, 0x3e, 0x3e, 0x3e, 0x06, + 0x0e, 0x65, 0x92, 0xc4, 0x78, 0x8c, 0x28, 0xf3, 0x04, 0xc1, 0x49, 0x4c, 0xe5, 0xad, 0x8d, 0x71, + 0x6a, 0x47, 0x31, 0x4f, 0xa9, 0x4f, 0x62, 0x3b, 0x75, 0xec, 0x11, 0x61, 0x44, 0x50, 0x61, 0x45, + 0x31, 0x97, 0x1c, 0x7e, 0xb8, 0x84, 0x62, 0x61, 0x9c, 0x5a, 0x53, 0x8a, 0x95, 0x3a, 0xad, 0xdd, + 0x11, 0x1f, 0x71, 0xb5, 0xde, 0xce, 0xbe, 0x72, 0x6a, 0xeb, 0xa3, 0x55, 0xd9, 0x52, 0xc7, 0xd6, + 0x0a, 0x92, 0xb7, 0xfa, 0xeb, 0x78, 0x9a, 0x25, 0xfb, 0x1f, 0x0e, 0xe6, 0x4c, 0x24, 0x61, 0xce, + 0x99, 0x7e, 0x6b, 0x8e, 0xb3, 0x0e, 0x67, 0x6e, 0xef, 0xdd, 0xbf, 0xaa, 0xe0, 0xcd, 0x77, 0x79, + 0xe4, 0x5c, 0x22, 0x49, 0x60, 0x0f, 0x34, 0x52, 0x14, 0x08, 0x22, 0xbd, 0x24, 0xf2, 0x91, 0x24, + 0x1e, 0xf5, 0x4d, 0xa3, 0x63, 0xf4, 0x2a, 0xee, 0x4e, 0x1e, 0xbf, 0x54, 0xe1, 0x81, 0x0f, 0x7f, + 0x01, 0xef, 0x4e, 0x75, 0x3d, 0x91, 0x71, 0x85, 0xb9, 0xd5, 0x29, 0xf7, 0x5e, 0xf7, 0xfb, 0xd6, + 0x1a, 0x05, 0xb5, 0x4e, 0x34, 0x57, 0xa5, 0x3d, 0x6e, 0xdf, 0x3d, 0xec, 0x97, 0xfe, 0x7d, 0xd8, + 0x6f, 0xde, 0xa2, 0x30, 0x38, 0xec, 0x2e, 0x08, 0x77, 0xdd, 0x1d, 0x5c, 0x5c, 0x2e, 0xa0, 0x0b, + 0xb6, 0x13, 0x36, 0xe4, 0xcc, 0xa7, 0x6c, 0xe4, 0xf1, 0x48, 0x98, 0x65, 0x95, 0xfa, 0x93, 0x95, + 0xa9, 0x53, 0xc7, 0xba, 0x9c, 0x12, 0x7e, 0x8c, 0x8e, 0x2b, 0x59, 
0x3e, 0xf7, 0x4d, 0xf2, 0x1c, + 0x12, 0x10, 0x81, 0xdd, 0x10, 0xc9, 0x24, 0x26, 0xde, 0xbc, 0x74, 0xa5, 0x63, 0xf4, 0x5e, 0xf7, + 0xed, 0x97, 0xa4, 0x7f, 0x50, 0x3c, 0xbf, 0x90, 0x41, 0xb8, 0x30, 0x17, 0x2b, 0xc6, 0xe0, 0xaf, + 0xa0, 0xb5, 0x58, 0x5d, 0x4f, 0x72, 0x6f, 0x4c, 0xe8, 0x68, 0x2c, 0xcd, 0x77, 0xd4, 0x1e, 0xbe, + 0x5e, 0xab, 0x7c, 0x57, 0x73, 0x87, 0x71, 0xc1, 0xbf, 0x57, 0x12, 0x7a, 0x5f, 0xcd, 0x74, 0x29, + 0x0a, 0x7f, 0x37, 0xc0, 0xdb, 0x59, 0x69, 0x91, 0xef, 0x53, 0x49, 0x39, 0xf3, 0xa2, 0x98, 0x47, + 0x5c, 0xa0, 0x40, 0x98, 0x55, 0x65, 0xe0, 0xdb, 0x8d, 0xce, 0xef, 0x48, 0xcb, 0x9c, 0x69, 0x15, + 0x6d, 0x61, 0x0f, 0xaf, 0xc0, 0x05, 0xfc, 0xcd, 0x00, 0xad, 0x99, 0x8b, 0x98, 0x84, 0x3c, 0x45, + 0x41, 0xc1, 0xc4, 0x2b, 0x65, 0xe2, 0x9b, 0x8d, 0x4c, 0xb8, 0xb9, 0xca, 0x82, 0x07, 0x13, 0x2f, + 0x87, 0x05, 0x1c, 0x80, 0x6a, 0x84, 0x62, 0x14, 0x0a, 0xb3, 0xa6, 0x0e, 0xf7, 0xd3, 0xb5, 0xb2, + 0x9d, 0x29, 0x8a, 0x16, 0xd7, 0x02, 0xdd, 0x3f, 0x2b, 0x60, 0x7b, 0xae, 0x97, 0xe1, 0x1e, 0xa8, + 0xe5, 0x42, 0xfa, 0xea, 0xd4, 0xdd, 0x57, 0xea, 0x7f, 0xe0, 0xc3, 0x0f, 0x00, 0xc0, 0x63, 0xc4, + 0x18, 0x09, 0x32, 0x70, 0x4b, 0x81, 0x75, 0x1d, 0x19, 0xf8, 0xf0, 0x2d, 0xa8, 0xe3, 0x80, 0x12, + 0x26, 0x33, 0xb4, 0xac, 0xd0, 0x5a, 0x1e, 0x18, 0xf8, 0xf0, 0x63, 0xb0, 0x43, 0x19, 0x95, 0x14, + 0x05, 0xd3, 0x7e, 0xa9, 0xa8, 0x7b, 0xb9, 0xad, 0xa3, 0xfa, 0x8c, 0xbf, 0x02, 0x7b, 0x01, 0xc7, + 0x93, 0x62, 0x0f, 0x33, 0x4f, 0xd2, 0x90, 0xf0, 0x24, 0xeb, 0x30, 0xa3, 0x57, 0x73, 0x9b, 0xd9, + 0x82, 0xe7, 0xbe, 0x64, 0x17, 0x39, 0x0a, 0x87, 0xa0, 0x31, 0x3b, 0x17, 0x3d, 0x26, 0xcc, 0xaa, + 0xaa, 0x8f, 0xb3, 0xb2, 0x3e, 0xb3, 0x11, 0x94, 0x3a, 0x56, 0x71, 0x90, 0xe8, 0x2a, 0xcd, 0x46, + 0x84, 0xc6, 0xa0, 0x04, 0xcd, 0x88, 0xe4, 0xbe, 0xf4, 0x4d, 0xc8, 0xb6, 0x3f, 0x22, 0xd3, 0x73, + 0xff, 0xf2, 0xa5, 0x6b, 0x76, 0x85, 0x02, 0xea, 0x23, 0xc9, 0xe3, 0x73, 0x22, 0x4f, 0x14, 0xed, + 0x0c, 0xe1, 0x09, 0x91, 0xa7, 0x48, 0x22, 0x9d, 0x70, 0x57, 0xab, 0xe7, 0xf7, 0x23, 0x5f, 0x24, + 0xe0, 0x01, 0x80, 0x22, 0x40, 0x62, 0xec, 0xf9, 0xfc, 0x67, 0x96, 0x15, 0xc3, 0x43, 0x78, 0x62, + 0xd6, 0x3a, 0xe5, 0x5e, 0xdd, 0x6d, 0x28, 0xe4, 0x54, 0x03, 0x47, 0x78, 0x02, 0x27, 0xe0, 0xfd, + 0xb9, 0x09, 0xe0, 0x51, 0xe6, 0x93, 0x1b, 0xb3, 0xae, 0x0c, 0x7e, 0xb1, 0x56, 0xab, 0x14, 0x6e, + 0xfd, 0x20, 0x23, 0x6b, 0x77, 0xef, 0x15, 0x07, 0x8e, 0x02, 0xba, 0x3f, 0x81, 0xc6, 0xe2, 0xe2, + 0x0d, 0x86, 0xf0, 0x01, 0x80, 0x45, 0xab, 0xda, 0x69, 0x36, 0x87, 0x2b, 0x6e, 0x23, 0x59, 0xd0, + 0xed, 0x5e, 0x83, 0xe6, 0xf2, 0xb9, 0xb1, 0x41, 0xc6, 0x26, 0xa8, 0xea, 0xf6, 0xdb, 0x52, 0xb8, + 0xfe, 0x3b, 0xbe, 0xb8, 0x7b, 0x6c, 0x1b, 0xf7, 0x8f, 0x6d, 0xe3, 0x9f, 0xc7, 0xb6, 0xf1, 0xc7, + 0x53, 0xbb, 0x74, 0xff, 0xd4, 0x2e, 0xfd, 0xfd, 0xd4, 0x2e, 0x5d, 0x1f, 0x8e, 0xa8, 0x1c, 0x27, + 0x43, 0x0b, 0xf3, 0xd0, 0xc6, 0x5c, 0x84, 0x5c, 0xd8, 0xcf, 0x25, 0xfc, 0x6c, 0xf6, 0x50, 0xdd, + 0xcc, 0x3f, 0x89, 0xf2, 0x36, 0x22, 0x62, 0x58, 0x55, 0xcf, 0xd4, 0xe7, 0xff, 0x05, 0x00, 0x00, + 0xff, 0xff, 0xb9, 0x12, 0x17, 0x90, 0xd7, 0x07, 0x00, 0x00, } func (m *GenesisState) Marshal() (dAtA []byte, err error) { @@ -194,7 +453,75 @@ func (m *GenesisState) MarshalToSizedBuffer(dAtA []byte) (int, error) { i = encodeVarintGenesis(dAtA, i, uint64(size)) } i-- - dAtA[i] = 0x12 + dAtA[i] = 0x42 + if len(m.ConsumerRemovalProposals) > 0 { + for iNdEx := len(m.ConsumerRemovalProposals) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.ConsumerRemovalProposals[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = 
encodeVarintGenesis(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x3a + } + } + if len(m.ConsumerAdditionProposals) > 0 { + for iNdEx := len(m.ConsumerAdditionProposals) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.ConsumerAdditionProposals[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenesis(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x32 + } + } + if len(m.ValsetUpdateIdToHeight) > 0 { + for iNdEx := len(m.ValsetUpdateIdToHeight) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.ValsetUpdateIdToHeight[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenesis(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x2a + } + } + if m.MatureUnbondingOps != nil { + { + size, err := m.MatureUnbondingOps.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenesis(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x22 + } + if len(m.UnbondingOps) > 0 { + for iNdEx := len(m.UnbondingOps) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.UnbondingOps[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenesis(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x1a + } + } if len(m.ConsumerStates) > 0 { for iNdEx := len(m.ConsumerStates) - 1; iNdEx >= 0; iNdEx-- { { @@ -206,9 +533,14 @@ func (m *GenesisState) MarshalToSizedBuffer(dAtA []byte) (int, error) { i = encodeVarintGenesis(dAtA, i, uint64(size)) } i-- - dAtA[i] = 0xa + dAtA[i] = 0x12 } } + if m.ValsetUpdateId != 0 { + i = encodeVarintGenesis(dAtA, i, uint64(m.ValsetUpdateId)) + i-- + dAtA[i] = 0x8 + } return len(dAtA) - i, nil } @@ -232,6 +564,75 @@ func (m *ConsumerState) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if len(m.UnbondingOpsIndex) > 0 { + for iNdEx := len(m.UnbondingOpsIndex) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.UnbondingOpsIndex[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenesis(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x4a + } + } + if len(m.SlashDowntimeAck) > 0 { + for iNdEx := len(m.SlashDowntimeAck) - 1; iNdEx >= 0; iNdEx-- { + i -= len(m.SlashDowntimeAck[iNdEx]) + copy(dAtA[i:], m.SlashDowntimeAck[iNdEx]) + i = encodeVarintGenesis(dAtA, i, uint64(len(m.SlashDowntimeAck[iNdEx]))) + i-- + dAtA[i] = 0x42 + } + } + if len(m.PendingValsetChanges) > 0 { + for iNdEx := len(m.PendingValsetChanges) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.PendingValsetChanges[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenesis(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x3a + } + } + { + size, err := m.ConsumerGenesis.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenesis(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x32 + if m.LockUnbondingOnTimeout { + i-- + if m.LockUnbondingOnTimeout { + dAtA[i] = 1 + } else { + dAtA[i] = 0 + } + i-- + dAtA[i] = 0x28 + } + if m.InitialHeight != 0 { + i = encodeVarintGenesis(dAtA, i, uint64(m.InitialHeight)) + i-- + dAtA[i] = 0x20 + } + if len(m.ClientId) > 0 { + i -= len(m.ClientId) + copy(dAtA[i:], m.ClientId) + i = encodeVarintGenesis(dAtA, i, uint64(len(m.ClientId))) + i-- + dAtA[i] = 0x1a + } if len(m.ChannelId) > 0 { i -= len(m.ChannelId) copy(dAtA[i:], m.ChannelId) @@ -249,41 +650,151 @@ func (m *ConsumerState) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, 
nil } -func encodeVarintGenesis(dAtA []byte, offset int, v uint64) int { - offset -= sovGenesis(v) - base := offset - for v >= 1<<7 { - dAtA[offset] = uint8(v&0x7f | 0x80) - v >>= 7 - offset++ +func (m *UnbondingOpIndex) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err } - dAtA[offset] = uint8(v) - return base + return dAtA[:n], nil } -func (m *GenesisState) Size() (n int) { - if m == nil { - return 0 - } + +func (m *UnbondingOpIndex) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *UnbondingOpIndex) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i var l int _ = l - if len(m.ConsumerStates) > 0 { - for _, e := range m.ConsumerStates { - l = e.Size() - n += 1 + l + sovGenesis(uint64(l)) + if len(m.UnbondingOpIndex) > 0 { + dAtA5 := make([]byte, len(m.UnbondingOpIndex)*10) + var j4 int + for _, num := range m.UnbondingOpIndex { + for num >= 1<<7 { + dAtA5[j4] = uint8(uint64(num)&0x7f | 0x80) + num >>= 7 + j4++ + } + dAtA5[j4] = uint8(num) + j4++ } + i -= j4 + copy(dAtA[i:], dAtA5[:j4]) + i = encodeVarintGenesis(dAtA, i, uint64(j4)) + i-- + dAtA[i] = 0x12 } - l = m.Params.Size() - n += 1 + l + sovGenesis(uint64(l)) - return n + if m.ValsetUpdateId != 0 { + i = encodeVarintGenesis(dAtA, i, uint64(m.ValsetUpdateId)) + i-- + dAtA[i] = 0x8 + } + return len(dAtA) - i, nil } -func (m *ConsumerState) Size() (n int) { - if m == nil { - return 0 +func (m *ValsetUpdateIdToHeight) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err } - var l int - _ = l - l = len(m.ChainId) + return dAtA[:n], nil +} + +func (m *ValsetUpdateIdToHeight) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *ValsetUpdateIdToHeight) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.Height != 0 { + i = encodeVarintGenesis(dAtA, i, uint64(m.Height)) + i-- + dAtA[i] = 0x10 + } + if m.ValsetUpdateId != 0 { + i = encodeVarintGenesis(dAtA, i, uint64(m.ValsetUpdateId)) + i-- + dAtA[i] = 0x8 + } + return len(dAtA) - i, nil +} + +func encodeVarintGenesis(dAtA []byte, offset int, v uint64) int { + offset -= sovGenesis(v) + base := offset + for v >= 1<<7 { + dAtA[offset] = uint8(v&0x7f | 0x80) + v >>= 7 + offset++ + } + dAtA[offset] = uint8(v) + return base +} +func (m *GenesisState) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.ValsetUpdateId != 0 { + n += 1 + sovGenesis(uint64(m.ValsetUpdateId)) + } + if len(m.ConsumerStates) > 0 { + for _, e := range m.ConsumerStates { + l = e.Size() + n += 1 + l + sovGenesis(uint64(l)) + } + } + if len(m.UnbondingOps) > 0 { + for _, e := range m.UnbondingOps { + l = e.Size() + n += 1 + l + sovGenesis(uint64(l)) + } + } + if m.MatureUnbondingOps != nil { + l = m.MatureUnbondingOps.Size() + n += 1 + l + sovGenesis(uint64(l)) + } + if len(m.ValsetUpdateIdToHeight) > 0 { + for _, e := range m.ValsetUpdateIdToHeight { + l = e.Size() + n += 1 + l + sovGenesis(uint64(l)) + } + } + if len(m.ConsumerAdditionProposals) > 0 { + for _, e := range m.ConsumerAdditionProposals { + l = e.Size() + n += 1 + l + sovGenesis(uint64(l)) + } + } + if len(m.ConsumerRemovalProposals) > 0 { + for _, e := range m.ConsumerRemovalProposals { + l = e.Size() + n += 1 
+ l + sovGenesis(uint64(l)) + } + } + l = m.Params.Size() + n += 1 + l + sovGenesis(uint64(l)) + return n +} + +func (m *ConsumerState) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.ChainId) if l > 0 { n += 1 + l + sovGenesis(uint64(l)) } @@ -291,6 +802,70 @@ func (m *ConsumerState) Size() (n int) { if l > 0 { n += 1 + l + sovGenesis(uint64(l)) } + l = len(m.ClientId) + if l > 0 { + n += 1 + l + sovGenesis(uint64(l)) + } + if m.InitialHeight != 0 { + n += 1 + sovGenesis(uint64(m.InitialHeight)) + } + if m.LockUnbondingOnTimeout { + n += 2 + } + l = m.ConsumerGenesis.Size() + n += 1 + l + sovGenesis(uint64(l)) + if len(m.PendingValsetChanges) > 0 { + for _, e := range m.PendingValsetChanges { + l = e.Size() + n += 1 + l + sovGenesis(uint64(l)) + } + } + if len(m.SlashDowntimeAck) > 0 { + for _, s := range m.SlashDowntimeAck { + l = len(s) + n += 1 + l + sovGenesis(uint64(l)) + } + } + if len(m.UnbondingOpsIndex) > 0 { + for _, e := range m.UnbondingOpsIndex { + l = e.Size() + n += 1 + l + sovGenesis(uint64(l)) + } + } + return n +} + +func (m *UnbondingOpIndex) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.ValsetUpdateId != 0 { + n += 1 + sovGenesis(uint64(m.ValsetUpdateId)) + } + if len(m.UnbondingOpIndex) > 0 { + l = 0 + for _, e := range m.UnbondingOpIndex { + l += sovGenesis(uint64(e)) + } + n += 1 + sovGenesis(uint64(l)) + l + } + return n +} + +func (m *ValsetUpdateIdToHeight) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.ValsetUpdateId != 0 { + n += 1 + sovGenesis(uint64(m.ValsetUpdateId)) + } + if m.Height != 0 { + n += 1 + sovGenesis(uint64(m.Height)) + } return n } @@ -330,6 +905,25 @@ func (m *GenesisState) Unmarshal(dAtA []byte) error { } switch fieldNum { case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field ValsetUpdateId", wireType) + } + m.ValsetUpdateId = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.ValsetUpdateId |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field ConsumerStates", wireType) } @@ -363,9 +957,9 @@ func (m *GenesisState) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex - case 2: + case 3: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Params", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field UnbondingOps", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -392,65 +986,86 @@ func (m *GenesisState) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if err := m.Params.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + m.UnbondingOps = append(m.UnbondingOps, types.UnbondingOp{}) + if err := m.UnbondingOps[len(m.UnbondingOps)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skipGenesis(dAtA[iNdEx:]) - if err != nil { - return err + case 4: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field MatureUnbondingOps", wireType) } - if (skippy < 0) || (iNdEx+skippy) < 0 { + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } 
+ } + if msglen < 0 { return ErrInvalidLengthGenesis } - if (iNdEx + skippy) > l { + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { return io.ErrUnexpectedEOF } - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *ConsumerState) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowGenesis + if m.MatureUnbondingOps == nil { + m.MatureUnbondingOps = &types.MaturedUnbondingOps{} } - if iNdEx >= l { + if err := m.MatureUnbondingOps.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 5: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ValsetUpdateIdToHeight", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { return io.ErrUnexpectedEOF } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break + m.ValsetUpdateIdToHeight = append(m.ValsetUpdateIdToHeight, ValsetUpdateIdToHeight{}) + if err := m.ValsetUpdateIdToHeight[len(m.ValsetUpdateIdToHeight)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: ConsumerState: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: ConsumerState: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: + iNdEx = postIndex + case 6: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field ChainId", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field ConsumerAdditionProposals", wireType) } - var stringLen uint64 + var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowGenesis @@ -460,29 +1075,31 @@ func (m *ConsumerState) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - stringLen |= uint64(b&0x7F) << shift + msglen |= int(b&0x7F) << shift if b < 0x80 { break } } - intStringLen := int(stringLen) - if intStringLen < 0 { + if msglen < 0 { return ErrInvalidLengthGenesis } - postIndex := iNdEx + intStringLen + postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthGenesis } if postIndex > l { return io.ErrUnexpectedEOF } - m.ChainId = string(dAtA[iNdEx:postIndex]) + m.ConsumerAdditionProposals = append(m.ConsumerAdditionProposals, ConsumerAdditionProposal{}) + if err := m.ConsumerAdditionProposals[len(m.ConsumerAdditionProposals)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } iNdEx = postIndex - case 2: + case 7: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field ChannelId", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field ConsumerRemovalProposals", wireType) } - var stringLen uint64 + var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowGenesis @@ -492,24 +1109,610 @@ func (m *ConsumerState) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - stringLen |= 
uint64(b&0x7F) << shift + msglen |= int(b&0x7F) << shift if b < 0x80 { break } } - intStringLen := int(stringLen) - if intStringLen < 0 { + if msglen < 0 { return ErrInvalidLengthGenesis } - postIndex := iNdEx + intStringLen + postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthGenesis } if postIndex > l { return io.ErrUnexpectedEOF } - m.ChannelId = string(dAtA[iNdEx:postIndex]) + m.ConsumerRemovalProposals = append(m.ConsumerRemovalProposals, ConsumerRemovalProposal{}) + if err := m.ConsumerRemovalProposals[len(m.ConsumerRemovalProposals)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } iNdEx = postIndex + case 8: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Params", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.Params.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenesis(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenesis + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *ConsumerState) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ConsumerState: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ConsumerState: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ChainId", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ChainId = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ChannelId", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + intStringLen + if postIndex 
< 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ChannelId = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ClientId", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ClientId = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 4: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field InitialHeight", wireType) + } + m.InitialHeight = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.InitialHeight |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 5: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field LockUnbondingOnTimeout", wireType) + } + var v int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.LockUnbondingOnTimeout = bool(v != 0) + case 6: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ConsumerGenesis", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := m.ConsumerGenesis.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 7: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field PendingValsetChanges", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.PendingValsetChanges = append(m.PendingValsetChanges, types.ValidatorSetChangePacketData{}) + if err := m.PendingValsetChanges[len(m.PendingValsetChanges)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 8: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field SlashDowntimeAck", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + 
} + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.SlashDowntimeAck = append(m.SlashDowntimeAck, string(dAtA[iNdEx:postIndex])) + iNdEx = postIndex + case 9: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field UnbondingOpsIndex", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.UnbondingOpsIndex = append(m.UnbondingOpsIndex, UnbondingOpIndex{}) + if err := m.UnbondingOpsIndex[len(m.UnbondingOpsIndex)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipGenesis(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenesis + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *UnbondingOpIndex) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: UnbondingOpIndex: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: UnbondingOpIndex: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field ValsetUpdateId", wireType) + } + m.ValsetUpdateId = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.ValsetUpdateId |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType == 0 { + var v uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.UnbondingOpIndex = append(m.UnbondingOpIndex, v) + } else if wireType == 2 { + var packedLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + packedLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if packedLen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + packedLen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + var elementCount int + var count int + for _, integer := range dAtA[iNdEx:postIndex] { + if integer < 128 { + count++ + } + } + elementCount = count + if 
elementCount != 0 && len(m.UnbondingOpIndex) == 0 { + m.UnbondingOpIndex = make([]uint64, 0, elementCount) + } + for iNdEx < postIndex { + var v uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.UnbondingOpIndex = append(m.UnbondingOpIndex, v) + } + } else { + return fmt.Errorf("proto: wrong wireType = %d for field UnbondingOpIndex", wireType) + } + default: + iNdEx = preIndex + skippy, err := skipGenesis(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthGenesis + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *ValsetUpdateIdToHeight) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ValsetUpdateIdToHeight: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ValsetUpdateIdToHeight: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field ValsetUpdateId", wireType) + } + m.ValsetUpdateId = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.ValsetUpdateId |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Height", wireType) + } + m.Height = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Height |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } default: iNdEx = preIndex skippy, err := skipGenesis(dAtA[iNdEx:]) diff --git a/x/ccv/provider/types/genesis_test.go b/x/ccv/provider/types/genesis_test.go index d6cf64926d..e1a7556bdc 100644 --- a/x/ccv/provider/types/genesis_test.go +++ b/x/ccv/provider/types/genesis_test.go @@ -12,6 +12,7 @@ import ( "github.com/stretchr/testify/require" ) +// Tests validation of consumer states and params within a provider genesis state func TestValidateGenesisState(t *testing.T) { testCases := []struct { name string @@ -21,7 +22,13 @@ func TestValidateGenesisState(t *testing.T) { { "valid initializing provider genesis with nil updates", types.NewGenesisState( - []types.ConsumerState{{"chainid-1", "channelid"}}, + 0, + nil, + []types.ConsumerState{{ChainId: "chainid-1", ChannelId: "channelid"}}, + nil, + nil, + nil, + nil, types.DefaultParams(), ), true, @@ -29,7 +36,13 @@ func TestValidateGenesisState(t *testing.T) { { "valid validating provider genesis with nil updates", types.NewGenesisState( - []types.ConsumerState{{"chainid-1", "channelid"}}, + 0, + nil, + []types.ConsumerState{{ChainId: "chainid-1", ChannelId: "channelid"}}, + nil, + nil, + nil, + nil, types.DefaultParams(), ), 
true, @@ -37,12 +50,18 @@ func TestValidateGenesisState(t *testing.T) { { "valid multiple provider genesis with multiple consumer chains", types.NewGenesisState( + 0, + nil, []types.ConsumerState{ - {"chainid-1", "channelid"}, - {"chainid-2", "channelid2"}, - {"chainid-3", "channelid3"}, - {"chainid-4", "channelid4"}, + {ChainId: "chainid-1", ChannelId: "channelid1"}, + {ChainId: "chainid-2", ChannelId: "channelid2"}, + {ChainId: "chainid-3", ChannelId: "channelid3"}, + {ChainId: "chainid-4", ChannelId: "channelid4"}, }, + nil, + nil, + nil, + nil, types.DefaultParams(), ), true, @@ -50,7 +69,13 @@ func TestValidateGenesisState(t *testing.T) { { "valid provider genesis with custom params", types.NewGenesisState( - []types.ConsumerState{{"chainid-1", "channelid"}}, + 0, + nil, + []types.ConsumerState{{ChainId: "chainid-1", ChannelId: "channelid"}}, + nil, + nil, + nil, + nil, types.NewParams(ibctmtypes.NewClientState("", ibctmtypes.DefaultTrustLevel, 0, 0, time.Second*40, clienttypes.Height{}, commitmenttypes.GetSDKSpecs(), []string{"ibc", "upgradedIBCState"}, true, false)), ), @@ -59,7 +84,13 @@ func TestValidateGenesisState(t *testing.T) { { "invalid params", types.NewGenesisState( - []types.ConsumerState{{"chainid-1", "channelid"}}, + 0, + nil, + []types.ConsumerState{{ChainId: "chainid-1", ChannelId: "channelid"}}, + nil, + nil, + nil, + nil, types.NewParams(ibctmtypes.NewClientState("", ibctmtypes.DefaultTrustLevel, 0, 0, 0, clienttypes.Height{}, nil, []string{"ibc", "upgradedIBCState"}, true, false)), ), @@ -68,7 +99,13 @@ func TestValidateGenesisState(t *testing.T) { { "invalid chain id", types.NewGenesisState( - []types.ConsumerState{{" ", "channelid"}}, + 0, + nil, + []types.ConsumerState{{ChainId: "", ChannelId: "channelid"}}, + nil, + nil, + nil, + nil, types.DefaultParams(), ), false, @@ -76,7 +113,13 @@ func TestValidateGenesisState(t *testing.T) { { "invalid channel id", types.NewGenesisState( - []types.ConsumerState{{"chainid", "invalidchannel{}"}}, + 0, + nil, + []types.ConsumerState{{ChainId: "chainid", ChannelId: "ivnalidChannel{}"}}, + nil, + nil, + nil, + nil, types.DefaultParams(), ), false, diff --git a/x/ccv/provider/types/keys.go b/x/ccv/provider/types/keys.go index 68a0e24d65..338921f33c 100644 --- a/x/ccv/provider/types/keys.go +++ b/x/ccv/provider/types/keys.go @@ -52,13 +52,13 @@ const ( // ChainToClientBytePrefix is the byte prefix for storing the consumer chainID for a given consumer clientid. ChainToClientBytePrefix - // PendingCreateProposalBytePrefix is the byte prefix for storing the pending identified consumer chain client before the spawn time occurs. + // PendingCAPBytePrefix is the byte prefix for storing pending consumer addition proposals before the spawn time occurs. // The key includes the BigEndian timestamp to allow for efficient chronological iteration - PendingCreateProposalBytePrefix + PendingCAPBytePrefix - // PendingStopProposalBytePrefix is the byte prefix for storing the pending identified consumer chain before the stop time occurs. + // PendingCRPBytePrefix is the byte prefix for storing pending consumer removal proposals before the stop time occurs. // The key includes the BigEndian timestamp to allow for efficient chronological iteration - PendingStopProposalBytePrefix + PendingCRPBytePrefix // UnbondingOpBytePrefix is the byte prefix that stores a record of all the ids of consumer chains that // need to unbond before a given delegation can unbond on this chain. 
@@ -123,15 +123,15 @@ func ChainToClientKey(chainID string) []byte { return append([]byte{ChainToClientBytePrefix}, []byte(chainID)...) } -// PendingCreateProposalKey returns the key under which a pending identified client is stored -func PendingCreateProposalKey(timestamp time.Time, chainID string) []byte { +// PendingCAPKey returns the key under which a pending consumer addition proposal is stored +func PendingCAPKey(timestamp time.Time, chainID string) []byte { timeBz := sdk.FormatTimeBytes(timestamp) timeBzL := len(timeBz) - prefixL := len([]byte{PendingCreateProposalBytePrefix}) + prefixL := len([]byte{PendingCAPBytePrefix}) bz := make([]byte, prefixL+8+timeBzL+len(chainID)) // copy the prefix - copy(bz[:prefixL], []byte{PendingCreateProposalBytePrefix}) + copy(bz[:prefixL], []byte{PendingCAPBytePrefix}) // copy the time length copy(bz[prefixL:prefixL+8], sdk.Uint64ToBigEndian(uint64(timeBzL))) // copy the time bytes @@ -141,9 +141,10 @@ func PendingCreateProposalKey(timestamp time.Time, chainID string) []byte { return bz } -// ParsePendingCreateProposalKey returns the time and chain ID for a pending client key or an error if unparseable -func ParsePendingCreateProposalKey(bz []byte) (time.Time, string, error) { - expectedPrefix := []byte{PendingCreateProposalBytePrefix} +// ParsePendingCAPKey returns the time and chain ID for a pending consumer addition proposal key +// or an error if unparsable +func ParsePendingCAPKey(bz []byte) (time.Time, string, error) { + expectedPrefix := []byte{PendingCAPBytePrefix} prefixL := len(expectedPrefix) if prefix := bz[:prefixL]; !bytes.Equal(prefix, expectedPrefix) { return time.Time{}, "", fmt.Errorf("invalid prefix; expected: %X, got: %X", expectedPrefix, prefix) @@ -159,15 +160,15 @@ func ParsePendingCreateProposalKey(bz []byte) (time.Time, string, error) { return timestamp, chainID, nil } -// PendingStopProposalKey returns the key under which pending consumer chain stop proposals are stored -func PendingStopProposalKey(timestamp time.Time, chainID string) []byte { +// PendingCRPKey returns the key under which pending consumer removal proposals are stored +func PendingCRPKey(timestamp time.Time, chainID string) []byte { timeBz := sdk.FormatTimeBytes(timestamp) timeBzL := len(timeBz) - prefixL := len([]byte{PendingStopProposalBytePrefix}) + prefixL := len([]byte{PendingCRPBytePrefix}) bz := make([]byte, prefixL+8+timeBzL+len(chainID)) // copy the prefix - copy(bz[:prefixL], []byte{PendingStopProposalBytePrefix}) + copy(bz[:prefixL], []byte{PendingCRPBytePrefix}) // copy the time length copy(bz[prefixL:prefixL+8], sdk.Uint64ToBigEndian(uint64(timeBzL))) // copy the time bytes @@ -177,9 +178,9 @@ func PendingStopProposalKey(timestamp time.Time, chainID string) []byte { return bz } -// ParsePendingStopProposalKey returns the time and chain ID for a pending consumer chain stop proposal key or an error if unparseable -func ParsePendingStopProposalKey(bz []byte) (time.Time, string, error) { - expectedPrefix := []byte{PendingStopProposalBytePrefix} +// ParsePendingCRPKey returns the time and chain ID for a pending consumer removal proposal key or an error if unparseable +func ParsePendingCRPKey(bz []byte) (time.Time, string, error) { + expectedPrefix := []byte{PendingCRPBytePrefix} prefixL := len(expectedPrefix) if prefix := bz[:prefixL]; !bytes.Equal(prefix, expectedPrefix) { return time.Time{}, "", fmt.Errorf("invalid prefix; expected: %X, got: %X", expectedPrefix, prefix) diff --git a/x/ccv/provider/types/keys_test.go 
b/x/ccv/provider/types/keys_test.go index 6306f4075e..9faca25676 100644 --- a/x/ccv/provider/types/keys_test.go +++ b/x/ccv/provider/types/keys_test.go @@ -43,8 +43,8 @@ func getSingleByteKeys() [][]byte { keys[i], i = []byte{ChainToChannelBytePrefix}, i+1 keys[i], i = []byte{ChannelToChainBytePrefix}, i+1 keys[i], i = []byte{ChainToClientBytePrefix}, i+1 - keys[i], i = []byte{PendingCreateProposalBytePrefix}, i+1 - keys[i], i = []byte{PendingStopProposalBytePrefix}, i+1 + keys[i], i = []byte{PendingCAPBytePrefix}, i+1 + keys[i], i = []byte{PendingCRPBytePrefix}, i+1 keys[i], i = []byte{UnbondingOpBytePrefix}, i+1 keys[i], i = []byte{UnbondingOpIndexBytePrefix}, i+1 keys[i], i = []byte{ValsetUpdateBlockHeightBytePrefix}, i+1 @@ -57,7 +57,8 @@ func getSingleByteKeys() [][]byte { return keys } -func TestPendingClientKeyAndParse(t *testing.T) { +// Tests the construction and parsing of keys for storing pending consumer addition proposals +func TestPendingCAPKeyAndParse(t *testing.T) { tests := []struct { timestamp time.Time chainID string @@ -69,19 +70,20 @@ func TestPendingClientKeyAndParse(t *testing.T) { } for _, test := range tests { - key := PendingCreateProposalKey(test.timestamp, test.chainID) + key := PendingCAPKey(test.timestamp, test.chainID) require.NotEmpty(t, key) // Expected bytes = prefix + time length + time bytes + length of chainID expectedBytes := 1 + 8 + len(sdk.FormatTimeBytes(time.Time{})) + len(test.chainID) require.Equal(t, expectedBytes, len(key)) - parsedTime, parsedID, err := ParsePendingCreateProposalKey(key) + parsedTime, parsedID, err := ParsePendingCAPKey(key) require.Equal(t, test.timestamp.UTC(), parsedTime.UTC()) require.Equal(t, test.chainID, parsedID) require.NoError(t, err) } } -func TestPendingStopProposalKeyAndParse(t *testing.T) { +// Tests the construction and parsing of keys for storing pending consumer removal proposals +func TestPendingCRPKeyAndParse(t *testing.T) { tests := []struct { timestamp time.Time chainID string @@ -93,12 +95,12 @@ func TestPendingStopProposalKeyAndParse(t *testing.T) { } for _, test := range tests { - key := PendingStopProposalKey(test.timestamp, test.chainID) + key := PendingCRPKey(test.timestamp, test.chainID) require.NotEmpty(t, key) // Expected bytes = prefix + time length + time bytes + length of chainID expectedBytes := 1 + 8 + len(sdk.FormatTimeBytes(time.Time{})) + len(test.chainID) require.Equal(t, expectedBytes, len(key)) - parsedTime, parsedID, err := ParsePendingStopProposalKey(key) + parsedTime, parsedID, err := ParsePendingCRPKey(key) require.Equal(t, test.timestamp.UTC(), parsedTime.UTC()) require.Equal(t, test.chainID, parsedID) require.NoError(t, err) diff --git a/x/ccv/provider/types/proposal.go b/x/ccv/provider/types/proposal.go index 88c593df1b..7462115b9a 100644 --- a/x/ccv/provider/types/proposal.go +++ b/x/ccv/provider/types/proposal.go @@ -11,21 +11,21 @@ import ( ) const ( - ProposalTypeCreateConsumerChain = "CreateConsumerChain" - ProposalTypeStopConsumerChain = "StopConsumerChain" + ProposalTypeConsumerAddition = "ConsumerAddition" + ProposalTypeConsumerRemoval = "ConsumerRemoval" ) var ( - _ govtypes.Content = &CreateConsumerChainProposal{} + _ govtypes.Content = &ConsumerAdditionProposal{} ) func init() { - govtypes.RegisterProposalType(ProposalTypeCreateConsumerChain) + govtypes.RegisterProposalType(ProposalTypeConsumerAddition) } -// NewCreateConsumerChainProposal creates a new create consumerchain proposal. 
-func NewCreateConsumerChainProposal(title, description, chainID string, initialHeight clienttypes.Height, genesisHash, binaryHash []byte, spawnTime time.Time) govtypes.Content { - return &CreateConsumerChainProposal{ +// NewConsumerAdditionProposal creates a new consumer addition proposal. +func NewConsumerAdditionProposal(title, description, chainID string, initialHeight clienttypes.Height, genesisHash, binaryHash []byte, spawnTime time.Time) govtypes.Content { + return &ConsumerAdditionProposal{ Title: title, Description: description, ChainId: chainID, @@ -36,49 +36,49 @@ func NewCreateConsumerChainProposal(title, description, chainID string, initialH } } -// GetTitle returns the title of a create consumerchain proposal. -func (cccp *CreateConsumerChainProposal) GetTitle() string { return cccp.Title } +// GetTitle returns the title of a consumer addition proposal. +func (cccp *ConsumerAdditionProposal) GetTitle() string { return cccp.Title } -// GetDescription returns the description of a create consumerchain proposal. -func (cccp *CreateConsumerChainProposal) GetDescription() string { return cccp.Description } +// GetDescription returns the description of a consumer addition proposal. +func (cccp *ConsumerAdditionProposal) GetDescription() string { return cccp.Description } -// ProposalRoute returns the routing key of a create consumerchain proposal. -func (cccp *CreateConsumerChainProposal) ProposalRoute() string { return RouterKey } +// ProposalRoute returns the routing key of a consumer addition proposal. +func (cccp *ConsumerAdditionProposal) ProposalRoute() string { return RouterKey } -// ProposalType returns the type of a create consumerchain proposal. -func (cccp *CreateConsumerChainProposal) ProposalType() string { - return ProposalTypeCreateConsumerChain +// ProposalType returns the type of a consumer addition proposal. +func (cccp *ConsumerAdditionProposal) ProposalType() string { + return ProposalTypeConsumerAddition } // ValidateBasic runs basic stateless validity checks -func (cccp *CreateConsumerChainProposal) ValidateBasic() error { +func (cccp *ConsumerAdditionProposal) ValidateBasic() error { if err := govtypes.ValidateAbstract(cccp); err != nil { return err } if strings.TrimSpace(cccp.ChainId) == "" { - return sdkerrors.Wrap(ErrInvalidCreateProposal, "consumer chain id must not be blank") + return sdkerrors.Wrap(ErrInvalidConsumerAdditionProposal, "consumer chain id must not be blank") } if cccp.InitialHeight.IsZero() { - return sdkerrors.Wrap(ErrInvalidCreateProposal, "initial height cannot be zero") + return sdkerrors.Wrap(ErrInvalidConsumerAdditionProposal, "initial height cannot be zero") } if len(cccp.GenesisHash) == 0 { - return sdkerrors.Wrap(ErrInvalidCreateProposal, "genesis hash cannot be empty") + return sdkerrors.Wrap(ErrInvalidConsumerAdditionProposal, "genesis hash cannot be empty") } if len(cccp.BinaryHash) == 0 { - return sdkerrors.Wrap(ErrInvalidCreateProposal, "binary hash cannot be empty") + return sdkerrors.Wrap(ErrInvalidConsumerAdditionProposal, "binary hash cannot be empty") } if cccp.SpawnTime.IsZero() { - return sdkerrors.Wrap(ErrInvalidCreateProposal, "spawn time cannot be zero") + return sdkerrors.Wrap(ErrInvalidConsumerAdditionProposal, "spawn time cannot be zero") } return nil } -// String returns the string representation of the CreateConsumerChainProposal. -func (cccp *CreateConsumerChainProposal) String() string { +// String returns the string representation of the ConsumerAdditionProposal. 
+func (cccp *ConsumerAdditionProposal) String() string { return fmt.Sprintf(`CreateConsumerChain Proposal Title: %s Description: %s @@ -89,34 +89,34 @@ func (cccp *CreateConsumerChainProposal) String() string { SpawnTime: %s`, cccp.Title, cccp.Description, cccp.ChainId, cccp.InitialHeight, cccp.GenesisHash, cccp.BinaryHash, cccp.SpawnTime) } -// NewStopConsumerChainProposal creates a new stop consumer chain proposal. -func NewStopConsumerChainProposal(title, description, chainID string, stopTime time.Time) (govtypes.Content, error) { - return &StopConsumerChainProposal{ +// NewConsumerRemovalProposal creates a new consumer removal proposal. +func NewConsumerRemovalProposal(title, description, chainID string, stopTime time.Time) govtypes.Content { + return &ConsumerRemovalProposal{ Title: title, Description: description, ChainId: chainID, StopTime: stopTime, - }, nil + } } -// ProposalRoute returns the routing key of a stop consumer chain proposal. -func (sccp *StopConsumerChainProposal) ProposalRoute() string { return RouterKey } +// ProposalRoute returns the routing key of a consumer removal proposal. +func (sccp *ConsumerRemovalProposal) ProposalRoute() string { return RouterKey } -// ProposalType returns the type of a stop consumer chain proposal. -func (sccp *StopConsumerChainProposal) ProposalType() string { return ProposalTypeStopConsumerChain } +// ProposalType returns the type of a consumer removal proposal. +func (sccp *ConsumerRemovalProposal) ProposalType() string { return ProposalTypeConsumerRemoval } // ValidateBasic runs basic stateless validity checks -func (sccp *StopConsumerChainProposal) ValidateBasic() error { +func (sccp *ConsumerRemovalProposal) ValidateBasic() error { if err := govtypes.ValidateAbstract(sccp); err != nil { return err } if strings.TrimSpace(sccp.ChainId) == "" { - return sdkerrors.Wrap(ErrInvalidStopProposal, "consumer chain id must not be blank") + return sdkerrors.Wrap(ErrInvalidConsumerRemovalProp, "consumer chain id must not be blank") } if sccp.StopTime.IsZero() { - return sdkerrors.Wrap(ErrInvalidStopProposal, "spawn time cannot be zero") + return sdkerrors.Wrap(ErrInvalidConsumerRemovalProp, "stop time cannot be zero") } return nil } diff --git a/x/ccv/provider/types/proposal_test.go b/x/ccv/provider/types/proposal_test.go index 9bc193bd84..18094d4375 100644 --- a/x/ccv/provider/types/proposal_test.go +++ b/x/ccv/provider/types/proposal_test.go @@ -27,22 +27,22 @@ func TestValidateBasic(t *testing.T) { }{ { "success", - types.NewCreateConsumerChainProposal("title", "description", "chainID", initialHeight, []byte("gen_hash"), []byte("bin_hash"), time.Now()), + types.NewConsumerAdditionProposal("title", "description", "chainID", initialHeight, []byte("gen_hash"), []byte("bin_hash"), time.Now()), true, }, { "fails validate abstract - empty title", - types.NewCreateConsumerChainProposal(" ", "description", "chainID", initialHeight, []byte("gen_hash"), []byte("bin_hash"), time.Now()), + types.NewConsumerAdditionProposal(" ", "description", "chainID", initialHeight, []byte("gen_hash"), []byte("bin_hash"), time.Now()), false, }, { "chainID is empty", - types.NewCreateConsumerChainProposal("title", "description", " ", initialHeight, []byte("gen_hash"), []byte("bin_hash"), time.Now()), + types.NewConsumerAdditionProposal("title", "description", " ", initialHeight, []byte("gen_hash"), []byte("bin_hash"), time.Now()), false, }, { "initial height is zero", - &types.CreateConsumerChainProposal{ + &types.ConsumerAdditionProposal{ Title: "title", Description:
"description", ChainId: "chainID", @@ -55,17 +55,17 @@ func TestValidateBasic(t *testing.T) { }, { "genesis hash is empty", - types.NewCreateConsumerChainProposal("title", "description", "chainID", initialHeight, []byte(""), []byte("bin_hash"), time.Now()), + types.NewConsumerAdditionProposal("title", "description", "chainID", initialHeight, []byte(""), []byte("bin_hash"), time.Now()), false, }, { "binary hash is empty", - types.NewCreateConsumerChainProposal("title", "description", "chainID", initialHeight, []byte("gen_hash"), []byte(""), time.Now()), + types.NewConsumerAdditionProposal("title", "description", "chainID", initialHeight, []byte("gen_hash"), []byte(""), time.Now()), false, }, { "time is zero", - types.NewCreateConsumerChainProposal("title", "description", "chainID", initialHeight, []byte("gen_hash"), []byte("bin_hash"), time.Time{}), + types.NewConsumerAdditionProposal("title", "description", "chainID", initialHeight, []byte("gen_hash"), []byte("bin_hash"), time.Time{}), false, }, } @@ -81,10 +81,10 @@ func TestValidateBasic(t *testing.T) { } } -func TestMarshalCreateConsumerChainProposal(t *testing.T) { - content := types.NewCreateConsumerChainProposal("title", "description", "chainID", clienttypes.NewHeight(0, 1), []byte("gen_hash"), []byte("bin_hash"), time.Now().UTC()) +func TestMarshalConsumerAdditionProposal(t *testing.T) { + content := types.NewConsumerAdditionProposal("title", "description", "chainID", clienttypes.NewHeight(0, 1), []byte("gen_hash"), []byte("bin_hash"), time.Now().UTC()) - cccp, ok := content.(*types.CreateConsumerChainProposal) + cccp, ok := content.(*types.ConsumerAdditionProposal) require.True(t, ok) // create codec @@ -100,17 +100,17 @@ func TestMarshalCreateConsumerChainProposal(t *testing.T) { require.NoError(t, err) // unmarshal proposal - newCccp := &types.CreateConsumerChainProposal{} + newCccp := &types.ConsumerAdditionProposal{} err = cdc.UnmarshalJSON(bz, newCccp) require.NoError(t, err) require.True(t, proto.Equal(cccp, newCccp), "unmarshalled proposal does not equal original proposal") } -func TestCreateConsumerChainProposalString(t *testing.T) { +func TestConsumerAdditionProposalString(t *testing.T) { initialHeight := clienttypes.NewHeight(2, 3) spawnTime := time.Now() - proposal := types.NewCreateConsumerChainProposal("title", "description", "chainID", initialHeight, []byte("gen_hash"), []byte("bin_hash"), spawnTime) + proposal := types.NewConsumerAdditionProposal("title", "description", "chainID", initialHeight, []byte("gen_hash"), []byte("bin_hash"), spawnTime) expect := fmt.Sprintf(`CreateConsumerChain Proposal Title: title @@ -121,5 +121,5 @@ func TestCreateConsumerChainProposalString(t *testing.T) { BinaryHash: %s SpawnTime: %s`, initialHeight, []byte("gen_hash"), []byte("bin_hash"), spawnTime) - require.Equal(t, expect, proposal.String(), "string method for CreateConsumerChainProposal returned unexpected string") + require.Equal(t, expect, proposal.String(), "string method for ConsumerAdditionProposal returned unexpected string") } diff --git a/x/ccv/provider/types/provider.pb.go b/x/ccv/provider/types/provider.pb.go index 87d5250359..df9f5228ab 100644 --- a/x/ccv/provider/types/provider.pb.go +++ b/x/ccv/provider/types/provider.pb.go @@ -29,10 +29,10 @@ var _ = time.Kitchen // proto package needs to be updated. const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package -// CreateConsumerChainProposal is a governance proposal on the provider chain to spawn a new consumer chain. 
+// ConsumerAdditionProposal is a governance proposal on the provider chain to spawn a new consumer chain. // If it passes, then all validators on the provider chain are expected to validate the consumer chain at spawn time // or get slashed. It is recommended that spawn time occurs after the proposal end time. -type CreateConsumerChainProposal struct { +type ConsumerAdditionProposal struct { // the title of the proposal Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` // the description of the proposal @@ -56,17 +56,17 @@ type CreateConsumerChainProposal struct { LockUnbondingOnTimeout bool `protobuf:"varint,8,opt,name=lock_unbonding_on_timeout,json=lockUnbondingOnTimeout,proto3" json:"lock_unbonding_on_timeout,omitempty"` } -func (m *CreateConsumerChainProposal) Reset() { *m = CreateConsumerChainProposal{} } -func (*CreateConsumerChainProposal) ProtoMessage() {} -func (*CreateConsumerChainProposal) Descriptor() ([]byte, []int) { +func (m *ConsumerAdditionProposal) Reset() { *m = ConsumerAdditionProposal{} } +func (*ConsumerAdditionProposal) ProtoMessage() {} +func (*ConsumerAdditionProposal) Descriptor() ([]byte, []int) { return fileDescriptor_f22ec409a72b7b72, []int{0} } -func (m *CreateConsumerChainProposal) XXX_Unmarshal(b []byte) error { +func (m *ConsumerAdditionProposal) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } -func (m *CreateConsumerChainProposal) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { +func (m *ConsumerAdditionProposal) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { - return xxx_messageInfo_CreateConsumerChainProposal.Marshal(b, m, deterministic) + return xxx_messageInfo_ConsumerAdditionProposal.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) @@ -76,22 +76,22 @@ func (m *CreateConsumerChainProposal) XXX_Marshal(b []byte, deterministic bool) return b[:n], nil } } -func (m *CreateConsumerChainProposal) XXX_Merge(src proto.Message) { - xxx_messageInfo_CreateConsumerChainProposal.Merge(m, src) +func (m *ConsumerAdditionProposal) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConsumerAdditionProposal.Merge(m, src) } -func (m *CreateConsumerChainProposal) XXX_Size() int { +func (m *ConsumerAdditionProposal) XXX_Size() int { return m.Size() } -func (m *CreateConsumerChainProposal) XXX_DiscardUnknown() { - xxx_messageInfo_CreateConsumerChainProposal.DiscardUnknown(m) +func (m *ConsumerAdditionProposal) XXX_DiscardUnknown() { + xxx_messageInfo_ConsumerAdditionProposal.DiscardUnknown(m) } -var xxx_messageInfo_CreateConsumerChainProposal proto.InternalMessageInfo +var xxx_messageInfo_ConsumerAdditionProposal proto.InternalMessageInfo -// StopConsumerProposal is a governance proposal on the provider chain to stop a consumer chain. +// ConsumerRemovalProposal is a governance proposal on the provider chain to remove (and stop) a consumer chain. // If it passes, all the consumer chain's state is removed from the provider chain. The outstanding unbonding // operation funds are released if the LockUnbondingOnTimeout parameter is set to false for the consumer chain ID. 
-type StopConsumerChainProposal struct { +type ConsumerRemovalProposal struct { // the title of the proposal Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` // the description of the proposal @@ -102,18 +102,18 @@ type StopConsumerChainProposal struct { StopTime time.Time `protobuf:"bytes,4,opt,name=stop_time,json=stopTime,proto3,stdtime" json:"stop_time"` } -func (m *StopConsumerChainProposal) Reset() { *m = StopConsumerChainProposal{} } -func (m *StopConsumerChainProposal) String() string { return proto.CompactTextString(m) } -func (*StopConsumerChainProposal) ProtoMessage() {} -func (*StopConsumerChainProposal) Descriptor() ([]byte, []int) { +func (m *ConsumerRemovalProposal) Reset() { *m = ConsumerRemovalProposal{} } +func (m *ConsumerRemovalProposal) String() string { return proto.CompactTextString(m) } +func (*ConsumerRemovalProposal) ProtoMessage() {} +func (*ConsumerRemovalProposal) Descriptor() ([]byte, []int) { return fileDescriptor_f22ec409a72b7b72, []int{1} } -func (m *StopConsumerChainProposal) XXX_Unmarshal(b []byte) error { +func (m *ConsumerRemovalProposal) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) } -func (m *StopConsumerChainProposal) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { +func (m *ConsumerRemovalProposal) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { if deterministic { - return xxx_messageInfo_StopConsumerChainProposal.Marshal(b, m, deterministic) + return xxx_messageInfo_ConsumerRemovalProposal.Marshal(b, m, deterministic) } else { b = b[:cap(b)] n, err := m.MarshalToSizedBuffer(b) @@ -123,40 +123,40 @@ func (m *StopConsumerChainProposal) XXX_Marshal(b []byte, deterministic bool) ([ return b[:n], nil } } -func (m *StopConsumerChainProposal) XXX_Merge(src proto.Message) { - xxx_messageInfo_StopConsumerChainProposal.Merge(m, src) +func (m *ConsumerRemovalProposal) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConsumerRemovalProposal.Merge(m, src) } -func (m *StopConsumerChainProposal) XXX_Size() int { +func (m *ConsumerRemovalProposal) XXX_Size() int { return m.Size() } -func (m *StopConsumerChainProposal) XXX_DiscardUnknown() { - xxx_messageInfo_StopConsumerChainProposal.DiscardUnknown(m) +func (m *ConsumerRemovalProposal) XXX_DiscardUnknown() { + xxx_messageInfo_ConsumerRemovalProposal.DiscardUnknown(m) } -var xxx_messageInfo_StopConsumerChainProposal proto.InternalMessageInfo +var xxx_messageInfo_ConsumerRemovalProposal proto.InternalMessageInfo -func (m *StopConsumerChainProposal) GetTitle() string { +func (m *ConsumerRemovalProposal) GetTitle() string { if m != nil { return m.Title } return "" } -func (m *StopConsumerChainProposal) GetDescription() string { +func (m *ConsumerRemovalProposal) GetDescription() string { if m != nil { return m.Description } return "" } -func (m *StopConsumerChainProposal) GetChainId() string { +func (m *ConsumerRemovalProposal) GetChainId() string { if m != nil { return m.ChainId } return "" } -func (m *StopConsumerChainProposal) GetStopTime() time.Time { +func (m *ConsumerRemovalProposal) GetStopTime() time.Time { if m != nil { return m.StopTime } @@ -260,11 +260,58 @@ func (m *HandshakeMetadata) GetVersion() string { return "" } +// SlashAcks contains addresses of consumer chain validators +// successfully slashed on the provider chain +type SlashAcks struct { + Addresses []string `protobuf:"bytes,1,rep,name=addresses,proto3" json:"addresses,omitempty"` +} + +func (m *SlashAcks) Reset() { *m = SlashAcks{} } +func (m *SlashAcks) String() string { return
proto.CompactTextString(m) } +func (*SlashAcks) ProtoMessage() {} +func (*SlashAcks) Descriptor() ([]byte, []int) { + return fileDescriptor_f22ec409a72b7b72, []int{4} +} +func (m *SlashAcks) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *SlashAcks) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_SlashAcks.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *SlashAcks) XXX_Merge(src proto.Message) { + xxx_messageInfo_SlashAcks.Merge(m, src) +} +func (m *SlashAcks) XXX_Size() int { + return m.Size() +} +func (m *SlashAcks) XXX_DiscardUnknown() { + xxx_messageInfo_SlashAcks.DiscardUnknown(m) +} + +var xxx_messageInfo_SlashAcks proto.InternalMessageInfo + +func (m *SlashAcks) GetAddresses() []string { + if m != nil { + return m.Addresses + } + return nil +} + func init() { - proto.RegisterType((*CreateConsumerChainProposal)(nil), "interchain_security.ccv.provider.v1.CreateConsumerChainProposal") - proto.RegisterType((*StopConsumerChainProposal)(nil), "interchain_security.ccv.provider.v1.StopConsumerChainProposal") + proto.RegisterType((*ConsumerAdditionProposal)(nil), "interchain_security.ccv.provider.v1.ConsumerAdditionProposal") + proto.RegisterType((*ConsumerRemovalProposal)(nil), "interchain_security.ccv.provider.v1.ConsumerRemovalProposal") proto.RegisterType((*Params)(nil), "interchain_security.ccv.provider.v1.Params") proto.RegisterType((*HandshakeMetadata)(nil), "interchain_security.ccv.provider.v1.HandshakeMetadata") + proto.RegisterType((*SlashAcks)(nil), "interchain_security.ccv.provider.v1.SlashAcks") } func init() { @@ -272,49 +319,50 @@ func init() { } var fileDescriptor_f22ec409a72b7b72 = []byte{ - // 610 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x54, 0xbd, 0x6e, 0xd4, 0x40, - 0x10, 0x3e, 0x93, 0xbf, 0xcb, 0x5e, 0x08, 0xc2, 0x44, 0x91, 0x13, 0xa4, 0xbb, 0xe3, 0x68, 0x4e, - 0x42, 0xd8, 0xba, 0xa4, 0x22, 0x5d, 0x72, 0x12, 0x84, 0x02, 0x11, 0x5d, 0x42, 0x43, 0x81, 0xb5, - 0x5e, 0x4f, 0xec, 0x55, 0xec, 0x1d, 0x6b, 0x77, 0x6d, 0xc8, 0x13, 0x40, 0x99, 0x92, 0x32, 0xaf, - 0xc0, 0x5b, 0xa4, 0x4c, 0x49, 0x05, 0x28, 0x79, 0x11, 0xe4, 0x5d, 0x3b, 0x09, 0x12, 0x0d, 0x0d, - 0xdd, 0xcc, 0x37, 0xdf, 0x67, 0xcf, 0xcc, 0xb7, 0xbb, 0x64, 0x8b, 0x0b, 0x0d, 0x92, 0xa5, 0x94, - 0x8b, 0x50, 0x01, 0x2b, 0x25, 0xd7, 0xa7, 0x01, 0x63, 0x55, 0x50, 0x48, 0xac, 0x78, 0x0c, 0x32, - 0xa8, 0x26, 0x37, 0xb1, 0x5f, 0x48, 0xd4, 0xe8, 0x3e, 0xfd, 0x8b, 0xc6, 0x67, 0xac, 0xf2, 0x6f, - 0x78, 0xd5, 0x64, 0x73, 0x2d, 0xc1, 0x04, 0x0d, 0x3f, 0xa8, 0x23, 0x2b, 0xdd, 0x1c, 0x24, 0x88, - 0x49, 0x06, 0x81, 0xc9, 0xa2, 0xf2, 0x38, 0xd0, 0x3c, 0x07, 0xa5, 0x69, 0x5e, 0xb4, 0x04, 0x1e, - 0xb1, 0x80, 0xa1, 0x84, 0x80, 0x65, 0x1c, 0x84, 0xae, 0x7f, 0x6f, 0xa3, 0x86, 0x10, 0xd4, 0x84, - 0x8c, 0x27, 0xa9, 0xb6, 0xb0, 0x0a, 0x34, 0x88, 0x18, 0x64, 0xce, 0x2d, 0xf9, 0x36, 0xb3, 0x82, - 0xd1, 0xe7, 0x39, 0xf2, 0x78, 0x2a, 0x81, 0x6a, 0x98, 0xa2, 0x50, 0x65, 0x0e, 0x72, 0x5a, 0x77, - 0x7e, 0x20, 0xb1, 0x40, 0x45, 0x33, 0x77, 0x8d, 0x2c, 0x68, 0xae, 0x33, 0xf0, 0x9c, 0xa1, 0x33, - 0x5e, 0x9e, 0xd9, 0xc4, 0x1d, 0x92, 0x5e, 0x0c, 0x8a, 0x49, 0x5e, 0x68, 0x8e, 0xc2, 0xbb, 0x67, - 0x6a, 0x77, 0x21, 0x77, 0x83, 0x74, 0xed, 0x0a, 0x78, 0xec, 0xcd, 0x99, 0xf2, 0x92, 0xc9, 0x5f, - 0xc7, 0xee, 0x2b, 0xb2, 0xca, 0x05, 0xd7, 0x9c, 0x66, 0x61, 0x0a, 0x75, 0xab, 0xde, 0xfc, 0xd0, - 
0x19, 0xf7, 0xb6, 0x36, 0x7d, 0x1e, 0x31, 0xbf, 0x9e, 0xce, 0x6f, 0x66, 0xaa, 0x26, 0xfe, 0xbe, - 0x61, 0xec, 0xcd, 0x5f, 0xfc, 0x18, 0x74, 0x66, 0xf7, 0x1b, 0x9d, 0x05, 0xdd, 0x27, 0x64, 0x25, - 0x01, 0x01, 0x8a, 0xab, 0x30, 0xa5, 0x2a, 0xf5, 0x16, 0x86, 0xce, 0x78, 0x65, 0xd6, 0x6b, 0xb0, - 0x7d, 0xaa, 0x52, 0x77, 0x40, 0x7a, 0x11, 0x17, 0x54, 0x9e, 0x5a, 0xc6, 0xa2, 0x61, 0x10, 0x0b, - 0x19, 0xc2, 0x94, 0x10, 0x55, 0xd0, 0x8f, 0x22, 0xac, 0x57, 0xed, 0x2d, 0x35, 0x8d, 0x58, 0x1f, - 0xfc, 0xd6, 0x07, 0xff, 0xa8, 0xf5, 0x61, 0xaf, 0x5b, 0x37, 0x72, 0xf6, 0x73, 0xe0, 0xcc, 0x96, - 0x8d, 0xae, 0xae, 0xb8, 0x2f, 0xc8, 0x46, 0x86, 0xec, 0x24, 0x2c, 0x45, 0x84, 0x22, 0xe6, 0x22, - 0x09, 0xd1, 0x7e, 0x10, 0x4b, 0xed, 0x75, 0x87, 0xce, 0xb8, 0x3b, 0x5b, 0xaf, 0x09, 0xef, 0xda, - 0xfa, 0x5b, 0xa3, 0xc3, 0x52, 0xef, 0x74, 0xbf, 0x9c, 0x0f, 0x3a, 0x5f, 0xcf, 0x07, 0x9d, 0xd1, - 0x37, 0x87, 0x6c, 0x1c, 0x6a, 0x2c, 0xfe, 0x9b, 0x0f, 0xbb, 0x64, 0x59, 0x69, 0x2c, 0xec, 0xe4, - 0xf3, 0xff, 0x30, 0x79, 0xb7, 0x96, 0xd5, 0x85, 0xd1, 0x07, 0xb2, 0x78, 0x40, 0x25, 0xcd, 0x95, - 0x7b, 0x44, 0x1e, 0x68, 0xc8, 0x8b, 0x8c, 0x6a, 0x08, 0xad, 0x7b, 0xa6, 0xd3, 0xde, 0xd6, 0x33, - 0xe3, 0xea, 0xdd, 0x23, 0xe9, 0xdf, 0x39, 0x84, 0xd5, 0xc4, 0x9f, 0x1a, 0xf4, 0x50, 0x53, 0x0d, - 0xb3, 0xd5, 0xf6, 0x1b, 0x16, 0x1c, 0x45, 0xe4, 0xe1, 0x3e, 0x15, 0xb1, 0x4a, 0xe9, 0x09, 0xbc, - 0x01, 0x4d, 0x63, 0xaa, 0xa9, 0xbb, 0x4d, 0xd6, 0xdb, 0xab, 0x14, 0x1e, 0x03, 0x84, 0x05, 0x62, - 0x16, 0xd2, 0x38, 0x96, 0xcd, 0x6e, 0x1e, 0xb5, 0xd5, 0x97, 0x00, 0x07, 0x88, 0xd9, 0x6e, 0x1c, - 0x4b, 0xd7, 0x23, 0x4b, 0x15, 0x48, 0x75, 0xbb, 0xa5, 0x36, 0xdd, 0x3b, 0xba, 0xb8, 0xea, 0x3b, - 0x97, 0x57, 0x7d, 0xe7, 0xd7, 0x55, 0xdf, 0x39, 0xbb, 0xee, 0x77, 0x2e, 0xaf, 0xfb, 0x9d, 0xef, - 0xd7, 0xfd, 0xce, 0xfb, 0x9d, 0x84, 0xeb, 0xb4, 0x8c, 0x7c, 0x86, 0x79, 0xc0, 0x50, 0xe5, 0xa8, - 0x82, 0xdb, 0xbb, 0xfd, 0xfc, 0xe6, 0x3d, 0xf8, 0xf4, 0xe7, 0x8b, 0xa0, 0x4f, 0x0b, 0x50, 0xd1, - 0xa2, 0xd9, 0xe0, 0xf6, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0xc5, 0x6c, 0x6d, 0x4d, 0x42, 0x04, - 0x00, 0x00, + // 638 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x54, 0xbf, 0x6f, 0xd4, 0x30, + 0x14, 0xbe, 0xd0, 0x5f, 0x77, 0xbe, 0x52, 0x44, 0xa8, 0x4a, 0x5a, 0xa1, 0xbb, 0xe3, 0x58, 0x0e, + 0x21, 0x12, 0x5d, 0x3b, 0xd1, 0xed, 0x5a, 0x09, 0xca, 0x80, 0xa8, 0xd2, 0xb2, 0x30, 0x10, 0x39, + 0xf6, 0x6b, 0x62, 0x35, 0xb1, 0x23, 0xdb, 0x09, 0x74, 0x67, 0x60, 0xec, 0xc8, 0xd8, 0xff, 0x80, + 0x7f, 0xa3, 0x63, 0x47, 0x26, 0x40, 0xed, 0x3f, 0x82, 0x62, 0x5f, 0x7a, 0x87, 0xc4, 0xc2, 0xc0, + 0xe6, 0xf7, 0xbd, 0xef, 0x73, 0xde, 0x7b, 0x9f, 0xf3, 0xd0, 0x36, 0xe3, 0x1a, 0x24, 0x49, 0x31, + 0xe3, 0x91, 0x02, 0x52, 0x4a, 0xa6, 0xcf, 0x02, 0x42, 0xaa, 0xa0, 0x90, 0xa2, 0x62, 0x14, 0x64, + 0x50, 0x8d, 0x6f, 0xcf, 0x7e, 0x21, 0x85, 0x16, 0xee, 0x93, 0xbf, 0x68, 0x7c, 0x42, 0x2a, 0xff, + 0x96, 0x57, 0x8d, 0xb7, 0xd6, 0x13, 0x91, 0x08, 0xc3, 0x0f, 0xea, 0x93, 0x95, 0x6e, 0xf5, 0x13, + 0x21, 0x92, 0x0c, 0x02, 0x13, 0xc5, 0xe5, 0x49, 0xa0, 0x59, 0x0e, 0x4a, 0xe3, 0xbc, 0x68, 0x08, + 0x2c, 0x26, 0x01, 0x11, 0x12, 0x02, 0x92, 0x31, 0xe0, 0xba, 0xfe, 0xbc, 0x3d, 0x4d, 0x09, 0x41, + 0x4d, 0xc8, 0x58, 0x92, 0x6a, 0x0b, 0xab, 0x40, 0x03, 0xa7, 0x20, 0x73, 0x66, 0xc9, 0xb3, 0xc8, + 0x0a, 0x86, 0x9f, 0x17, 0x90, 0xb7, 0x2f, 0xb8, 0x2a, 0x73, 0x90, 0x13, 0x4a, 0x99, 0x66, 0x82, + 0x1f, 0x4a, 0x51, 0x08, 0x85, 0x33, 0x77, 0x1d, 0x2d, 0x69, 0xa6, 0x33, 0xf0, 0x9c, 0x81, 0x33, + 0xea, 0x84, 0x36, 0x70, 0x07, 0xa8, 0x4b, 0x41, 0x11, 0xc9, 
0x8a, 0x9a, 0xec, 0xdd, 0x31, 0xb9, + 0x79, 0xc8, 0xdd, 0x44, 0x6d, 0xdb, 0x3f, 0xa3, 0xde, 0x82, 0x49, 0xaf, 0x98, 0xf8, 0x35, 0x75, + 0x5f, 0xa1, 0x35, 0xc6, 0x99, 0x66, 0x38, 0x8b, 0x52, 0xa8, 0xeb, 0xf4, 0x16, 0x07, 0xce, 0xa8, + 0xbb, 0xbd, 0xe5, 0xb3, 0x98, 0xf8, 0x75, 0x6b, 0xfe, 0xb4, 0xa1, 0x6a, 0xec, 0x1f, 0x18, 0xc6, + 0xde, 0xe2, 0xe5, 0x8f, 0x7e, 0x2b, 0xbc, 0x3b, 0xd5, 0x59, 0xd0, 0x7d, 0x8c, 0x56, 0x13, 0xe0, + 0xa0, 0x98, 0x8a, 0x52, 0xac, 0x52, 0x6f, 0x69, 0xe0, 0x8c, 0x56, 0xc3, 0xee, 0x14, 0x3b, 0xc0, + 0x2a, 0x75, 0xfb, 0xa8, 0x1b, 0x33, 0x8e, 0xe5, 0x99, 0x65, 0x2c, 0x1b, 0x06, 0xb2, 0x90, 0x21, + 0xec, 0x23, 0xa4, 0x0a, 0xfc, 0x91, 0x47, 0xf5, 0x9c, 0xbd, 0x95, 0x69, 0x21, 0xd6, 0x04, 0xbf, + 0x31, 0xc1, 0x3f, 0x6e, 0x4c, 0xd8, 0x6b, 0xd7, 0x85, 0x9c, 0xff, 0xec, 0x3b, 0x61, 0xc7, 0xe8, + 0xea, 0x8c, 0xfb, 0x02, 0x6d, 0x66, 0x82, 0x9c, 0x46, 0x25, 0x8f, 0x05, 0xa7, 0x8c, 0x27, 0x91, + 0xb0, 0x17, 0x8a, 0x52, 0x7b, 0xed, 0x81, 0x33, 0x6a, 0x87, 0x1b, 0x35, 0xe1, 0x5d, 0x93, 0x7f, + 0x6b, 0x74, 0xa2, 0xd4, 0xbb, 0xed, 0x2f, 0x17, 0xfd, 0xd6, 0xd7, 0x8b, 0x7e, 0x6b, 0xf8, 0xcd, + 0x41, 0x0f, 0x1b, 0x1b, 0x42, 0xc8, 0x45, 0x85, 0xb3, 0xff, 0xe9, 0xc2, 0x04, 0x75, 0x94, 0x16, + 0x85, 0xed, 0x7b, 0xf1, 0x1f, 0xfa, 0x6e, 0xd7, 0xb2, 0x3a, 0x31, 0xfc, 0x80, 0x96, 0x0f, 0xb1, + 0xc4, 0xb9, 0x72, 0x8f, 0xd1, 0x3d, 0x0d, 0x79, 0x91, 0x61, 0x0d, 0x91, 0xf5, 0xce, 0x54, 0xda, + 0xdd, 0x7e, 0x66, 0x3c, 0x9d, 0x7f, 0x8d, 0xfe, 0xdc, 0xfb, 0xab, 0xc6, 0xfe, 0xbe, 0x41, 0x8f, + 0x34, 0xd6, 0x10, 0xae, 0x35, 0x77, 0x58, 0x70, 0x18, 0xa3, 0xfb, 0x07, 0x98, 0x53, 0x95, 0xe2, + 0x53, 0x78, 0x03, 0x1a, 0x53, 0xac, 0xb1, 0xbb, 0x83, 0x36, 0x9a, 0xbf, 0x28, 0x3a, 0x01, 0x88, + 0x0a, 0x21, 0xb2, 0x08, 0x53, 0x2a, 0xa7, 0xb3, 0x79, 0xd0, 0x64, 0x5f, 0x02, 0x1c, 0x0a, 0x91, + 0x4d, 0x28, 0x95, 0xae, 0x87, 0x56, 0x2a, 0x90, 0x6a, 0x36, 0xa5, 0x26, 0x1c, 0x3e, 0x45, 0x9d, + 0xa3, 0x0c, 0xab, 0x74, 0x42, 0x4e, 0x95, 0xfb, 0x08, 0x75, 0xea, 0x9b, 0x40, 0x29, 0x50, 0x9e, + 0x33, 0x58, 0x18, 0x75, 0xc2, 0x19, 0xb0, 0x77, 0x7c, 0x79, 0xdd, 0x73, 0xae, 0xae, 0x7b, 0xce, + 0xaf, 0xeb, 0x9e, 0x73, 0x7e, 0xd3, 0x6b, 0x5d, 0xdd, 0xf4, 0x5a, 0xdf, 0x6f, 0x7a, 0xad, 0xf7, + 0xbb, 0x09, 0xd3, 0x69, 0x19, 0xfb, 0x44, 0xe4, 0x01, 0x11, 0x2a, 0x17, 0x2a, 0x98, 0x6d, 0x80, + 0xe7, 0xb7, 0x5b, 0xe3, 0xd3, 0x9f, 0x7b, 0x43, 0x9f, 0x15, 0xa0, 0xe2, 0x65, 0x33, 0xec, 0x9d, + 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0xad, 0x37, 0xb8, 0x76, 0x68, 0x04, 0x00, 0x00, } -func (m *CreateConsumerChainProposal) Marshal() (dAtA []byte, err error) { +func (m *ConsumerAdditionProposal) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) @@ -324,12 +372,12 @@ func (m *CreateConsumerChainProposal) Marshal() (dAtA []byte, err error) { return dAtA[:n], nil } -func (m *CreateConsumerChainProposal) MarshalTo(dAtA []byte) (int, error) { +func (m *ConsumerAdditionProposal) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } -func (m *CreateConsumerChainProposal) MarshalToSizedBuffer(dAtA []byte) (int, error) { +func (m *ConsumerAdditionProposal) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int @@ -400,7 +448,7 @@ func (m *CreateConsumerChainProposal) MarshalToSizedBuffer(dAtA []byte) (int, er return len(dAtA) - i, nil } -func (m *StopConsumerChainProposal) Marshal() (dAtA []byte, err error) { +func (m *ConsumerRemovalProposal) Marshal() (dAtA []byte, err error) { size := m.Size() dAtA = 
make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) @@ -410,12 +458,12 @@ func (m *StopConsumerChainProposal) Marshal() (dAtA []byte, err error) { return dAtA[:n], nil } -func (m *StopConsumerChainProposal) MarshalTo(dAtA []byte) (int, error) { +func (m *ConsumerRemovalProposal) MarshalTo(dAtA []byte) (int, error) { size := m.Size() return m.MarshalToSizedBuffer(dAtA[:size]) } -func (m *StopConsumerChainProposal) MarshalToSizedBuffer(dAtA []byte) (int, error) { +func (m *ConsumerRemovalProposal) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int @@ -524,6 +572,38 @@ func (m *HandshakeMetadata) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *SlashAcks) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *SlashAcks) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *SlashAcks) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Addresses) > 0 { + for iNdEx := len(m.Addresses) - 1; iNdEx >= 0; iNdEx-- { + i -= len(m.Addresses[iNdEx]) + copy(dAtA[i:], m.Addresses[iNdEx]) + i = encodeVarintProvider(dAtA, i, uint64(len(m.Addresses[iNdEx]))) + i-- + dAtA[i] = 0xa + } + } + return len(dAtA) - i, nil +} + func encodeVarintProvider(dAtA []byte, offset int, v uint64) int { offset -= sovProvider(v) base := offset @@ -535,7 +615,7 @@ func encodeVarintProvider(dAtA []byte, offset int, v uint64) int { dAtA[offset] = uint8(v) return base } -func (m *CreateConsumerChainProposal) Size() (n int) { +func (m *ConsumerAdditionProposal) Size() (n int) { if m == nil { return 0 } @@ -571,7 +651,7 @@ func (m *CreateConsumerChainProposal) Size() (n int) { return n } -func (m *StopConsumerChainProposal) Size() (n int) { +func (m *ConsumerRemovalProposal) Size() (n int) { if m == nil { return 0 } @@ -624,13 +704,28 @@ func (m *HandshakeMetadata) Size() (n int) { return n } +func (m *SlashAcks) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.Addresses) > 0 { + for _, s := range m.Addresses { + l = len(s) + n += 1 + l + sovProvider(uint64(l)) + } + } + return n +} + func sovProvider(x uint64) (n int) { return (math_bits.Len64(x|1) + 6) / 7 } func sozProvider(x uint64) (n int) { return sovProvider(uint64((x << 1) ^ uint64((int64(x) >> 63)))) } -func (m *CreateConsumerChainProposal) Unmarshal(dAtA []byte) error { +func (m *ConsumerAdditionProposal) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -653,10 +748,10 @@ func (m *CreateConsumerChainProposal) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: CreateConsumerChainProposal: wiretype end group for non-group") + return fmt.Errorf("proto: ConsumerAdditionProposal: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: CreateConsumerChainProposal: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: ConsumerAdditionProposal: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: @@ -930,7 +1025,7 @@ func (m *CreateConsumerChainProposal) Unmarshal(dAtA []byte) error { } return nil } -func (m *StopConsumerChainProposal) Unmarshal(dAtA []byte) error { +func (m *ConsumerRemovalProposal) Unmarshal(dAtA []byte) 
error { l := len(dAtA) iNdEx := 0 for iNdEx < l { @@ -953,10 +1048,10 @@ func (m *StopConsumerChainProposal) Unmarshal(dAtA []byte) error { fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { - return fmt.Errorf("proto: StopConsumerChainProposal: wiretype end group for non-group") + return fmt.Errorf("proto: ConsumerRemovalProposal: wiretype end group for non-group") } if fieldNum <= 0 { - return fmt.Errorf("proto: StopConsumerChainProposal: illegal tag %d (wire type %d)", fieldNum, wire) + return fmt.Errorf("proto: ConsumerRemovalProposal: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: @@ -1309,6 +1404,88 @@ func (m *HandshakeMetadata) Unmarshal(dAtA []byte) error { } return nil } +func (m *SlashAcks) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowProvider + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: SlashAcks: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: SlashAcks: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Addresses", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowProvider + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthProvider + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthProvider + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Addresses = append(m.Addresses, string(dAtA[iNdEx:postIndex])) + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipProvider(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthProvider + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func skipProvider(dAtA []byte) (n int, err error) { l := len(dAtA) iNdEx := 0 diff --git a/x/ccv/provider/types/query.pb.go b/x/ccv/provider/types/query.pb.go index 2bba6e200b..0bf62a2b99 100644 --- a/x/ccv/provider/types/query.pb.go +++ b/x/ccv/provider/types/query.pb.go @@ -166,8 +166,8 @@ const _ = grpc.SupportPackageIsVersion4 // // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. type QueryClient interface { - // ConsumerGenesis queries the genesis state needed to start a consumer chain whose proposal - // has been accepted + // ConsumerGenesis queries the genesis state needed to start a consumer chain + // whose proposal has been accepted ConsumerGenesis(ctx context.Context, in *QueryConsumerGenesisRequest, opts ...grpc.CallOption) (*QueryConsumerGenesisResponse, error) } @@ -190,8 +190,8 @@ func (c *queryClient) ConsumerGenesis(ctx context.Context, in *QueryConsumerGene // QueryServer is the server API for Query service. 
type QueryServer interface { - // ConsumerGenesis queries the genesis state needed to start a consumer chain whose proposal - // has been accepted + // ConsumerGenesis queries the genesis state needed to start a consumer chain + // whose proposal has been accepted ConsumerGenesis(context.Context, *QueryConsumerGenesisRequest) (*QueryConsumerGenesisResponse, error) } diff --git a/x/ccv/types/ccv.pb.go b/x/ccv/types/ccv.pb.go index d6a17438a0..f5ae919aa0 100644 --- a/x/ccv/types/ccv.pb.go +++ b/x/ccv/types/ccv.pb.go @@ -25,10 +25,11 @@ var _ = math.Inf // proto package needs to be updated. const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package -// This packet is sent from provider chain to consumer chain if the validator set for consumer chain -// changes (due to new bonding/unbonding messages or slashing events) -// A VSCMatured packet from consumer chain will be sent asynchronously once unbonding period is over, -// and this will function as `UnbondingOver` message for this packet. +// This packet is sent from provider chain to consumer chain if the validator +// set for consumer chain changes (due to new bonding/unbonding messages or +// slashing events) A VSCMatured packet from consumer chain will be sent +// asynchronously once unbonding period is over, and this will function as +// `UnbondingOver` message for this packet. type ValidatorSetChangePacketData struct { ValidatorUpdates []types.ValidatorUpdate `protobuf:"bytes,1,rep,name=validator_updates,json=validatorUpdates,proto3" json:"validator_updates" yaml:"validator_updates"` ValsetUpdateId uint64 `protobuf:"varint,2,opt,name=valset_update_id,json=valsetUpdateId,proto3" json:"valset_update_id,omitempty"` @@ -256,11 +257,103 @@ func (m *SlashPacketData) GetInfraction() types1.InfractionType { return types1.InfractionEmpty } +// UnbondingOpsIndex defines a list of unbonding operation ids. +type UnbondingOpsIndex struct { + Ids []uint64 `protobuf:"varint,1,rep,packed,name=ids,proto3" json:"ids,omitempty"` +} + +func (m *UnbondingOpsIndex) Reset() { *m = UnbondingOpsIndex{} } +func (m *UnbondingOpsIndex) String() string { return proto.CompactTextString(m) } +func (*UnbondingOpsIndex) ProtoMessage() {} +func (*UnbondingOpsIndex) Descriptor() ([]byte, []int) { + return fileDescriptor_68bd5f3242e6f29c, []int{4} +} +func (m *UnbondingOpsIndex) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *UnbondingOpsIndex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_UnbondingOpsIndex.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *UnbondingOpsIndex) XXX_Merge(src proto.Message) { + xxx_messageInfo_UnbondingOpsIndex.Merge(m, src) +} +func (m *UnbondingOpsIndex) XXX_Size() int { + return m.Size() +} +func (m *UnbondingOpsIndex) XXX_DiscardUnknown() { + xxx_messageInfo_UnbondingOpsIndex.DiscardUnknown(m) +} + +var xxx_messageInfo_UnbondingOpsIndex proto.InternalMessageInfo + +func (m *UnbondingOpsIndex) GetIds() []uint64 { + if m != nil { + return m.Ids + } + return nil +} + +// MaturedUnbondingOps defines a list of ids corresponding to ids of matured unbonding operations. 
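// A minimal usage sketch of the generated codec for this message (the ids shown
// are hypothetical; Marshal and Unmarshal are the generated methods):
//
//	ops := MaturedUnbondingOps{Ids: []uint64{4, 7}}
//	bz, _ := ops.Marshal()      // the ids are encoded as a packed varint field
//	var got MaturedUnbondingOps
//	_ = got.Unmarshal(bz)       // got.Ids == []uint64{4, 7}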
+type MaturedUnbondingOps struct { + Ids []uint64 `protobuf:"varint,1,rep,packed,name=ids,proto3" json:"ids,omitempty"` +} + +func (m *MaturedUnbondingOps) Reset() { *m = MaturedUnbondingOps{} } +func (m *MaturedUnbondingOps) String() string { return proto.CompactTextString(m) } +func (*MaturedUnbondingOps) ProtoMessage() {} +func (*MaturedUnbondingOps) Descriptor() ([]byte, []int) { + return fileDescriptor_68bd5f3242e6f29c, []int{5} +} +func (m *MaturedUnbondingOps) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *MaturedUnbondingOps) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_MaturedUnbondingOps.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *MaturedUnbondingOps) XXX_Merge(src proto.Message) { + xxx_messageInfo_MaturedUnbondingOps.Merge(m, src) +} +func (m *MaturedUnbondingOps) XXX_Size() int { + return m.Size() +} +func (m *MaturedUnbondingOps) XXX_DiscardUnknown() { + xxx_messageInfo_MaturedUnbondingOps.DiscardUnknown(m) +} + +var xxx_messageInfo_MaturedUnbondingOps proto.InternalMessageInfo + +func (m *MaturedUnbondingOps) GetIds() []uint64 { + if m != nil { + return m.Ids + } + return nil +} + func init() { proto.RegisterType((*ValidatorSetChangePacketData)(nil), "interchain_security.ccv.v1.ValidatorSetChangePacketData") proto.RegisterType((*UnbondingOp)(nil), "interchain_security.ccv.v1.UnbondingOp") proto.RegisterType((*VSCMaturedPacketData)(nil), "interchain_security.ccv.v1.VSCMaturedPacketData") proto.RegisterType((*SlashPacketData)(nil), "interchain_security.ccv.v1.SlashPacketData") + proto.RegisterType((*UnbondingOpsIndex)(nil), "interchain_security.ccv.v1.UnbondingOpsIndex") + proto.RegisterType((*MaturedUnbondingOps)(nil), "interchain_security.ccv.v1.MaturedUnbondingOps") } func init() { @@ -268,37 +361,39 @@ func init() { } var fileDescriptor_68bd5f3242e6f29c = []byte{ - // 476 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 0xcf, 0x8a, 0xd3, 0x40, - 0x18, 0xef, 0xb4, 0x22, 0x74, 0x0a, 0x75, 0x0d, 0x0b, 0xc6, 0xaa, 0xd9, 0x10, 0x16, 0xc9, 0xc5, - 0x84, 0xd4, 0xdb, 0x9e, 0xb4, 0x15, 0x61, 0x11, 0x51, 0x52, 0x77, 0x41, 0x2f, 0x61, 0x32, 0x33, - 0xa6, 0x43, 0x9b, 0x99, 0x90, 0x99, 0x04, 0xfb, 0x16, 0x3e, 0xd6, 0x1e, 0xf7, 0xe6, 0x9e, 0x16, - 0x69, 0xdf, 0xc0, 0x27, 0x90, 0x4c, 0xd2, 0xb4, 0x6a, 0x3d, 0xec, 0x29, 0x93, 0xef, 0xf7, 0x87, - 0x8f, 0x1f, 0xbf, 0x0f, 0x9e, 0x32, 0xae, 0x68, 0x8e, 0xe7, 0x88, 0xf1, 0x48, 0x52, 0x5c, 0xe4, - 0x4c, 0xad, 0x7c, 0x8c, 0x4b, 0xbf, 0x0c, 0xaa, 0x8f, 0x97, 0xe5, 0x42, 0x09, 0x63, 0x74, 0x80, - 0xe5, 0x55, 0x70, 0x19, 0x8c, 0x4e, 0xb1, 0x90, 0xa9, 0x90, 0xbe, 0x54, 0x68, 0xc1, 0x78, 0xe2, - 0x97, 0x41, 0x4c, 0x15, 0x0a, 0xb6, 0xff, 0xb5, 0xc3, 0xe8, 0x38, 0x11, 0x89, 0xd0, 0x4f, 0xbf, - 0x7a, 0x35, 0xd3, 0x27, 0x8a, 0x72, 0x42, 0xf3, 0x94, 0x71, 0xe5, 0xa3, 0x18, 0x33, 0x5f, 0xad, - 0x32, 0x2a, 0x6b, 0xd0, 0xb9, 0x01, 0xf0, 0xe9, 0x25, 0x5a, 0x32, 0x82, 0x94, 0xc8, 0x67, 0x54, - 0x4d, 0xe7, 0x88, 0x27, 0xf4, 0x23, 0xc2, 0x0b, 0xaa, 0xde, 0x20, 0x85, 0x0c, 0x01, 0x1f, 0x96, - 0x5b, 0x3c, 0x2a, 0x32, 0x82, 0x14, 0x95, 0x26, 0xb0, 0x7b, 0xee, 0x60, 0x6c, 0x7b, 0x3b, 0x67, - 0xaf, 0x72, 0xf6, 0x5a, 0xa7, 0x0b, 0x4d, 0x9c, 0xd8, 0x57, 0xb7, 0x27, 0x9d, 0x5f, 0xb7, 0x27, - 0xe6, 0x0a, 0xa5, 0xcb, 0x33, 0xe7, 0x1f, 0x23, 0x27, 0x3c, 0x2a, 0xff, 0x94, 0x48, 0xc3, 0x85, - 0xd5, 0x4c, 
0x52, 0xd5, 0x90, 0x22, 0x46, 0xcc, 0xae, 0x0d, 0xdc, 0x7b, 0xe1, 0xb0, 0x9e, 0xd7, - 0xc4, 0x73, 0x62, 0x3c, 0x83, 0x50, 0x2e, 0x91, 0x9c, 0x47, 0x08, 0x2f, 0xa4, 0xd9, 0xb3, 0x7b, - 0x6e, 0x3f, 0xec, 0xeb, 0xc9, 0x6b, 0xbc, 0x90, 0xce, 0x67, 0x38, 0xb8, 0xe0, 0xb1, 0xe0, 0x84, - 0xf1, 0xe4, 0x43, 0x66, 0x0c, 0x61, 0x97, 0x11, 0x13, 0x68, 0xa7, 0x2e, 0x23, 0xc6, 0x19, 0x7c, - 0x5c, 0x6c, 0xe1, 0x08, 0x0b, 0x2e, 0x8b, 0x94, 0xe6, 0x91, 0x8e, 0x5f, 0x9a, 0x5d, 0x6d, 0xf6, - 0xa8, 0x25, 0x4c, 0x1b, 0x7c, 0xaa, 0x61, 0xe7, 0x15, 0x3c, 0xbe, 0x9c, 0x4d, 0xdf, 0x23, 0x55, - 0xe4, 0x94, 0xec, 0x85, 0x75, 0x68, 0x77, 0x70, 0x68, 0x77, 0xe7, 0x07, 0x80, 0x0f, 0x66, 0xd5, - 0xaa, 0x7b, 0xea, 0x10, 0xf6, 0xdb, 0x34, 0xb4, 0x6c, 0x30, 0x1e, 0xfd, 0x3f, 0xe2, 0x89, 0xd9, - 0x84, 0x7b, 0xf4, 0x57, 0xb8, 0x4e, 0xb8, 0xb3, 0xb9, 0x43, 0x9a, 0x6f, 0x21, 0x64, 0xfc, 0x6b, - 0x8e, 0xb0, 0x62, 0x82, 0x9b, 0x3d, 0x1b, 0xb8, 0xc3, 0xf1, 0x73, 0xaf, 0xee, 0x9d, 0xb7, 0xed, - 0x59, 0xd3, 0x3b, 0xef, 0xbc, 0x65, 0x7e, 0x5a, 0x65, 0x34, 0xdc, 0x53, 0x4e, 0xde, 0x5d, 0xad, - 0x2d, 0x70, 0xbd, 0xb6, 0xc0, 0xcf, 0xb5, 0x05, 0xbe, 0x6f, 0xac, 0xce, 0xf5, 0xc6, 0xea, 0xdc, - 0x6c, 0xac, 0xce, 0x97, 0x20, 0x61, 0x6a, 0x5e, 0xc4, 0x1e, 0x16, 0xa9, 0xdf, 0xf4, 0x79, 0x57, - 0xf9, 0x17, 0xed, 0x61, 0x7c, 0xd3, 0xa7, 0xa1, 0x4b, 0x1a, 0xdf, 0xd7, 0x2d, 0x7d, 0xf9, 0x3b, - 0x00, 0x00, 0xff, 0xff, 0x47, 0xac, 0x22, 0x45, 0x42, 0x03, 0x00, 0x00, + // 509 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 0x41, 0x8b, 0xd3, 0x40, + 0x14, 0xee, 0xb4, 0x8b, 0xb0, 0x53, 0xa8, 0xdd, 0xb8, 0x60, 0xac, 0x9a, 0x0d, 0x61, 0xd5, 0x5e, + 0x4c, 0x68, 0xbd, 0xed, 0x49, 0x5b, 0x11, 0x8a, 0x88, 0x92, 0xba, 0x0b, 0x7a, 0x09, 0xd3, 0x99, + 0x31, 0x1d, 0xda, 0xcc, 0x84, 0xcc, 0x24, 0x6c, 0xff, 0x85, 0x3f, 0x6b, 0x8f, 0x7b, 0x73, 0x4f, + 0x8b, 0xb4, 0xff, 0xc0, 0x5f, 0x20, 0x99, 0xa4, 0x69, 0x74, 0xeb, 0xc1, 0xd3, 0xbc, 0x79, 0xdf, + 0xf7, 0x3e, 0x1e, 0x1f, 0xdf, 0x83, 0xa7, 0x8c, 0x2b, 0x9a, 0xe0, 0x39, 0x62, 0x3c, 0x90, 0x14, + 0xa7, 0x09, 0x53, 0x2b, 0x0f, 0xe3, 0xcc, 0xcb, 0x06, 0xf9, 0xe3, 0xc6, 0x89, 0x50, 0xc2, 0xe8, + 0xed, 0x61, 0xb9, 0x39, 0x9c, 0x0d, 0x7a, 0xa7, 0x58, 0xc8, 0x48, 0x48, 0x4f, 0x2a, 0xb4, 0x60, + 0x3c, 0xf4, 0xb2, 0xc1, 0x8c, 0x2a, 0x34, 0xd8, 0xfe, 0x0b, 0x85, 0xde, 0x71, 0x28, 0x42, 0xa1, + 0x4b, 0x2f, 0xaf, 0xca, 0xee, 0x63, 0x45, 0x39, 0xa1, 0x49, 0xc4, 0xb8, 0xf2, 0xd0, 0x0c, 0x33, + 0x4f, 0xad, 0x62, 0x2a, 0x0b, 0xd0, 0xb9, 0x01, 0xf0, 0xc9, 0x05, 0x5a, 0x32, 0x82, 0x94, 0x48, + 0xa6, 0x54, 0x8d, 0xe7, 0x88, 0x87, 0xf4, 0x13, 0xc2, 0x0b, 0xaa, 0xde, 0x22, 0x85, 0x0c, 0x01, + 0x8f, 0xb2, 0x2d, 0x1e, 0xa4, 0x31, 0x41, 0x8a, 0x4a, 0x13, 0xd8, 0xad, 0x7e, 0x7b, 0x68, 0xbb, + 0x3b, 0x65, 0x37, 0x57, 0x76, 0x2b, 0xa5, 0x73, 0x4d, 0x1c, 0xd9, 0x57, 0xb7, 0x27, 0x8d, 0x5f, + 0xb7, 0x27, 0xe6, 0x0a, 0x45, 0xcb, 0x33, 0xe7, 0x8e, 0x90, 0xe3, 0x77, 0xb3, 0x3f, 0x47, 0xa4, + 0xd1, 0x87, 0x79, 0x4f, 0x52, 0x55, 0x92, 0x02, 0x46, 0xcc, 0xa6, 0x0d, 0xfa, 0x07, 0x7e, 0xa7, + 0xe8, 0x17, 0xc4, 0x09, 0x31, 0x9e, 0x42, 0x28, 0x97, 0x48, 0xce, 0x03, 0x84, 0x17, 0xd2, 0x6c, + 0xd9, 0xad, 0xfe, 0xa1, 0x7f, 0xa8, 0x3b, 0x6f, 0xf0, 0x42, 0x3a, 0x5f, 0x60, 0xfb, 0x9c, 0xcf, + 0x04, 0x27, 0x8c, 0x87, 0x1f, 0x63, 0xa3, 0x03, 0x9b, 0x8c, 0x98, 0x40, 0x2b, 0x35, 0x19, 0x31, + 0xce, 0xe0, 0xa3, 0x74, 0x0b, 0x07, 0x58, 0x70, 0x99, 0x46, 0x34, 0x09, 0xb4, 0xfd, 0xd2, 0x6c, + 0x6a, 0xb1, 0x87, 0x15, 0x61, 0x5c, 0xe2, 0x63, 0x0d, 0x3b, 0xaf, 0xe1, 0xf1, 0xc5, 0x74, 0xfc, + 0x01, 0xa9, 
0x34, 0xa1, 0xa4, 0x66, 0xd6, 0xbe, 0xdd, 0xc1, 0xbe, 0xdd, 0x9d, 0x1f, 0x00, 0xde, + 0x9f, 0xe6, 0xab, 0xd6, 0xa6, 0x7d, 0x78, 0x58, 0xb9, 0xa1, 0xc7, 0xda, 0xc3, 0xde, 0xbf, 0x2d, + 0x1e, 0x99, 0xa5, 0xb9, 0xdd, 0xbf, 0xcc, 0x75, 0xfc, 0x9d, 0xcc, 0x7f, 0xb8, 0xf9, 0x0e, 0x42, + 0xc6, 0xbf, 0x25, 0x08, 0x2b, 0x26, 0xb8, 0xd9, 0xb2, 0x41, 0xbf, 0x33, 0x7c, 0xee, 0x16, 0xb9, + 0x73, 0xb7, 0x39, 0x2b, 0x73, 0xe7, 0x4e, 0x2a, 0xe6, 0xe7, 0x55, 0x4c, 0xfd, 0xda, 0xa4, 0xf3, + 0x0c, 0x1e, 0xd5, 0x6c, 0x97, 0x13, 0x4e, 0xe8, 0xa5, 0xd1, 0x85, 0x2d, 0x46, 0x8a, 0xdc, 0x1c, + 0xf8, 0x79, 0xe9, 0xbc, 0x80, 0x0f, 0x4a, 0xff, 0xea, 0xec, 0xbb, 0xc4, 0xd1, 0xfb, 0xab, 0xb5, + 0x05, 0xae, 0xd7, 0x16, 0xf8, 0xb9, 0xb6, 0xc0, 0xf7, 0x8d, 0xd5, 0xb8, 0xde, 0x58, 0x8d, 0x9b, + 0x8d, 0xd5, 0xf8, 0x3a, 0x08, 0x99, 0x9a, 0xa7, 0x33, 0x17, 0x8b, 0xc8, 0x2b, 0xef, 0x63, 0x77, + 0x42, 0x2f, 0xab, 0x43, 0xbb, 0xd4, 0xa7, 0xa6, 0x43, 0x3f, 0xbb, 0xa7, 0x53, 0xff, 0xea, 0x77, + 0x00, 0x00, 0x00, 0xff, 0xff, 0xf7, 0xaa, 0xb0, 0x21, 0x92, 0x03, 0x00, 0x00, } func (m *ValidatorSetChangePacketData) Marshal() (dAtA []byte, err error) { @@ -460,6 +555,88 @@ func (m *SlashPacketData) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *UnbondingOpsIndex) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *UnbondingOpsIndex) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *UnbondingOpsIndex) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Ids) > 0 { + dAtA3 := make([]byte, len(m.Ids)*10) + var j2 int + for _, num := range m.Ids { + for num >= 1<<7 { + dAtA3[j2] = uint8(uint64(num)&0x7f | 0x80) + num >>= 7 + j2++ + } + dAtA3[j2] = uint8(num) + j2++ + } + i -= j2 + copy(dAtA[i:], dAtA3[:j2]) + i = encodeVarintCcv(dAtA, i, uint64(j2)) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func (m *MaturedUnbondingOps) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *MaturedUnbondingOps) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *MaturedUnbondingOps) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Ids) > 0 { + dAtA5 := make([]byte, len(m.Ids)*10) + var j4 int + for _, num := range m.Ids { + for num >= 1<<7 { + dAtA5[j4] = uint8(uint64(num)&0x7f | 0x80) + num >>= 7 + j4++ + } + dAtA5[j4] = uint8(num) + j4++ + } + i -= j4 + copy(dAtA[i:], dAtA5[:j4]) + i = encodeVarintCcv(dAtA, i, uint64(j4)) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + func encodeVarintCcv(dAtA []byte, offset int, v uint64) int { offset -= sovCcv(v) base := offset @@ -542,6 +719,38 @@ func (m *SlashPacketData) Size() (n int) { return n } +func (m *UnbondingOpsIndex) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.Ids) > 0 { + l = 0 + for _, e := range m.Ids { + l += sovCcv(uint64(e)) + } + n += 1 + sovCcv(uint64(l)) + l + } + return n +} + +func (m *MaturedUnbondingOps) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.Ids) > 0 { + l = 0 + for _, e := range m.Ids { + l += 
sovCcv(uint64(e)) + } + n += 1 + sovCcv(uint64(l)) + l + } + return n +} + func sovCcv(x uint64) (n int) { return (math_bits.Len64(x|1) + 6) / 7 } @@ -974,6 +1183,258 @@ func (m *SlashPacketData) Unmarshal(dAtA []byte) error { } return nil } +func (m *UnbondingOpsIndex) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowCcv + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: UnbondingOpsIndex: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: UnbondingOpsIndex: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType == 0 { + var v uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowCcv + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.Ids = append(m.Ids, v) + } else if wireType == 2 { + var packedLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowCcv + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + packedLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if packedLen < 0 { + return ErrInvalidLengthCcv + } + postIndex := iNdEx + packedLen + if postIndex < 0 { + return ErrInvalidLengthCcv + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + var elementCount int + var count int + for _, integer := range dAtA[iNdEx:postIndex] { + if integer < 128 { + count++ + } + } + elementCount = count + if elementCount != 0 && len(m.Ids) == 0 { + m.Ids = make([]uint64, 0, elementCount) + } + for iNdEx < postIndex { + var v uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowCcv + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.Ids = append(m.Ids, v) + } + } else { + return fmt.Errorf("proto: wrong wireType = %d for field Ids", wireType) + } + default: + iNdEx = preIndex + skippy, err := skipCcv(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthCcv + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *MaturedUnbondingOps) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowCcv + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: MaturedUnbondingOps: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: MaturedUnbondingOps: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType == 0 { + var v uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowCcv + } + if iNdEx >= l { + return 
io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.Ids = append(m.Ids, v) + } else if wireType == 2 { + var packedLen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowCcv + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + packedLen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if packedLen < 0 { + return ErrInvalidLengthCcv + } + postIndex := iNdEx + packedLen + if postIndex < 0 { + return ErrInvalidLengthCcv + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + var elementCount int + var count int + for _, integer := range dAtA[iNdEx:postIndex] { + if integer < 128 { + count++ + } + } + elementCount = count + if elementCount != 0 && len(m.Ids) == 0 { + m.Ids = make([]uint64, 0, elementCount) + } + for iNdEx < postIndex { + var v uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowCcv + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + v |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + m.Ids = append(m.Ids, v) + } + } else { + return fmt.Errorf("proto: wrong wireType = %d for field Ids", wireType) + } + default: + iNdEx = preIndex + skippy, err := skipCcv(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthCcv + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func skipCcv(dAtA []byte) (n int, err error) { l := len(dAtA) iNdEx := 0 diff --git a/x/ccv/types/errors.go b/x/ccv/types/errors.go index 9af7b8a32a..f20f294f1e 100644 --- a/x/ccv/types/errors.go +++ b/x/ccv/types/errors.go @@ -19,7 +19,7 @@ var ( ErrInvalidVSCMaturedTime = sdkerrors.Register(ModuleName, 12, "invalid maturity time for VSC packet") ErrInvalidConsumerState = sdkerrors.Register(ModuleName, 13, "provider chain has invalid state for consumer chain") ErrInvalidConsumerClient = sdkerrors.Register(ModuleName, 14, "ccv channel is not built on correct client") - ErrInvalidProposal = sdkerrors.Register(ModuleName, 15, "invalid create consumer chain proposal") + ErrInvalidProposal = sdkerrors.Register(ModuleName, 15, "invalid proposal") ErrInvalidHandshakeMetadata = sdkerrors.Register(ModuleName, 16, "invalid provider handshake metadata") ErrChannelNotFound = sdkerrors.Register(ModuleName, 17, "channel not found") ErrClientNotFound = sdkerrors.Register(ModuleName, 18, "client not found") From b047585aba154de9a93b6ceafb8724197018b1f0 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 10:51:27 -0500 Subject: [PATCH 097/127] Change num traces --- x/ccv/provider/keydel/keydel_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index ad41da1318..cf2911221d 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -7,7 +7,7 @@ import ( "github.com/stretchr/testify/require" ) -const NUM_TRACES = 4000 +const NUM_TRACES = 1000 const TRACE_LEN = 1000 const NUM_VALS = 4 const NUM_FKS = 50 From 6d8bd9370277b3f88749bba9653c4e6dcc74eb64 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 10:51:30 -0500 Subject: [PATCH 098/127] Symbol rename --- x/ccv/provider/keydel/keydel.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go 
b/x/ccv/provider/keydel/keydel.go index 274435dbb9..9466c086d1 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -120,14 +120,14 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { ret := map[FK]int{} - fkToUpdateClone := map[FK]memo{} + fkToMemoClone := map[FK]memo{} for k, v := range e.fkToMemo { - fkToUpdateClone[k] = v + fkToMemoClone[k] = v } // Iterate all local keys for which there was previously a positive update. for _, lk := range lks { - for _, u := range fkToUpdateClone { + for _, u := range fkToMemoClone { if u.lk == lk && 0 < u.power { e.fkToMemo[u.fk] = memo{fk: u.fk, lk: lk, vscid: vscid, power: 0} ret[u.fk] = 0 @@ -139,7 +139,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { // has been a power update. for _, lk := range lks { power := 0 - for _, u := range fkToUpdateClone { + for _, u := range fkToMemoClone { if u.lk == lk && 0 < u.power { power = u.power } From 80763aee0de37e4abe6f0eac1281db2daf98f2ae Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 10:55:18 -0500 Subject: [PATCH 099/127] Symbol renames --- x/ccv/provider/keydel/keydel.go | 2 +- x/ccv/provider/keydel/keydel_test.go | 181 ++++++++++++++------------- 2 files changed, 92 insertions(+), 91 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 9466c086d1..e410f852af 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -48,7 +48,7 @@ func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { inUse = true } if inUse { - return errors.New(`cannot reuse foreign key which is currently being used for lookups`) + return errors.New(`cannot reuse foreign key which is currently or recently in use`) } if oldFk, ok := e.lkToFk[lk]; ok { delete(e.fkToLk, oldFk) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index cf2911221d..e6b5e3026b 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -12,58 +12,60 @@ const TRACE_LEN = 1000 const NUM_VALS = 4 const NUM_FKS = 50 -type mapInstruction struct { +type keyMapEntry struct { lk LK fk FK } -type TraceState struct { - MapInstructions []mapInstruction - LocalUpdates []update - TP int - TC int - TM int +type traceState struct { + mapInstructions []keyMapEntry + localUpdates []update + tp int + tc int + tm int } -type Driver struct { +type driver struct { t *testing.T - e *KeyDel - trace []TraceState + kd *KeyDel + trace []traceState lastTP int lastTC int lastTM int // indexed by time (starting at 0) - mappings []map[LK]FK + mappings []map[LK]FK + // indexed by time (starting at 0) foreignUpdates [][]update - localValSets []ValSet - foreignValSet ValSet + // indexed by time (starting at 0) + localValSets []valSet + foreignValSet valSet } -func MakeDriver(t *testing.T, trace []TraceState) Driver { - d := Driver{} +func makeDriver(t *testing.T, trace []traceState) driver { + d := driver{} d.t = t e := MakeKeyDel() - d.e = &e + d.kd = &e d.trace = trace d.lastTP = 0 d.lastTC = 0 d.lastTM = 0 d.mappings = []map[LK]FK{} d.foreignUpdates = [][]update{} - d.localValSets = []ValSet{} - d.foreignValSet = ValSet{} + d.localValSets = []valSet{} + d.foreignValSet = valSet{} return d } -type ValSet struct { +type valSet struct { keyToPower map[int]int } -func MakeValSet() ValSet { - return ValSet{keyToPower: map[int]int{}} +func makeValSet() valSet { + return valSet{keyToPower: map[int]int{}} } -func (vs *ValSet) applyUpdates(updates 
[]update) { +func (vs *valSet) applyUpdates(updates []update) { for _, u := range updates { delete(vs.keyToPower, u.key) if 0 < u.power { @@ -72,19 +74,19 @@ func (vs *ValSet) applyUpdates(updates []update) { } } -func (d *Driver) applyMapInstructions(instructions []mapInstruction) { +func (d *driver) applyMapInstructions(instructions []keyMapEntry) { for _, instruction := range instructions { - _ = d.e.SetLocalToForeign(instruction.lk, instruction.fk) + _ = d.kd.SetLocalToForeign(instruction.lk, instruction.fk) } copy := map[LK]FK{} - for lk, fk := range d.e.lkToFk { + for lk, fk := range d.kd.lkToFk { copy[lk] = fk } d.mappings = append(d.mappings, copy) } -func (d *Driver) applyLocalUpdates(localUpdates []update) { - valSet := MakeValSet() +func (d *driver) applyLocalUpdates(localUpdates []update) { + valSet := makeValSet() for lk, power := range d.localValSets[d.lastTP].keyToPower { valSet.keyToPower[lk] = power } @@ -92,21 +94,21 @@ func (d *Driver) applyLocalUpdates(localUpdates []update) { d.localValSets = append(d.localValSets, valSet) } -func (d *Driver) runTrace() { +func (d *driver) run() { { init := d.trace[0] // Set the initial map - d.applyMapInstructions(init.MapInstructions) + d.applyMapInstructions(init.mapInstructions) // Set the initial local set - d.localValSets = append(d.localValSets, MakeValSet()) - d.localValSets[init.TP].applyUpdates(init.LocalUpdates) + d.localValSets = append(d.localValSets, makeValSet()) + d.localValSets[init.tp].applyUpdates(init.localUpdates) // Set the initial foreign set - d.foreignUpdates = append(d.foreignUpdates, d.e.ComputeUpdates(init.TP, init.LocalUpdates)) + d.foreignUpdates = append(d.foreignUpdates, d.kd.ComputeUpdates(init.tp, init.localUpdates)) // The first foreign set equal to the local set at time 0 - d.foreignValSet = MakeValSet() - d.foreignValSet.applyUpdates(d.foreignUpdates[init.TC]) - d.e.Prune(init.TM) + d.foreignValSet = makeValSet() + d.foreignValSet.applyUpdates(d.foreignUpdates[init.tc]) + d.kd.Prune(init.tm) } // Sanity check the initial state @@ -116,31 +118,31 @@ func (d *Driver) runTrace() { // Check properties for each state after the initial for _, s := range d.trace[1:] { - if d.lastTP < s.TP { + if d.lastTP < s.tp { // Provider time increment: // Apply some key mappings and create some new validator power updates - d.applyMapInstructions(s.MapInstructions) - d.applyLocalUpdates(s.LocalUpdates) - d.foreignUpdates = append(d.foreignUpdates, d.e.ComputeUpdates(s.TP, s.LocalUpdates)) - d.lastTP = s.TP + d.applyMapInstructions(s.mapInstructions) + d.applyLocalUpdates(s.localUpdates) + d.foreignUpdates = append(d.foreignUpdates, d.kd.ComputeUpdates(s.tp, s.localUpdates)) + d.lastTP = s.tp } - if d.lastTC < s.TC { - for j := d.lastTC + 1; j <= s.TC; j++ { + if d.lastTC < s.tc { + for j := d.lastTC + 1; j <= s.tc; j++ { d.foreignValSet.applyUpdates(d.foreignUpdates[j]) } - d.lastTC = s.TC + d.lastTC = s.tc } - if d.lastTM < s.TM { + if d.lastTM < s.tm { // Models maturations being received on the provider. 
- d.e.Prune(s.TM) - d.lastTM = s.TM + d.kd.Prune(s.tm) + d.lastTM = s.tm } - require.True(d.t, d.e.internalInvariants()) + require.True(d.t, d.kd.internalInvariants()) d.checkProperties() } } -func (d *Driver) checkProperties() { +func (d *driver) checkProperties() { /* When a consumer receives and processes up to VSCID i, @@ -208,13 +210,13 @@ func (d *Driver) checkProperties() { expectQueryable[u.key] = true } } - for _, fk := range d.e.lkToFk { + for _, fk := range d.kd.lkToFk { expectQueryable[fk] = true } // Simply check every foreign key for the correct queryable-ness. for fk := 0; fk < NUM_FKS; fk++ { - _, err := d.e.GetLocal(fk) + _, err := d.kd.GetLocal(fk) actualQueryable := err == nil if expect, found := expectQueryable[fk]; found && expect { require.True(d.t, actualQueryable) @@ -230,7 +232,7 @@ func (d *Driver) checkProperties() { queries := func() { // For each fk known to the consumer for consumerFK := range d.foreignValSet.keyToPower { - queriedLK, err := d.e.GetLocal(consumerFK) + queriedLK, err := d.kd.GetLocal(consumerFK) // There must be a corresponding local key require.Nil(d.t, err) providerFKs := map[FK]bool{} @@ -258,16 +260,16 @@ func (d *Driver) checkProperties() { } -func getTrace(t *testing.T) []TraceState { +func getTrace(t *testing.T) []traceState { - mappings := func() []mapInstruction { - ret := []mapInstruction{} + mappings := func() []keyMapEntry { + ret := []keyMapEntry{} // Go several times to have overlapping validator updates for i := 0; i < 2; i++ { // include 0 to all validators include := rand.Intn(NUM_VALS + 1) for _, lk := range rand.Perm(NUM_VALS)[0:include] { - ret = append(ret, mapInstruction{lk, rand.Intn(NUM_FKS)}) + ret = append(ret, keyMapEntry{lk, rand.Intn(NUM_FKS)}) } } return ret @@ -283,19 +285,19 @@ func getTrace(t *testing.T) []TraceState { return ret } - initialMappings := []mapInstruction{} + initialMappings := []keyMapEntry{} for i := 0; i < NUM_VALS; i++ { - initialMappings = append(initialMappings, mapInstruction{i, i}) + initialMappings = append(initialMappings, keyMapEntry{i, i}) } - ret := []TraceState{ + ret := []traceState{ { // Hard code initial mapping - MapInstructions: initialMappings, - LocalUpdates: localUpdates(), - TP: 0, - TC: 0, - TM: 0, + mapInstructions: initialMappings, + localUpdates: localUpdates(), + tp: 0, + tc: 0, + tm: 0, }, } @@ -303,44 +305,44 @@ func getTrace(t *testing.T) []TraceState { choice := rand.Intn(3) last := ret[len(ret)-1] if choice == 0 { - ret = append(ret, TraceState{ - MapInstructions: mappings(), - LocalUpdates: localUpdates(), - TP: last.TP + 1, - TC: last.TC, - TM: last.TM, + ret = append(ret, traceState{ + mapInstructions: mappings(), + localUpdates: localUpdates(), + tp: last.tp + 1, + tc: last.tc, + tm: last.tm, }) } if choice == 1 { - curr := last.TC - limInclusive := last.TP + curr := last.tc + limInclusive := last.tp if curr < limInclusive { // add in [1, limInclusive - curr] // rand in [0, limInclusive - curr - 1] // bound is [0, limInclusive - curr) newTC := rand.Intn(limInclusive-curr) + curr + 1 require.True(t, curr < newTC && curr <= limInclusive) - ret = append(ret, TraceState{ - MapInstructions: nil, - LocalUpdates: nil, - TP: last.TP, - TC: newTC, - TM: last.TM, + ret = append(ret, traceState{ + mapInstructions: nil, + localUpdates: nil, + tp: last.tp, + tc: newTC, + tm: last.tm, }) } } if choice == 2 { - curr := last.TM - limInclusive := last.TC + curr := last.tm + limInclusive := last.tc if curr < limInclusive { newTM := rand.Intn(limInclusive-curr) + curr + 1 
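			// e.g. (illustrative): with curr = 3 and limInclusive = 5, rand.Intn(2)
			// yields 0 or 1, so newTM is 4 or 5, i.e. strictly greater than curr
			// and at most limInclusive.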
require.True(t, curr < newTM && curr <= limInclusive) - ret = append(ret, TraceState{ - MapInstructions: nil, - LocalUpdates: nil, - TP: last.TP, - TC: last.TC, - TM: newTM, + ret = append(ret, traceState{ + mapInstructions: nil, + localUpdates: nil, + tp: last.tp, + tc: last.tc, + tm: newTM, }) } } @@ -348,14 +350,13 @@ func getTrace(t *testing.T) []TraceState { return ret } -func TestPrototype(t *testing.T) { +func TestRandomHeuristic(t *testing.T) { for i := 0; i < NUM_TRACES; i++ { - trace := []TraceState{} + trace := []traceState{} for len(trace) < 2 { trace = getTrace(t) } - d := MakeDriver(t, trace) - d.runTrace() - + d := makeDriver(t, trace) + d.run() } } From 43d3f82b6e6cf3746be0ebf895c7440c7096f80e Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 11:00:44 -0500 Subject: [PATCH 100/127] Better invariant comments --- x/ccv/provider/keydel/keydel.go | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index e410f852af..bdc185b96a 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -163,6 +163,7 @@ func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { func (e *KeyDel) internalInvariants() bool { // No two local keys can map to the same foreign key + // (lkToFk is sane) seen := map[FK]bool{} for _, fk := range e.lkToFk { if seen[fk] { @@ -171,14 +172,17 @@ func (e *KeyDel) internalInvariants() bool { seen[fk] = true } - // All foreign keys mapped to by local keys are noted + // all values of lkToFk is a key of fkToLk + // (reverse lookup is always possible) for _, fk := range e.lkToFk { if _, ok := e.fkToLk[fk]; !ok { return false } } - // All mapped to foreign keys are actually mapped to + // All foreign keys mapping to local keys are actually + // mapped to by the local key. + // (fkToLk is sane) for fk := range e.fkToLk { good := false for _, candidateFk := range e.lkToFk { @@ -192,8 +196,10 @@ func (e *KeyDel) internalInvariants() bool { } } - // If a foreign key is directly mapped to a local key - // there is no disagreeing on the local key. + // If a foreign key is mapped to a local key (currently) + // any memo containing the same foreign key has the same + // mapping. 
+ // (Ensures lookups are correct) for fk, lk := range e.fkToLk { if u, ok := e.fkToMemo[fk]; ok { if lk != u.lk { From 041d5d21bdab26ed25f006be764b1370c3ab88c0 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 11:04:27 -0500 Subject: [PATCH 101/127] CP wording --- x/ccv/provider/keydel/keydel_test.go | 33 +++++++++++++++------------- 1 file changed, 18 insertions(+), 15 deletions(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index e6b5e3026b..4f5ca48562 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -17,7 +17,7 @@ type keyMapEntry struct { fk FK } -type traceState struct { +type traceStep struct { mapInstructions []keyMapEntry localUpdates []update tp int @@ -28,7 +28,7 @@ type traceState struct { type driver struct { t *testing.T kd *KeyDel - trace []traceState + trace []traceStep lastTP int lastTC int lastTM int @@ -41,7 +41,7 @@ type driver struct { foreignValSet valSet } -func makeDriver(t *testing.T, trace []traceState) driver { +func makeDriver(t *testing.T, trace []traceStep) driver { d := driver{} d.t = t e := MakeKeyDel() @@ -145,14 +145,12 @@ func (d *driver) run() { func (d *driver) checkProperties() { /* - When a consumer receives and processes up to VSCID i, - it must have a validator set equal to that on the provider at i - mapped through the key mapping that was on the provider when i - was sent. + For a consumer who has received updates up to VSCID i, its + local validator set must be equal to the set on the provider + when i was sent, mapped through the mapping at that time. */ validatorSetReplication := func() { - // Get the current consumer val set. foreignSet := d.foreignValSet.keyToPower // Get the provider set at the corresponding time. localSet := d.localValSets[d.lastTC].keyToPower @@ -260,7 +258,9 @@ func (d *driver) checkProperties() { } -func getTrace(t *testing.T) []traceState { +// Return a randomly generated list of steps +// which can be used to execute actions for testing. +func getTrace(t *testing.T) []traceStep { mappings := func() []keyMapEntry { ret := []keyMapEntry{} @@ -290,7 +290,7 @@ func getTrace(t *testing.T) []traceState { initialMappings = append(initialMappings, keyMapEntry{i, i}) } - ret := []traceState{ + ret := []traceStep{ { // Hard code initial mapping mapInstructions: initialMappings, @@ -305,7 +305,7 @@ func getTrace(t *testing.T) []traceState { choice := rand.Intn(3) last := ret[len(ret)-1] if choice == 0 { - ret = append(ret, traceState{ + ret = append(ret, traceStep{ mapInstructions: mappings(), localUpdates: localUpdates(), tp: last.tp + 1, @@ -322,7 +322,7 @@ func getTrace(t *testing.T) []traceState { // bound is [0, limInclusive - curr) newTC := rand.Intn(limInclusive-curr) + curr + 1 require.True(t, curr < newTC && curr <= limInclusive) - ret = append(ret, traceState{ + ret = append(ret, traceStep{ mapInstructions: nil, localUpdates: nil, tp: last.tp, @@ -337,7 +337,7 @@ func getTrace(t *testing.T) []traceState { if curr < limInclusive { newTM := rand.Intn(limInclusive-curr) + curr + 1 require.True(t, curr < newTM && curr <= limInclusive) - ret = append(ret, traceState{ + ret = append(ret, traceStep{ mapInstructions: nil, localUpdates: nil, tp: last.tp, @@ -350,9 +350,12 @@ func getTrace(t *testing.T) []traceState { return ret } -func TestRandomHeuristic(t *testing.T) { +// Execute randomly generated traces (lists of actions) +// against new instances of the class, checking properties +// after each action is done. 
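// For illustration (all keys and powers here are hypothetical), a single
// generated step might look like:
//
//	traceStep{
//		mapInstructions: []keyMapEntry{{lk: 0, fk: 21}},
//		localUpdates:    []update{{key: 0, power: 5}},
//		tp:              2, // provider time advances by one
//		tc:              1, // consumer has processed updates up to vscid 1
//		tm:              1, // maturities received on the provider up to vscid 1
//	}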
+func TestPropertiesRandomlyHeuristically(t *testing.T) { for i := 0; i < NUM_TRACES; i++ { - trace := []traceState{} + trace := []traceStep{} for len(trace) < 2 { trace = getTrace(t) } From 7687ceaee4756ad22b0e6da52c054a28ff8ed000 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 11:14:37 -0500 Subject: [PATCH 102/127] Better comments --- x/ccv/provider/keydel/keydel.go | 166 +++++++++++++++------------ x/ccv/provider/keydel/keydel_test.go | 32 +++--- 2 files changed, 106 insertions(+), 92 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index bdc185b96a..6c3222f79b 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -4,8 +4,8 @@ import ( "errors" ) -type LK = int -type FK = int +type PK = int +type CK = int type VSCID = int type update struct { @@ -14,8 +14,8 @@ type update struct { } type memo struct { - fk FK - lk LK + ck CK + pk PK vscid int power int } @@ -24,135 +24,149 @@ type memo struct { // 1. Integrate into kv store. // 2. integrate into Provider::EndBlock, // 3. integrate with create/destroy validator +// 4. TODO: document this file type KeyDel struct { - lkToFk map[LK]FK - fkToLk map[FK]LK - fkToMemo map[FK]memo + pkToCk map[PK]CK + CkToPk map[CK]PK + CkToMemo map[CK]memo } func MakeKeyDel() KeyDel { return KeyDel{ - lkToFk: map[LK]FK{}, - fkToLk: map[FK]LK{}, - fkToMemo: map[FK]memo{}, + pkToCk: map[PK]CK{}, + CkToPk: map[CK]PK{}, + CkToMemo: map[CK]memo{}, } } -func (e *KeyDel) SetLocalToForeign(lk LK, fk FK) error { +// TODO: +func (e *KeyDel) SetProviderKeyToConsumerKey(lk PK, fk CK) error { inUse := false - if _, ok := e.fkToLk[fk]; ok { + if _, ok := e.CkToPk[fk]; ok { inUse = true } - if _, ok := e.fkToMemo[fk]; ok { + if _, ok := e.CkToMemo[fk]; ok { inUse = true } if inUse { - return errors.New(`cannot reuse foreign key which is currently or recently in use`) + return errors.New(`cannot reuse key which is in use or was recently in use`) } - if oldFk, ok := e.lkToFk[lk]; ok { - delete(e.fkToLk, oldFk) + if oldFk, ok := e.pkToCk[lk]; ok { + delete(e.CkToPk, oldFk) } - e.lkToFk[lk] = fk - e.fkToLk[fk] = lk + e.pkToCk[lk] = fk + e.CkToPk[fk] = lk return nil } -func (e *KeyDel) GetLocal(fk FK) (LK, error) { - if u, ok := e.fkToMemo[fk]; ok { - return u.lk, nil - } else if lk, ok := e.fkToLk[fk]; ok { +// TODO: +func (e *KeyDel) GetProviderKey(fk CK) (PK, error) { + if u, ok := e.CkToMemo[fk]; ok { + return u.pk, nil + } else if lk, ok := e.CkToPk[fk]; ok { return lk, nil } else { return -1, errors.New("local key not found for foreign key") } } -func (e *KeyDel) Prune(vscid VSCID) { - toDel := []FK{} - for _, u := range e.fkToMemo { +// TODO: +func (e *KeyDel) PruneUnusedKeys(latestVscid VSCID) { + toDel := []CK{} + for _, u := range e.CkToMemo { // If the last update was a deletion (0 power) and the update // matured then pruning is possible. 
- if u.power == 0 && u.vscid <= vscid { - toDel = append(toDel, u.fk) + if u.power == 0 && u.vscid <= latestVscid { + toDel = append(toDel, u.ck) } } for _, fk := range toDel { - delete(e.fkToMemo, fk) + delete(e.CkToMemo, fk) } } -func (e *KeyDel) ComputeUpdates(vscid VSCID, localUpdates []update) (foreignUpdates []update) { +// TODO: +func (e *KeyDel) ComputeUpdates(vscid VSCID, providerUpdates []update) (consumerUpdates []update) { - local := map[LK]int{} + updates := map[PK]int{} - for _, u := range localUpdates { - local[u.key] = u.power + for _, u := range providerUpdates { + updates[u.key] = u.power } - foreign := e.inner(vscid, local) + foreign := e.inner(vscid, updates) - foreignUpdates = []update{} + consumerUpdates = []update{} for fk, power := range foreign { - foreignUpdates = append(foreignUpdates, update{key: fk, power: power}) + consumerUpdates = append(consumerUpdates, update{key: fk, power: power}) } - return foreignUpdates + return consumerUpdates } -func (e *KeyDel) inner(vscid VSCID, localUpdates map[LK]int) map[FK]int { +// do inner work as part of ComputeUpdates +func (e *KeyDel) inner(vscid VSCID, providerUpdates map[PK]int) map[CK]int { - lks := []LK{} + pks := []PK{} - // Grab lks for which fk changed - for oldFk, u := range e.fkToMemo { - if newFk, ok := e.lkToFk[u.lk]; ok { - if oldFk != newFk && 0 < u.power { - lks = append(lks, u.lk) + // Grab provider keys where the assigned consumer key has changed + for oldCk, u := range e.CkToMemo { + if newCk, ok := e.pkToCk[u.pk]; ok { + if oldCk != newCk && 0 < u.power { + pks = append(pks, u.pk) } } } - // Grab lks for which power changed - for lk := range localUpdates { - lks = append(lks, lk) + // Grab provider keys where the validator power has changed + for pk := range providerUpdates { + pks = append(pks, pk) } - ret := map[FK]int{} + ret := map[CK]int{} - fkToMemoClone := map[FK]memo{} - for k, v := range e.fkToMemo { - fkToMemoClone[k] = v + // Create a read only copy, so that we can query while writing + // updates to the old version. + ckToMemo_READ_ONLY := map[CK]memo{} + for ck, memo := range e.CkToMemo { + ckToMemo_READ_ONLY[ck] = memo } - // Iterate all local keys for which there was previously a positive update. - for _, lk := range lks { - for _, u := range fkToMemoClone { - if u.lk == lk && 0 < u.power { - e.fkToMemo[u.fk] = memo{fk: u.fk, lk: lk, vscid: vscid, power: 0} - ret[u.fk] = 0 + for _, pk := range pks { + for _, u := range ckToMemo_READ_ONLY { + if u.pk == pk && 0 < u.power { + // For each provider key for which there was already a positive update + // create a deletion update for the associated consumer key. + e.CkToMemo[u.ck] = memo{ck: u.ck, pk: pk, vscid: vscid, power: 0} + ret[u.ck] = 0 } } } - // Iterate all local keys for which either the foreign key changed or there - // has been a power update. - for _, lk := range lks { + for _, pk := range pks { + // For each provider key where there was either + // 1) already a positive power update + // 2) the validator power has changed (and is still positive) + // create a change update for the associated consumer key. + power := 0 - for _, u := range fkToMemoClone { - if u.lk == lk && 0 < u.power { + for _, u := range ckToMemo_READ_ONLY { + if u.pk == pk && 0 < u.power { + // There was previously a positive power update: copy it. power = u.power } } - // If there is a new power use it. - if newPower, ok := localUpdates[lk]; ok { + // There is a new validator power: use it. 
+ if newPower, ok := providerUpdates[pk]; ok { power = newPower } - // Only ship positive powers. Zero powers are accounted for above. + // Only ship update with positive powers. Zero power updates (deletions) + // are handled in earlier block. if 0 < power { - fk := e.lkToFk[lk] - e.fkToMemo[fk] = memo{fk: fk, lk: lk, vscid: vscid, power: power} - ret[fk] = power + ck := e.pkToCk[pk] + e.CkToMemo[ck] = memo{ck: ck, pk: pk, vscid: vscid, power: power} + ret[ck] = power } } @@ -164,8 +178,8 @@ func (e *KeyDel) internalInvariants() bool { // No two local keys can map to the same foreign key // (lkToFk is sane) - seen := map[FK]bool{} - for _, fk := range e.lkToFk { + seen := map[CK]bool{} + for _, fk := range e.pkToCk { if seen[fk] { return false } @@ -174,8 +188,8 @@ func (e *KeyDel) internalInvariants() bool { // all values of lkToFk is a key of fkToLk // (reverse lookup is always possible) - for _, fk := range e.lkToFk { - if _, ok := e.fkToLk[fk]; !ok { + for _, fk := range e.pkToCk { + if _, ok := e.CkToPk[fk]; !ok { return false } } @@ -183,9 +197,9 @@ func (e *KeyDel) internalInvariants() bool { // All foreign keys mapping to local keys are actually // mapped to by the local key. // (fkToLk is sane) - for fk := range e.fkToLk { + for fk := range e.CkToPk { good := false - for _, candidateFk := range e.lkToFk { + for _, candidateFk := range e.pkToCk { if candidateFk == fk { good = true break @@ -200,9 +214,9 @@ func (e *KeyDel) internalInvariants() bool { // any memo containing the same foreign key has the same // mapping. // (Ensures lookups are correct) - for fk, lk := range e.fkToLk { - if u, ok := e.fkToMemo[fk]; ok { - if lk != u.lk { + for fk, lk := range e.CkToPk { + if u, ok := e.CkToMemo[fk]; ok { + if lk != u.pk { return false } } diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 4f5ca48562..baf5dbfbb6 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -13,8 +13,8 @@ const NUM_VALS = 4 const NUM_FKS = 50 type keyMapEntry struct { - lk LK - fk FK + lk PK + fk CK } type traceStep struct { @@ -33,7 +33,7 @@ type driver struct { lastTC int lastTM int // indexed by time (starting at 0) - mappings []map[LK]FK + mappings []map[PK]CK // indexed by time (starting at 0) foreignUpdates [][]update // indexed by time (starting at 0) @@ -50,7 +50,7 @@ func makeDriver(t *testing.T, trace []traceStep) driver { d.lastTP = 0 d.lastTC = 0 d.lastTM = 0 - d.mappings = []map[LK]FK{} + d.mappings = []map[PK]CK{} d.foreignUpdates = [][]update{} d.localValSets = []valSet{} d.foreignValSet = valSet{} @@ -76,10 +76,10 @@ func (vs *valSet) applyUpdates(updates []update) { func (d *driver) applyMapInstructions(instructions []keyMapEntry) { for _, instruction := range instructions { - _ = d.kd.SetLocalToForeign(instruction.lk, instruction.fk) + _ = d.kd.SetProviderKeyToConsumerKey(instruction.lk, instruction.fk) } - copy := map[LK]FK{} - for lk, fk := range d.kd.lkToFk { + copy := map[PK]CK{} + for lk, fk := range d.kd.pkToCk { copy[lk] = fk } d.mappings = append(d.mappings, copy) @@ -108,7 +108,7 @@ func (d *driver) run() { // The first foreign set equal to the local set at time 0 d.foreignValSet = makeValSet() d.foreignValSet.applyUpdates(d.foreignUpdates[init.tc]) - d.kd.Prune(init.tm) + d.kd.PruneUnusedKeys(init.tm) } // Sanity check the initial state @@ -134,7 +134,7 @@ func (d *driver) run() { } if d.lastTM < s.tm { // Models maturations being received on the provider. 
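			// (That is, the provider has learned, via VSCMatured packets from the
			// consumer, that the unbonding period for packets up to vscid s.tm has
			// elapsed there, so the corresponding zero-power memos can be pruned.)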
- d.kd.Prune(s.tm) + d.kd.PruneUnusedKeys(s.tm) d.lastTM = s.tm } require.True(d.t, d.kd.internalInvariants()) @@ -157,10 +157,10 @@ func (d *driver) checkProperties() { // Compute a lookup mapping consumer powers // back to provider powers, to enable comparison. - foreignSetAsLocal := map[LK]int{} + foreignSetAsLocal := map[PK]int{} { mapping := d.mappings[d.lastTC] - inverseMapping := map[FK]LK{} + inverseMapping := map[CK]PK{} for lk, fk := range mapping { inverseMapping[fk] = lk } @@ -196,7 +196,7 @@ func (d *driver) checkProperties() { be pruned. */ pruning := func() { - expectQueryable := map[FK]bool{} + expectQueryable := map[CK]bool{} for i := 0; i <= d.lastTM; i++ { for _, u := range d.foreignUpdates[i] { @@ -208,13 +208,13 @@ func (d *driver) checkProperties() { expectQueryable[u.key] = true } } - for _, fk := range d.kd.lkToFk { + for _, fk := range d.kd.pkToCk { expectQueryable[fk] = true } // Simply check every foreign key for the correct queryable-ness. for fk := 0; fk < NUM_FKS; fk++ { - _, err := d.kd.GetLocal(fk) + _, err := d.kd.GetProviderKey(fk) actualQueryable := err == nil if expect, found := expectQueryable[fk]; found && expect { require.True(d.t, actualQueryable) @@ -230,10 +230,10 @@ func (d *driver) checkProperties() { queries := func() { // For each fk known to the consumer for consumerFK := range d.foreignValSet.keyToPower { - queriedLK, err := d.kd.GetLocal(consumerFK) + queriedLK, err := d.kd.GetProviderKey(consumerFK) // There must be a corresponding local key require.Nil(d.t, err) - providerFKs := map[FK]bool{} + providerFKs := map[CK]bool{} // The local key must be the one that was actually referenced // in the latest mapping used to compute updates sent to the // consumer. From dbc25173202864bad2797219421630f5fa228509 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 11:33:25 -0500 Subject: [PATCH 103/127] CP docs progress --- x/ccv/provider/keydel/keydel_test.go | 305 ++++++++++++++++----------- 1 file changed, 179 insertions(+), 126 deletions(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index baf5dbfbb6..9711572665 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -7,56 +7,67 @@ import ( "github.com/stretchr/testify/require" ) +// Num traces to run for heuristic testing const NUM_TRACES = 1000 + +// Len of trace for a single heuristic testing run const TRACE_LEN = 1000 + +// Number of validators to simulate const NUM_VALS = 4 + +// Number of foreign keys in the universe +// (This is constrained to ensure overlap edge cases are tested) const NUM_FKS = 50 type keyMapEntry struct { - lk PK - fk CK + pk PK + ck CK } type traceStep struct { - mapInstructions []keyMapEntry - localUpdates []update - tp int - tc int - tm int + keyMapEntries []keyMapEntry + providerUpdates []update + timeProvider int + timeConsumer int + timeMaturity int } type driver struct { - t *testing.T - kd *KeyDel - trace []traceStep - lastTP int - lastTC int - lastTM int + t *testing.T + kd *KeyDel + trace []traceStep + lastTimeProvider int + lastTimeConsumer int + lastTimeMaturity int // indexed by time (starting at 0) mappings []map[PK]CK // indexed by time (starting at 0) - foreignUpdates [][]update + consumerUpdates [][]update // indexed by time (starting at 0) - localValSets []valSet + localValSets []valSet + // The validator set from the perspective of + // the consumer chain. 
foreignValSet valSet } func makeDriver(t *testing.T, trace []traceStep) driver { d := driver{} d.t = t - e := MakeKeyDel() - d.kd = &e + kd := MakeKeyDel() + d.kd = &kd d.trace = trace - d.lastTP = 0 - d.lastTC = 0 - d.lastTM = 0 + d.lastTimeProvider = 0 + d.lastTimeConsumer = 0 + d.lastTimeMaturity = 0 d.mappings = []map[PK]CK{} - d.foreignUpdates = [][]update{} + d.consumerUpdates = [][]update{} d.localValSets = []valSet{} d.foreignValSet = valSet{} return d } +// Utility struct to make simulating a validator set easier. type valSet struct { keyToPower map[int]int } @@ -65,6 +76,7 @@ func makeValSet() valSet { return valSet{keyToPower: map[int]int{}} } +// Apply a batch of (key, power) updates to the known validator set. func (vs *valSet) applyUpdates(updates []update) { for _, u := range updates { delete(vs.keyToPower, u.key) @@ -74,10 +86,14 @@ func (vs *valSet) applyUpdates(updates []update) { } } -func (d *driver) applyMapInstructions(instructions []keyMapEntry) { - for _, instruction := range instructions { - _ = d.kd.SetProviderKeyToConsumerKey(instruction.lk, instruction.fk) +// Apply a list of (pk, ck) mapping requests to the KeyDel class instance +func (d *driver) applyKeyMapEntries(entries []keyMapEntry) { + for _, e := range entries { + // TRY to map provider key pk to consumer key ck. + // (May fail due to API constraints, this is correct) + _ = d.kd.SetProviderKeyToConsumerKey(e.pk, e.ck) } + // Duplicate the mapping for referencing later in tests. copy := map[PK]CK{} for lk, fk := range d.kd.pkToCk { copy[lk] = fk @@ -85,101 +101,166 @@ func (d *driver) applyMapInstructions(instructions []keyMapEntry) { d.mappings = append(d.mappings, copy) } -func (d *driver) applyLocalUpdates(localUpdates []update) { +// Apply a list of provider validator power updates +func (d *driver) applyProviderUpdates(providerUPdates []update) { + // Duplicate the previous valSet so that it can be referenced + // later in tests. valSet := makeValSet() - for lk, power := range d.localValSets[d.lastTP].keyToPower { - valSet.keyToPower[lk] = power + for pk, power := range d.localValSets[d.lastTimeProvider].keyToPower { + valSet.keyToPower[pk] = power } - valSet.applyUpdates(localUpdates) + valSet.applyUpdates(providerUPdates) d.localValSets = append(d.localValSets, valSet) } +// Run a trace +// This includes bootstrapping the data structure with the first (init) +// step of the trace, and running a sequence of steps afterwards. +// Internal and external invariants (properties) of the data structure +// are tested after each step. 
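// (Editorial aside, not part of this commit: a rough usage sketch of the driver.
// The test name is hypothetical; getTrace and makeDriver are defined elsewhere
// in this file.)
//
//	func TestSingleTrace(t *testing.T) {
//	    trace := getTrace(t)      // random trace; its first step is the init step
//	    d := makeDriver(t, trace)
//	    d.run()                   // checks internal and external invariants after each step
//	}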
func (d *driver) run() { + // Initialise { init := d.trace[0] // Set the initial map - d.applyMapInstructions(init.mapInstructions) + d.applyKeyMapEntries(init.keyMapEntries) // Set the initial local set d.localValSets = append(d.localValSets, makeValSet()) - d.localValSets[init.tp].applyUpdates(init.localUpdates) + d.localValSets[init.timeProvider].applyUpdates(init.providerUpdates) // Set the initial foreign set - d.foreignUpdates = append(d.foreignUpdates, d.kd.ComputeUpdates(init.tp, init.localUpdates)) + d.consumerUpdates = append(d.consumerUpdates, d.kd.ComputeUpdates(init.timeProvider, init.providerUpdates)) // The first foreign set equal to the local set at time 0 d.foreignValSet = makeValSet() - d.foreignValSet.applyUpdates(d.foreignUpdates[init.tc]) - d.kd.PruneUnusedKeys(init.tm) + d.foreignValSet.applyUpdates(d.consumerUpdates[init.timeConsumer]) + d.kd.PruneUnusedKeys(init.timeMaturity) } // Sanity check the initial state require.Len(d.t, d.mappings, 1) - require.Len(d.t, d.foreignUpdates, 1) + require.Len(d.t, d.consumerUpdates, 1) require.Len(d.t, d.localValSets, 1) - // Check properties for each state after the initial + // Check properties for each step after the initial one for _, s := range d.trace[1:] { - if d.lastTP < s.tp { - // Provider time increment: - // Apply some key mappings and create some new validator power updates - d.applyMapInstructions(s.mapInstructions) - d.applyLocalUpdates(s.localUpdates) - d.foreignUpdates = append(d.foreignUpdates, d.kd.ComputeUpdates(s.tp, s.localUpdates)) - d.lastTP = s.tp + if d.lastTimeProvider < s.timeProvider { + // Provider time increase: + // Apply some new key mapping requests to KeyDel, and create new validator + // power updates. + d.applyKeyMapEntries(s.keyMapEntries) + d.applyProviderUpdates(s.providerUpdates) + // Store the updates, to reference later in tests. + d.consumerUpdates = append(d.consumerUpdates, d.kd.ComputeUpdates(s.timeProvider, s.providerUpdates)) + d.lastTimeProvider = s.timeProvider } - if d.lastTC < s.tc { - for j := d.lastTC + 1; j <= s.tc; j++ { - d.foreignValSet.applyUpdates(d.foreignUpdates[j]) + if d.lastTimeConsumer < s.timeConsumer { + // Consumer time increase: + // For each unit of time that has passed since the last increase, apply + // any updates which have been 'emitted' by a provider time increase step. + for j := d.lastTimeConsumer + 1; j <= s.timeConsumer; j++ { + d.foreignValSet.applyUpdates(d.consumerUpdates[j]) } - d.lastTC = s.tc + d.lastTimeConsumer = s.timeConsumer } - if d.lastTM < s.tm { - // Models maturations being received on the provider. - d.kd.PruneUnusedKeys(s.tm) - d.lastTM = s.tm + if d.lastTimeMaturity < s.timeMaturity { + // Maturity time increase: + // For each unit of time that has passed since the last increase, + // a maturity is 'available'. We test batch maturity. + d.kd.PruneUnusedKeys(s.timeMaturity) + d.lastTimeMaturity = s.timeMaturity } + + // Do checks require.True(d.t, d.kd.internalInvariants()) - d.checkProperties() + d.externalInvariants() } } -func (d *driver) checkProperties() { +// Check invariants which are 'external' to the data structure being used. +// That is: these invariants make sense in the context of the wider system, +// and aren't specifically about the KeyDel data structure internal state. +// +// There are three invariants +// +// 1. Validator Set Replication +// 'All consumer validator sets are some earlier provider validator set' +// +// 2. 
Queries +// 'It is always possible to query the provider key for a given consumer +// key, when the consumer can still make slash requests' +// +// 3. Pruning +// 'When the pruning method is used correctly, the internal state of the +// data structure does not grow unboundedly' +// +// Please see body for details. +func (d *driver) externalInvariants() { /* - For a consumer who has received updates up to VSCID i, its + For a consumer who has received updates up to vscid i, its local validator set must be equal to the set on the provider when i was sent, mapped through the mapping at that time. */ validatorSetReplication := func() { - foreignSet := d.foreignValSet.keyToPower - // Get the provider set at the corresponding time. - localSet := d.localValSets[d.lastTC].keyToPower + // Get the consumer set. + cSet := d.foreignValSet.keyToPower + // Get the provider set - at the corresponding time. + pSet := d.localValSets[d.lastTimeConsumer].keyToPower - // Compute a lookup mapping consumer powers - // back to provider powers, to enable comparison. - foreignSetAsLocal := map[PK]int{} + // Compute a reverse lookup allowing comparison + // of the two sets. + cSetLikePSet := map[PK]int{} { - mapping := d.mappings[d.lastTC] + mapping := d.mappings[d.lastTimeConsumer] inverseMapping := map[CK]PK{} - for lk, fk := range mapping { - inverseMapping[fk] = lk + for pk, ck := range mapping { + inverseMapping[ck] = pk } - for fk, power := range foreignSet { - foreignSetAsLocal[inverseMapping[fk]] = power + for ck, power := range cSet { + cSetLikePSet[inverseMapping[ck]] = power } } - // Ensure that the sets match exactly - for lk, expectedPower := range localSet { - actualPower := foreignSetAsLocal[lk] + // Check that the two validator sets match exactly. + for pk, expectedPower := range pSet { + actualPower := cSetLikePSet[pk] require.Equal(d.t, expectedPower, actualPower) } - for lk, actualPower := range foreignSetAsLocal { - expectedPower := localSet[lk] + for pk, actualPower := range cSetLikePSet { + expectedPower := pSet[pk] require.Equal(d.t, expectedPower, actualPower) } } + /* + TODO: + */ + queries := func() { + // For each fk known to the consumer + for consumerFK := range d.foreignValSet.keyToPower { + queriedLK, err := d.kd.GetProviderKey(consumerFK) + // There must be a corresponding local key + require.Nil(d.t, err) + providerFKs := map[CK]bool{} + // The local key must be the one that was actually referenced + // in the latest mapping used to compute updates sent to the + // consumer. + mapping := d.mappings[d.lastTimeConsumer] + for providerLK, providerFK := range mapping { + require.Falsef(d.t, providerFKs[providerFK], "two local keys map to the same foreign key") + providerFKs[providerFK] = true + if consumerFK == providerFK { + // A mapping to the consumer FK was found + // The corresponding LK must be the one queried. + require.Equal(d.t, providerLK, queriedLK) + } + } + // Check that the comparison was actually made! 
+ require.Truef(d.t, providerFKs[consumerFK], "no mapping found for foreign key") + } + } + /* Two more properties which must be satisfied by KeyDel when used correctly inside a wider system: @@ -198,13 +279,13 @@ func (d *driver) checkProperties() { pruning := func() { expectQueryable := map[CK]bool{} - for i := 0; i <= d.lastTM; i++ { - for _, u := range d.foreignUpdates[i] { + for i := 0; i <= d.lastTimeMaturity; i++ { + for _, u := range d.consumerUpdates[i] { expectQueryable[u.key] = 0 < u.power } } - for i := d.lastTM + 1; i <= d.lastTP; i++ { - for _, u := range d.foreignUpdates[i] { + for i := d.lastTimeMaturity + 1; i <= d.lastTimeProvider; i++ { + for _, u := range d.consumerUpdates[i] { expectQueryable[u.key] = true } } @@ -224,37 +305,9 @@ func (d *driver) checkProperties() { } } - /* - TODO: - */ - queries := func() { - // For each fk known to the consumer - for consumerFK := range d.foreignValSet.keyToPower { - queriedLK, err := d.kd.GetProviderKey(consumerFK) - // There must be a corresponding local key - require.Nil(d.t, err) - providerFKs := map[CK]bool{} - // The local key must be the one that was actually referenced - // in the latest mapping used to compute updates sent to the - // consumer. - mapping := d.mappings[d.lastTC] - for providerLK, providerFK := range mapping { - require.Falsef(d.t, providerFKs[providerFK], "two local keys map to the same foreign key") - providerFKs[providerFK] = true - if consumerFK == providerFK { - // A mapping to the consumer FK was found - // The corresponding LK must be the one queried. - require.Equal(d.t, providerLK, queriedLK) - } - } - // Check that the comparison was actually made! - require.Truef(d.t, providerFKs[consumerFK], "no mapping found for foreign key") - } - } - validatorSetReplication() - pruning() queries() + pruning() } @@ -293,11 +346,11 @@ func getTrace(t *testing.T) []traceStep { ret := []traceStep{ { // Hard code initial mapping - mapInstructions: initialMappings, - localUpdates: localUpdates(), - tp: 0, - tc: 0, - tm: 0, + keyMapEntries: initialMappings, + providerUpdates: localUpdates(), + timeProvider: 0, + timeConsumer: 0, + timeMaturity: 0, }, } @@ -306,16 +359,16 @@ func getTrace(t *testing.T) []traceStep { last := ret[len(ret)-1] if choice == 0 { ret = append(ret, traceStep{ - mapInstructions: mappings(), - localUpdates: localUpdates(), - tp: last.tp + 1, - tc: last.tc, - tm: last.tm, + keyMapEntries: mappings(), + providerUpdates: localUpdates(), + timeProvider: last.timeProvider + 1, + timeConsumer: last.timeConsumer, + timeMaturity: last.timeMaturity, }) } if choice == 1 { - curr := last.tc - limInclusive := last.tp + curr := last.timeConsumer + limInclusive := last.timeProvider if curr < limInclusive { // add in [1, limInclusive - curr] // rand in [0, limInclusive - curr - 1] @@ -323,26 +376,26 @@ func getTrace(t *testing.T) []traceStep { newTC := rand.Intn(limInclusive-curr) + curr + 1 require.True(t, curr < newTC && curr <= limInclusive) ret = append(ret, traceStep{ - mapInstructions: nil, - localUpdates: nil, - tp: last.tp, - tc: newTC, - tm: last.tm, + keyMapEntries: nil, + providerUpdates: nil, + timeProvider: last.timeProvider, + timeConsumer: newTC, + timeMaturity: last.timeMaturity, }) } } if choice == 2 { - curr := last.tm - limInclusive := last.tc + curr := last.timeMaturity + limInclusive := last.timeConsumer if curr < limInclusive { newTM := rand.Intn(limInclusive-curr) + curr + 1 require.True(t, curr < newTM && curr <= limInclusive) ret = append(ret, traceStep{ - mapInstructions: nil, - 
localUpdates: nil, - tp: last.tp, - tc: last.tc, - tm: newTM, + keyMapEntries: nil, + providerUpdates: nil, + timeProvider: last.timeProvider, + timeConsumer: last.timeConsumer, + timeMaturity: newTM, }) } } From 2d32f66b89fb46062e2d517bdec38e65724bead1 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 11:58:18 -0500 Subject: [PATCH 104/127] Major symbol renames --- x/ccv/provider/keydel/keydel.go | 16 +-- x/ccv/provider/keydel/keydel_test.go | 156 +++++++++++++++------------ 2 files changed, 98 insertions(+), 74 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index 6c3222f79b..fa511d3220 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -67,7 +67,7 @@ func (e *KeyDel) GetProviderKey(fk CK) (PK, error) { } else if lk, ok := e.CkToPk[fk]; ok { return lk, nil } else { - return -1, errors.New("local key not found for foreign key") + return -1, errors.New("provider key not found for consumer key") } } @@ -95,11 +95,11 @@ func (e *KeyDel) ComputeUpdates(vscid VSCID, providerUpdates []update) (consumer updates[u.key] = u.power } - foreign := e.inner(vscid, updates) + updates = e.inner(vscid, updates) consumerUpdates = []update{} - for fk, power := range foreign { + for fk, power := range updates { consumerUpdates = append(consumerUpdates, update{key: fk, power: power}) } @@ -176,7 +176,7 @@ func (e *KeyDel) inner(vscid VSCID, providerUpdates map[PK]int) map[CK]int { // Returns true iff internal invariants hold func (e *KeyDel) internalInvariants() bool { - // No two local keys can map to the same foreign key + // No two provider keys can map to the same consumer key // (lkToFk is sane) seen := map[CK]bool{} for _, fk := range e.pkToCk { @@ -194,8 +194,8 @@ func (e *KeyDel) internalInvariants() bool { } } - // All foreign keys mapping to local keys are actually - // mapped to by the local key. + // All consumer keys mapping to provider keys are actually + // mapped to by the provider key. // (fkToLk is sane) for fk := range e.CkToPk { good := false @@ -210,8 +210,8 @@ func (e *KeyDel) internalInvariants() bool { } } - // If a foreign key is mapped to a local key (currently) - // any memo containing the same foreign key has the same + // If a consumer key is mapped to a provider key (currently) + // any memo containing the same consumer key has the same // mapping. // (Ensures lookups are correct) for fk, lk := range e.CkToPk { diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 9711572665..7b30430df4 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -16,9 +16,9 @@ const TRACE_LEN = 1000 // Number of validators to simulate const NUM_VALS = 4 -// Number of foreign keys in the universe +// Number of consumer keys in the universe // (This is constrained to ensure overlap edge cases are tested) -const NUM_FKS = 50 +const NUM_CKS = 50 type keyMapEntry struct { pk PK @@ -45,10 +45,10 @@ type driver struct { // indexed by time (starting at 0) consumerUpdates [][]update // indexed by time (starting at 0) - localValSets []valSet + providerValsets []valset // The validator set from the perspective of // the consumer chain. 
- foreignValSet valSet + consumerValsets valset } func makeDriver(t *testing.T, trace []traceStep) driver { @@ -62,22 +62,22 @@ func makeDriver(t *testing.T, trace []traceStep) driver { d.lastTimeMaturity = 0 d.mappings = []map[PK]CK{} d.consumerUpdates = [][]update{} - d.localValSets = []valSet{} - d.foreignValSet = valSet{} + d.providerValsets = []valset{} + d.consumerValsets = valset{} return d } // Utility struct to make simulating a validator set easier. -type valSet struct { +type valset struct { keyToPower map[int]int } -func makeValSet() valSet { - return valSet{keyToPower: map[int]int{}} +func makeValset() valset { + return valset{keyToPower: map[int]int{}} } // Apply a batch of (key, power) updates to the known validator set. -func (vs *valSet) applyUpdates(updates []update) { +func (vs *valset) applyUpdates(updates []update) { for _, u := range updates { delete(vs.keyToPower, u.key) if 0 < u.power { @@ -105,12 +105,12 @@ func (d *driver) applyKeyMapEntries(entries []keyMapEntry) { func (d *driver) applyProviderUpdates(providerUPdates []update) { // Duplicate the previous valSet so that it can be referenced // later in tests. - valSet := makeValSet() - for pk, power := range d.localValSets[d.lastTimeProvider].keyToPower { + valSet := makeValset() + for pk, power := range d.providerValsets[d.lastTimeProvider].keyToPower { valSet.keyToPower[pk] = power } valSet.applyUpdates(providerUPdates) - d.localValSets = append(d.localValSets, valSet) + d.providerValsets = append(d.providerValsets, valSet) } // Run a trace @@ -125,21 +125,21 @@ func (d *driver) run() { init := d.trace[0] // Set the initial map d.applyKeyMapEntries(init.keyMapEntries) - // Set the initial local set - d.localValSets = append(d.localValSets, makeValSet()) - d.localValSets[init.timeProvider].applyUpdates(init.providerUpdates) - // Set the initial foreign set + // Set the initial provider set + d.providerValsets = append(d.providerValsets, makeValset()) + d.providerValsets[init.timeProvider].applyUpdates(init.providerUpdates) + // Set the initial consumer set d.consumerUpdates = append(d.consumerUpdates, d.kd.ComputeUpdates(init.timeProvider, init.providerUpdates)) - // The first foreign set equal to the local set at time 0 - d.foreignValSet = makeValSet() - d.foreignValSet.applyUpdates(d.consumerUpdates[init.timeConsumer]) + // The first consumer set equal to the provider set at time 0 + d.consumerValsets = makeValset() + d.consumerValsets.applyUpdates(d.consumerUpdates[init.timeConsumer]) d.kd.PruneUnusedKeys(init.timeMaturity) } // Sanity check the initial state require.Len(d.t, d.mappings, 1) require.Len(d.t, d.consumerUpdates, 1) - require.Len(d.t, d.localValSets, 1) + require.Len(d.t, d.providerValsets, 1) // Check properties for each step after the initial one for _, s := range d.trace[1:] { @@ -158,7 +158,7 @@ func (d *driver) run() { // For each unit of time that has passed since the last increase, apply // any updates which have been 'emitted' by a provider time increase step. for j := d.lastTimeConsumer + 1; j <= s.timeConsumer; j++ { - d.foreignValSet.applyUpdates(d.consumerUpdates[j]) + d.consumerValsets.applyUpdates(d.consumerUpdates[j]) } d.lastTimeConsumer = s.timeConsumer } @@ -198,15 +198,15 @@ func (d *driver) externalInvariants() { /* For a consumer who has received updates up to vscid i, its - local validator set must be equal to the set on the provider + provider validator set must be equal to the set on the provider when i was sent, mapped through the mapping at that time. 
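	   (Editorial aside, with made-up values: if the provider set when vscid i
	   was sent was {pk0: 5, pk1: 7} and the mapping at that time was
	   {pk0 -> ck8, pk1 -> ck9}, then once the consumer has applied every
	   update up to and including i, its validator set must be exactly
	   {ck8: 5, ck9: 7}.)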
*/ validatorSetReplication := func() { // Get the consumer set. - cSet := d.foreignValSet.keyToPower + cSet := d.consumerValsets.keyToPower // Get the provider set - at the corresponding time. - pSet := d.localValSets[d.lastTimeConsumer].keyToPower + pSet := d.providerValsets[d.lastTimeConsumer].keyToPower // Compute a reverse lookup allowing comparison // of the two sets. @@ -234,70 +234,93 @@ func (d *driver) externalInvariants() { } /* - TODO: + For any key that the consumer is aware of, because it has + received that key at some time in the past, and has not yet + returned the maturity vscid for its removal: + the key is useable as a query parameter to lookup the key + of the validator which should be slashed for misbehavior. */ queries := func() { - // For each fk known to the consumer - for consumerFK := range d.foreignValSet.keyToPower { - queriedLK, err := d.kd.GetProviderKey(consumerFK) - // There must be a corresponding local key + // For each key known to the consumer + for ck := range d.consumerValsets.keyToPower { + + // The query must return a result + pkQueried, err := d.kd.GetProviderKey(ck) require.Nil(d.t, err) - providerFKs := map[CK]bool{} - // The local key must be the one that was actually referenced - // in the latest mapping used to compute updates sent to the + + // The provider key must be the one that was actually referenced + // in the latest trueMapping used to compute updates sent to the // consumer. - mapping := d.mappings[d.lastTimeConsumer] - for providerLK, providerFK := range mapping { - require.Falsef(d.t, providerFKs[providerFK], "two local keys map to the same foreign key") - providerFKs[providerFK] = true - if consumerFK == providerFK { - // A mapping to the consumer FK was found - // The corresponding LK must be the one queried. - require.Equal(d.t, providerLK, queriedLK) + cks_TRUE := map[CK]bool{} + trueMapping := d.mappings[d.lastTimeConsumer] + for pk_TRUE, ck_TRUE := range trueMapping { + + // Sanity check: no two provider keys should map to the same consumer key + require.Falsef(d.t, cks_TRUE[ck_TRUE], "two provider keys map to the same consumer key") + + // Record that this consumer key was indeed mapped to by some provider key + // at time lastTimeConsumer + cks_TRUE[ck_TRUE] = true + + // If the consumer key is the one used as a query param + if ck == ck_TRUE { + // Then the provider key returned by the query must be exactly + // the same one as was actually mapped to. + require.Equal(d.t, pk_TRUE, pkQueried) } } - // Check that the comparison was actually made! - require.Truef(d.t, providerFKs[consumerFK], "no mapping found for foreign key") + // Check that the comparison was actually made, and that the test + // actually did something. + require.Truef(d.t, cks_TRUE[ck], "no mapping found for consumer key") } } /* - Two more properties which must be satisfied by KeyDel when - used correctly inside a wider system: - - 1. (Consumer Initiated Slashing Property) If a foreign key IS used in an update - for the consumer, with a positive power, at VSCID i, and no 0 power update - follows, then the local key associated to it must be queryable. - Phrased another way: foreign keys which are known to the consumer must be - useable for slashing indefinitely. - 2. (Pruning) If a foreign key IS NOT used in an update for a VSCID j with i < j, - and i is a 0 power update and has matured, then the foreign key is deleted - from storage. - Phrased another way: if the last 0 power update has matured, the key should - be pruned. 
+ All keys that the consumer definitely cannot use as a parameter in + a slash request must eventually be pruned from state. + A consumer can still reference a key if the last update it received + for the key had a positive power associated to it, OR the last update + had a 0 power associated (deletion) but the maturity period for that + update has not yet elapsed (and the maturity was not yet received + on the provider chain). */ pruning := func() { + + // Do we expect to be able to query the provider key for a given consumer + // key? expectQueryable := map[CK]bool{} for i := 0; i <= d.lastTimeMaturity; i++ { for _, u := range d.consumerUpdates[i] { + // If the latest update for a given consumer key was dispatched + // AND also matured since the last maturity, then + // 1) if that update was a positive power update then no subsequent + // zero power update can have matured. Thus the key should be + // queryable. + // 2) if that update was a zero positive power update then the + // key should not be queryable unless it was used in a subsquent + // update (see next block). expectQueryable[u.key] = 0 < u.power } } for i := d.lastTimeMaturity + 1; i <= d.lastTimeProvider; i++ { for _, u := range d.consumerUpdates[i] { + // If a positive OR zero power update was RECENTLY received + // for the consumer, then the key must be queryable. expectQueryable[u.key] = true } } - for _, fk := range d.kd.pkToCk { - expectQueryable[fk] = true + // If a consumer key is CURRENTLY mapped to by a provider key, it + // must be queryable. + for _, ck := range d.kd.pkToCk { + expectQueryable[ck] = true } - // Simply check every foreign key for the correct queryable-ness. - for fk := 0; fk < NUM_FKS; fk++ { - _, err := d.kd.GetProviderKey(fk) + // Simply check every consumer key for the correct queryable-ness. + for ck := 0; ck < NUM_CKS; ck++ { + _, err := d.kd.GetProviderKey(ck) actualQueryable := err == nil - if expect, found := expectQueryable[fk]; found && expect { + if expect, found := expectQueryable[ck]; found && expect { require.True(d.t, actualQueryable) } else { require.False(d.t, actualQueryable) @@ -314,6 +337,7 @@ func (d *driver) externalInvariants() { // Return a randomly generated list of steps // which can be used to execute actions for testing. 
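// (Editorial aside, not part of this commit: a sketch of the intended shape of
// a generated trace, with made-up values. The three clocks always satisfy
// timeMaturity <= timeConsumer <= timeProvider.)
//
//	[]traceStep{
//	    {keyMapEntries: ..., providerUpdates: ..., timeProvider: 0, timeConsumer: 0, timeMaturity: 0}, // init step
//	    {keyMapEntries: ..., providerUpdates: ..., timeProvider: 1, timeConsumer: 0, timeMaturity: 0}, // provider block ends
//	    {timeProvider: 1, timeConsumer: 1, timeMaturity: 0},                                           // consumer receives VSC 1
//	    {timeProvider: 1, timeConsumer: 1, timeMaturity: 1},                                           // provider receives maturity of VSC 1
//	}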
func getTrace(t *testing.T) []traceStep { + // TODO: check the hardcoded numbers mappings := func() []keyMapEntry { ret := []keyMapEntry{} @@ -322,13 +346,13 @@ func getTrace(t *testing.T) []traceStep { // include 0 to all validators include := rand.Intn(NUM_VALS + 1) for _, lk := range rand.Perm(NUM_VALS)[0:include] { - ret = append(ret, keyMapEntry{lk, rand.Intn(NUM_FKS)}) + ret = append(ret, keyMapEntry{lk, rand.Intn(NUM_CKS)}) } } return ret } - localUpdates := func() []update { + providerUpdates := func() []update { ret := []update{} // include 0 to all validators include := rand.Intn(NUM_VALS + 1) @@ -347,7 +371,7 @@ func getTrace(t *testing.T) []traceStep { { // Hard code initial mapping keyMapEntries: initialMappings, - providerUpdates: localUpdates(), + providerUpdates: providerUpdates(), timeProvider: 0, timeConsumer: 0, timeMaturity: 0, @@ -360,7 +384,7 @@ func getTrace(t *testing.T) []traceStep { if choice == 0 { ret = append(ret, traceStep{ keyMapEntries: mappings(), - providerUpdates: localUpdates(), + providerUpdates: providerUpdates(), timeProvider: last.timeProvider + 1, timeConsumer: last.timeConsumer, timeMaturity: last.timeMaturity, From e7323c63d5fa094089a1797b933a5fa5ac2b7697 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 12:08:41 -0500 Subject: [PATCH 105/127] Docs --- x/ccv/provider/keydel/keydel_test.go | 47 +++++++++++++++++++--------- 1 file changed, 33 insertions(+), 14 deletions(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index 7b30430df4..fc4cf88cf6 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -339,14 +339,18 @@ func (d *driver) externalInvariants() { func getTrace(t *testing.T) []traceStep { // TODO: check the hardcoded numbers - mappings := func() []keyMapEntry { + keyMappings := func() []keyMapEntry { ret := []keyMapEntry{} - // Go several times to have overlapping validator updates - for i := 0; i < 2; i++ { - // include 0 to all validators - include := rand.Intn(NUM_VALS + 1) - for _, lk := range rand.Perm(NUM_VALS)[0:include] { - ret = append(ret, keyMapEntry{lk, rand.Intn(NUM_CKS)}) + + const NUM_ITS = 2 // Chosen arbitrarily/heuristically + // Do this NUM_ITS times, to be able to generate conflicting mappings. + // This is allowed by the KeyDel API, so it must be tested. + for i := 0; i < NUM_ITS; i++ { + // include none (to) all validators + pks := rand.Perm(NUM_VALS)[0:rand.Intn(NUM_VALS+1)] + for _, pk := range pks { + ck := rand.Intn(NUM_CKS) + ret = append(ret, keyMapEntry{pk, ck}) } } return ret @@ -354,14 +358,22 @@ func getTrace(t *testing.T) []traceStep { providerUpdates := func() []update { ret := []update{} - // include 0 to all validators - include := rand.Intn(NUM_VALS + 1) - for _, lk := range rand.Perm(NUM_VALS)[0:include] { - ret = append(ret, update{key: lk, power: rand.Intn(3)}) + + // include none (to) all validators + pks := rand.Perm(NUM_VALS)[0:rand.Intn(NUM_VALS+1)] + for _, pk := range pks { + // Only three values are interesting + // 0: deletion + // 1: positive + // 2: positive (change) + power := rand.Intn(3) + ret = append(ret, update{key: pk, power: power}) } return ret } + // Get an initial key mapping. + // The real system may use some manual set defaults. 
initialMappings := []keyMapEntry{} for i := 0; i < NUM_VALS; i++ { initialMappings = append(initialMappings, keyMapEntry{i, i}) @@ -382,8 +394,10 @@ func getTrace(t *testing.T) []traceStep { choice := rand.Intn(3) last := ret[len(ret)-1] if choice == 0 { + // Increment provider time, and generate + // new key mappings and validator updates. ret = append(ret, traceStep{ - keyMapEntries: mappings(), + keyMapEntries: keyMappings(), providerUpdates: providerUpdates(), timeProvider: last.timeProvider + 1, timeConsumer: last.timeConsumer, @@ -391,6 +405,8 @@ func getTrace(t *testing.T) []traceStep { }) } if choice == 1 { + // If possible, increase consumer time. + // This models receiving VSC packets on the consumer. curr := last.timeConsumer limInclusive := last.timeProvider if curr < limInclusive { @@ -398,7 +414,7 @@ func getTrace(t *testing.T) []traceStep { // rand in [0, limInclusive - curr - 1] // bound is [0, limInclusive - curr) newTC := rand.Intn(limInclusive-curr) + curr + 1 - require.True(t, curr < newTC && curr <= limInclusive) + require.True(t, curr < newTC && newTC <= limInclusive) ret = append(ret, traceStep{ keyMapEntries: nil, providerUpdates: nil, @@ -409,11 +425,14 @@ func getTrace(t *testing.T) []traceStep { } } if choice == 2 { + // If possible, increase maturity time. + // This models sending maturities on the consumer (and also + // receiving them on the provider). curr := last.timeMaturity limInclusive := last.timeConsumer if curr < limInclusive { newTM := rand.Intn(limInclusive-curr) + curr + 1 - require.True(t, curr < newTM && curr <= limInclusive) + require.True(t, curr < newTM && newTM <= limInclusive) ret = append(ret, traceStep{ keyMapEntries: nil, providerUpdates: nil, From c860422488c78aa7e9da23fe8e81f1c04e3f6ef3 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 12:15:52 -0500 Subject: [PATCH 106/127] Start adding some basic unit tests --- x/ccv/provider/keydel/keydel_test.go | 28 +++++++++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keydel/keydel_test.go index fc4cf88cf6..b0c788fe77 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keydel/keydel_test.go @@ -337,7 +337,6 @@ func (d *driver) externalInvariants() { // Return a randomly generated list of steps // which can be used to execute actions for testing. func getTrace(t *testing.T) []traceStep { - // TODO: check the hardcoded numbers keyMappings := func() []keyMapEntry { ret := []keyMapEntry{} @@ -459,3 +458,30 @@ func TestPropertiesRandomlyHeuristically(t *testing.T) { d.run() } } + +// Setting should enable a reverse query +func TestXSetReverseQuery(t *testing.T) { + kd := MakeKeyDel() + kd.SetProviderKeyToConsumerKey(42, 43) + actual, err := kd.GetProviderKey(43) // Queryable + require.Nil(t, err) + require.Equal(t, 42, actual) +} + +// Not setting should not enable a reverse query +func TestNoSetReverseQuery(t *testing.T) { + kd := MakeKeyDel() + _, err := kd.GetProviderKey(43) // Not queryable + require.NotNil(t, err) +} + +// Setting and replacing should no allow earlier reverse query +func TestXSetUnsetReverseQuery(t *testing.T) { + kd := MakeKeyDel() + kd.SetProviderKeyToConsumerKey(42, 43) + kd.SetProviderKeyToConsumerKey(42, 44) // Set to different value + _, err := kd.GetProviderKey(43) // Ealier value not queryable + require.NotNil(t, err) +} + +// TODO: add more of these.. 
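One candidate for the TODO above (an editorial sketch, not part of any commit in this series): walk a consumer key through the full memo lifecycle using only the API introduced so far — positive update, re-mapping, zero-power update, then pruning. The test name and the literal key/power values are invented; the sketch assumes the imports already present in keydel_test.go.

func TestZeroPowerUpdatePrunedAfterMaturity(t *testing.T) {
	kd := MakeKeyDel()
	kd.SetProviderKeyToConsumerKey(42, 43)

	// vscid 0: positive power for provider key 42, delivered under consumer key 43.
	kd.ComputeUpdates(0, []update{{key: 42, power: 100}})

	// Re-map provider key 42 to a fresh consumer key; the next ComputeUpdates
	// emits a zero-power (deletion) update for consumer key 43.
	kd.SetProviderKeyToConsumerKey(42, 44)
	kd.ComputeUpdates(1, []update{{key: 42, power: 100}})

	// Until the maturity of vscid 1 is pruned, key 43 must remain queryable.
	pk, err := kd.GetProviderKey(43)
	require.Nil(t, err)
	require.Equal(t, 42, pk)

	// After vscid 1 matures and is pruned, the zero-power memo for key 43 goes away.
	kd.PruneUnusedKeys(1)
	_, err = kd.GetProviderKey(43)
	require.NotNil(t, err)
}

The point of the final two assertions is that pruning, not the re-mapping itself, is what ends queryability of the old consumer key.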
From 2a1c59c5d3d09cb3d6dcc30d13de83371ff41e12 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 12:16:34 -0500 Subject: [PATCH 107/127] DELETE unused TLA+ model --- x/ccv/provider/keydel/main.cfg | 3 -- x/ccv/provider/keydel/main.tla | 72 ---------------------------------- 2 files changed, 75 deletions(-) delete mode 100644 x/ccv/provider/keydel/main.cfg delete mode 100644 x/ccv/provider/keydel/main.tla diff --git a/x/ccv/provider/keydel/main.cfg b/x/ccv/provider/keydel/main.cfg deleted file mode 100644 index ba7a938460..0000000000 --- a/x/ccv/provider/keydel/main.cfg +++ /dev/null @@ -1,3 +0,0 @@ -INIT Init -NEXT Next -INVARIANT Inv \ No newline at end of file diff --git a/x/ccv/provider/keydel/main.tla b/x/ccv/provider/keydel/main.tla deleted file mode 100644 index 8b1033f77a..0000000000 --- a/x/ccv/provider/keydel/main.tla +++ /dev/null @@ -1,72 +0,0 @@ ----- MODULE main ---- - -EXTENDS Integers, FiniteSets, Sequences, TLC, Apalache - -(* - - @typeAlias: lk = Str; - @typeAlias: fk = Str; - @typeAlias: mapping = $lk -> $fk; - @typeAlias: updates = $lk -> Int; - -*) - -TypeAliases == TRUE - -LKS == {"lk0", "lk1", "lk2"} -FKS == {"fk0", "fk1", "fk2", "fk3", "fk4", "fk5", "fk6", "fk7", "fk8"} - -VARIABLES - \* @type: $mapping; - Mapping, - \* @type: $updates; - Updates, - \* @type: Int; - TP, - \* @type: Int; - TC, - \* @type: Int; - TM - -Init == - \E m \in [LKS -> FKS], ss \in SUBSET LKS: - /\ \A a, b \in DOMAIN m : m[a] = m[b] => a = b - /\ Mapping = m - /\ Updates \in [ss -> 0..2] - /\ TP = 1 - /\ TC = 0 - /\ TM = 0 - -EndBlock == - \E m \in [LKS -> FKS], ss \in SUBSET LKS: - /\ \A a, b \in DOMAIN m : m[a] = m[b] => a = b - /\ Mapping' = m - /\ Updates' \in [ss -> 0..2] - /\ TP' = TP + 1 - /\ UNCHANGED TC - /\ UNCHANGED TM - -UpdateConsumer == - \E t \in (TC+1)..TP : - /\ UNCHANGED Mapping - /\ UNCHANGED Updates - /\ UNCHANGED TP - /\ TC' = t - /\ UNCHANGED TM - -ReceiveMaturities == - \E t \in (TM+1)..TC : - /\ UNCHANGED Mapping - /\ UNCHANGED Updates - /\ UNCHANGED TP - /\ UNCHANGED TC - /\ TM' = t - -Next == - \/ EndBlock - \/ UpdateConsumer - \/ ReceiveMaturities - -View == <> - -==== From 662e4f50795207f8d467565d2b292542937248f8 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 12:23:41 -0500 Subject: [PATCH 108/127] Symbol renames --- x/ccv/provider/keydel/keydel.go | 76 ++++++++++++++++----------------- 1 file changed, 38 insertions(+), 38 deletions(-) diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keydel/keydel.go index fa511d3220..25e6b03b5e 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keydel/keydel.go @@ -28,44 +28,44 @@ type memo struct { type KeyDel struct { pkToCk map[PK]CK - CkToPk map[CK]PK - CkToMemo map[CK]memo + ckToPk map[CK]PK + ckToMemo map[CK]memo } func MakeKeyDel() KeyDel { return KeyDel{ pkToCk: map[PK]CK{}, - CkToPk: map[CK]PK{}, - CkToMemo: map[CK]memo{}, + ckToPk: map[CK]PK{}, + ckToMemo: map[CK]memo{}, } } // TODO: -func (e *KeyDel) SetProviderKeyToConsumerKey(lk PK, fk CK) error { +func (e *KeyDel) SetProviderKeyToConsumerKey(pk PK, ck CK) error { inUse := false - if _, ok := e.CkToPk[fk]; ok { + if _, ok := e.ckToPk[ck]; ok { inUse = true } - if _, ok := e.CkToMemo[fk]; ok { + if _, ok := e.ckToMemo[ck]; ok { inUse = true } if inUse { return errors.New(`cannot reuse key which is in use or was recently in use`) } - if oldFk, ok := e.pkToCk[lk]; ok { - delete(e.CkToPk, oldFk) + if oldCk, ok := e.pkToCk[pk]; ok { + delete(e.ckToPk, oldCk) } - e.pkToCk[lk] = fk - e.CkToPk[fk] = lk + e.pkToCk[pk] = ck + 
e.ckToPk[ck] = pk return nil } // TODO: -func (e *KeyDel) GetProviderKey(fk CK) (PK, error) { - if u, ok := e.CkToMemo[fk]; ok { +func (e *KeyDel) GetProviderKey(ck CK) (PK, error) { + if u, ok := e.ckToMemo[ck]; ok { return u.pk, nil - } else if lk, ok := e.CkToPk[fk]; ok { - return lk, nil + } else if pk, ok := e.ckToPk[ck]; ok { + return pk, nil } else { return -1, errors.New("provider key not found for consumer key") } @@ -74,15 +74,15 @@ func (e *KeyDel) GetProviderKey(fk CK) (PK, error) { // TODO: func (e *KeyDel) PruneUnusedKeys(latestVscid VSCID) { toDel := []CK{} - for _, u := range e.CkToMemo { + for _, u := range e.ckToMemo { // If the last update was a deletion (0 power) and the update // matured then pruning is possible. if u.power == 0 && u.vscid <= latestVscid { toDel = append(toDel, u.ck) } } - for _, fk := range toDel { - delete(e.CkToMemo, fk) + for _, ck := range toDel { + delete(e.ckToMemo, ck) } } @@ -99,8 +99,8 @@ func (e *KeyDel) ComputeUpdates(vscid VSCID, providerUpdates []update) (consumer consumerUpdates = []update{} - for fk, power := range updates { - consumerUpdates = append(consumerUpdates, update{key: fk, power: power}) + for ck, power := range updates { + consumerUpdates = append(consumerUpdates, update{key: ck, power: power}) } return consumerUpdates @@ -112,7 +112,7 @@ func (e *KeyDel) inner(vscid VSCID, providerUpdates map[PK]int) map[CK]int { pks := []PK{} // Grab provider keys where the assigned consumer key has changed - for oldCk, u := range e.CkToMemo { + for oldCk, u := range e.ckToMemo { if newCk, ok := e.pkToCk[u.pk]; ok { if oldCk != newCk && 0 < u.power { pks = append(pks, u.pk) @@ -129,7 +129,7 @@ func (e *KeyDel) inner(vscid VSCID, providerUpdates map[PK]int) map[CK]int { // Create a read only copy, so that we can query while writing // updates to the old version. ckToMemo_READ_ONLY := map[CK]memo{} - for ck, memo := range e.CkToMemo { + for ck, memo := range e.ckToMemo { ckToMemo_READ_ONLY[ck] = memo } @@ -138,7 +138,7 @@ func (e *KeyDel) inner(vscid VSCID, providerUpdates map[PK]int) map[CK]int { if u.pk == pk && 0 < u.power { // For each provider key for which there was already a positive update // create a deletion update for the associated consumer key. - e.CkToMemo[u.ck] = memo{ck: u.ck, pk: pk, vscid: vscid, power: 0} + e.ckToMemo[u.ck] = memo{ck: u.ck, pk: pk, vscid: vscid, power: 0} ret[u.ck] = 0 } } @@ -165,7 +165,7 @@ func (e *KeyDel) inner(vscid VSCID, providerUpdates map[PK]int) map[CK]int { // are handled in earlier block. if 0 < power { ck := e.pkToCk[pk] - e.CkToMemo[ck] = memo{ck: ck, pk: pk, vscid: vscid, power: power} + e.ckToMemo[ck] = memo{ck: ck, pk: pk, vscid: vscid, power: power} ret[ck] = power } } @@ -177,30 +177,30 @@ func (e *KeyDel) inner(vscid VSCID, providerUpdates map[PK]int) map[CK]int { func (e *KeyDel) internalInvariants() bool { // No two provider keys can map to the same consumer key - // (lkToFk is sane) + // (pkToCk is sane) seen := map[CK]bool{} - for _, fk := range e.pkToCk { - if seen[fk] { + for _, ck := range e.pkToCk { + if seen[ck] { return false } - seen[fk] = true + seen[ck] = true } - // all values of lkToFk is a key of fkToLk + // all values of pkToCk is a key of ckToPk // (reverse lookup is always possible) - for _, fk := range e.pkToCk { - if _, ok := e.CkToPk[fk]; !ok { + for _, ck := range e.pkToCk { + if _, ok := e.ckToPk[ck]; !ok { return false } } // All consumer keys mapping to provider keys are actually // mapped to by the provider key. 
- // (fkToLk is sane) - for fk := range e.CkToPk { + // (ckToPk is sane) + for ck := range e.ckToPk { good := false - for _, candidateFk := range e.pkToCk { - if candidateFk == fk { + for _, candidateCk := range e.pkToCk { + if candidateCk == ck { good = true break } @@ -214,9 +214,9 @@ func (e *KeyDel) internalInvariants() bool { // any memo containing the same consumer key has the same // mapping. // (Ensures lookups are correct) - for fk, lk := range e.CkToPk { - if u, ok := e.CkToMemo[fk]; ok { - if lk != u.pk { + for ck, pk := range e.ckToPk { + if u, ok := e.ckToMemo[ck]; ok { + if pk != u.pk { return false } } From 12e1251ed232cdfd6939729be0a2a94af99acb94 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 13:01:54 -0500 Subject: [PATCH 109/127] rn symbol keydel -> keymap --- x/ccv/provider/{keydel => keymap}/.gitignore | 0 .../{keydel/keydel.go => keymap/keymap.go} | 20 +++++------ .../keydel_test.go => keymap/keymap_test.go} | 34 +++++++++---------- 3 files changed, 27 insertions(+), 27 deletions(-) rename x/ccv/provider/{keydel => keymap}/.gitignore (100%) rename x/ccv/provider/{keydel/keydel.go => keymap/keymap.go} (91%) rename x/ccv/provider/{keydel/keydel_test.go => keymap/keymap_test.go} (96%) diff --git a/x/ccv/provider/keydel/.gitignore b/x/ccv/provider/keymap/.gitignore similarity index 100% rename from x/ccv/provider/keydel/.gitignore rename to x/ccv/provider/keymap/.gitignore diff --git a/x/ccv/provider/keydel/keydel.go b/x/ccv/provider/keymap/keymap.go similarity index 91% rename from x/ccv/provider/keydel/keydel.go rename to x/ccv/provider/keymap/keymap.go index 25e6b03b5e..0cadeeec29 100644 --- a/x/ccv/provider/keydel/keydel.go +++ b/x/ccv/provider/keymap/keymap.go @@ -1,4 +1,4 @@ -package keydel +package keymap import ( "errors" @@ -26,14 +26,14 @@ type memo struct { // 3. integrate with create/destroy validator // 4. 
TODO: document this file -type KeyDel struct { +type KeyMap struct { pkToCk map[PK]CK ckToPk map[CK]PK ckToMemo map[CK]memo } -func MakeKeyDel() KeyDel { - return KeyDel{ +func MakeKeyMap() KeyMap { + return KeyMap{ pkToCk: map[PK]CK{}, ckToPk: map[CK]PK{}, ckToMemo: map[CK]memo{}, @@ -41,7 +41,7 @@ func MakeKeyDel() KeyDel { } // TODO: -func (e *KeyDel) SetProviderKeyToConsumerKey(pk PK, ck CK) error { +func (e *KeyMap) SetProviderKeyToConsumerKey(pk PK, ck CK) error { inUse := false if _, ok := e.ckToPk[ck]; ok { inUse = true @@ -61,7 +61,7 @@ func (e *KeyDel) SetProviderKeyToConsumerKey(pk PK, ck CK) error { } // TODO: -func (e *KeyDel) GetProviderKey(ck CK) (PK, error) { +func (e *KeyMap) GetProviderKey(ck CK) (PK, error) { if u, ok := e.ckToMemo[ck]; ok { return u.pk, nil } else if pk, ok := e.ckToPk[ck]; ok { @@ -72,7 +72,7 @@ func (e *KeyDel) GetProviderKey(ck CK) (PK, error) { } // TODO: -func (e *KeyDel) PruneUnusedKeys(latestVscid VSCID) { +func (e *KeyMap) PruneUnusedKeys(latestVscid VSCID) { toDel := []CK{} for _, u := range e.ckToMemo { // If the last update was a deletion (0 power) and the update @@ -87,7 +87,7 @@ func (e *KeyDel) PruneUnusedKeys(latestVscid VSCID) { } // TODO: -func (e *KeyDel) ComputeUpdates(vscid VSCID, providerUpdates []update) (consumerUpdates []update) { +func (e *KeyMap) ComputeUpdates(vscid VSCID, providerUpdates []update) (consumerUpdates []update) { updates := map[PK]int{} @@ -107,7 +107,7 @@ func (e *KeyDel) ComputeUpdates(vscid VSCID, providerUpdates []update) (consumer } // do inner work as part of ComputeUpdates -func (e *KeyDel) inner(vscid VSCID, providerUpdates map[PK]int) map[CK]int { +func (e *KeyMap) inner(vscid VSCID, providerUpdates map[PK]int) map[CK]int { pks := []PK{} @@ -174,7 +174,7 @@ func (e *KeyDel) inner(vscid VSCID, providerUpdates map[PK]int) map[CK]int { } // Returns true iff internal invariants hold -func (e *KeyDel) internalInvariants() bool { +func (e *KeyMap) internalInvariants() bool { // No two provider keys can map to the same consumer key // (pkToCk is sane) diff --git a/x/ccv/provider/keydel/keydel_test.go b/x/ccv/provider/keymap/keymap_test.go similarity index 96% rename from x/ccv/provider/keydel/keydel_test.go rename to x/ccv/provider/keymap/keymap_test.go index b0c788fe77..0a7aef21e1 100644 --- a/x/ccv/provider/keydel/keydel_test.go +++ b/x/ccv/provider/keymap/keymap_test.go @@ -1,4 +1,4 @@ -package keydel +package keymap import ( "math/rand" @@ -35,7 +35,7 @@ type traceStep struct { type driver struct { t *testing.T - kd *KeyDel + km *KeyMap trace []traceStep lastTimeProvider int lastTimeConsumer int @@ -54,8 +54,8 @@ type driver struct { func makeDriver(t *testing.T, trace []traceStep) driver { d := driver{} d.t = t - kd := MakeKeyDel() - d.kd = &kd + kd := MakeKeyMap() + d.km = &kd d.trace = trace d.lastTimeProvider = 0 d.lastTimeConsumer = 0 @@ -91,11 +91,11 @@ func (d *driver) applyKeyMapEntries(entries []keyMapEntry) { for _, e := range entries { // TRY to map provider key pk to consumer key ck. // (May fail due to API constraints, this is correct) - _ = d.kd.SetProviderKeyToConsumerKey(e.pk, e.ck) + _ = d.km.SetProviderKeyToConsumerKey(e.pk, e.ck) } // Duplicate the mapping for referencing later in tests. 
copy := map[PK]CK{} - for lk, fk := range d.kd.pkToCk { + for lk, fk := range d.km.pkToCk { copy[lk] = fk } d.mappings = append(d.mappings, copy) @@ -129,11 +129,11 @@ func (d *driver) run() { d.providerValsets = append(d.providerValsets, makeValset()) d.providerValsets[init.timeProvider].applyUpdates(init.providerUpdates) // Set the initial consumer set - d.consumerUpdates = append(d.consumerUpdates, d.kd.ComputeUpdates(init.timeProvider, init.providerUpdates)) + d.consumerUpdates = append(d.consumerUpdates, d.km.ComputeUpdates(init.timeProvider, init.providerUpdates)) // The first consumer set equal to the provider set at time 0 d.consumerValsets = makeValset() d.consumerValsets.applyUpdates(d.consumerUpdates[init.timeConsumer]) - d.kd.PruneUnusedKeys(init.timeMaturity) + d.km.PruneUnusedKeys(init.timeMaturity) } // Sanity check the initial state @@ -150,7 +150,7 @@ func (d *driver) run() { d.applyKeyMapEntries(s.keyMapEntries) d.applyProviderUpdates(s.providerUpdates) // Store the updates, to reference later in tests. - d.consumerUpdates = append(d.consumerUpdates, d.kd.ComputeUpdates(s.timeProvider, s.providerUpdates)) + d.consumerUpdates = append(d.consumerUpdates, d.km.ComputeUpdates(s.timeProvider, s.providerUpdates)) d.lastTimeProvider = s.timeProvider } if d.lastTimeConsumer < s.timeConsumer { @@ -166,12 +166,12 @@ func (d *driver) run() { // Maturity time increase: // For each unit of time that has passed since the last increase, // a maturity is 'available'. We test batch maturity. - d.kd.PruneUnusedKeys(s.timeMaturity) + d.km.PruneUnusedKeys(s.timeMaturity) d.lastTimeMaturity = s.timeMaturity } // Do checks - require.True(d.t, d.kd.internalInvariants()) + require.True(d.t, d.km.internalInvariants()) d.externalInvariants() } } @@ -245,7 +245,7 @@ func (d *driver) externalInvariants() { for ck := range d.consumerValsets.keyToPower { // The query must return a result - pkQueried, err := d.kd.GetProviderKey(ck) + pkQueried, err := d.km.GetProviderKey(ck) require.Nil(d.t, err) // The provider key must be the one that was actually referenced @@ -312,13 +312,13 @@ func (d *driver) externalInvariants() { } // If a consumer key is CURRENTLY mapped to by a provider key, it // must be queryable. - for _, ck := range d.kd.pkToCk { + for _, ck := range d.km.pkToCk { expectQueryable[ck] = true } // Simply check every consumer key for the correct queryable-ness. 
for ck := 0; ck < NUM_CKS; ck++ { - _, err := d.kd.GetProviderKey(ck) + _, err := d.km.GetProviderKey(ck) actualQueryable := err == nil if expect, found := expectQueryable[ck]; found && expect { require.True(d.t, actualQueryable) @@ -461,7 +461,7 @@ func TestPropertiesRandomlyHeuristically(t *testing.T) { // Setting should enable a reverse query func TestXSetReverseQuery(t *testing.T) { - kd := MakeKeyDel() + kd := MakeKeyMap() kd.SetProviderKeyToConsumerKey(42, 43) actual, err := kd.GetProviderKey(43) // Queryable require.Nil(t, err) @@ -470,14 +470,14 @@ func TestXSetReverseQuery(t *testing.T) { // Not setting should not enable a reverse query func TestNoSetReverseQuery(t *testing.T) { - kd := MakeKeyDel() + kd := MakeKeyMap() _, err := kd.GetProviderKey(43) // Not queryable require.NotNil(t, err) } // Setting and replacing should no allow earlier reverse query func TestXSetUnsetReverseQuery(t *testing.T) { - kd := MakeKeyDel() + kd := MakeKeyMap() kd.SetProviderKeyToConsumerKey(42, 43) kd.SetProviderKeyToConsumerKey(42, 44) // Set to different value _, err := kd.GetProviderKey(43) // Ealier value not queryable From f849b02d542c44a760829238faa9d04cd6f75adc Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 13:07:56 -0500 Subject: [PATCH 110/127] CP branch --- x/ccv/provider/keymap/keymap_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x/ccv/provider/keymap/keymap_test.go b/x/ccv/provider/keymap/keymap_test.go index 0a7aef21e1..aa655111f1 100644 --- a/x/ccv/provider/keymap/keymap_test.go +++ b/x/ccv/provider/keymap/keymap_test.go @@ -11,7 +11,7 @@ import ( const NUM_TRACES = 1000 // Len of trace for a single heuristic testing run -const TRACE_LEN = 1000 +const TRACE_LEN = 1001 // Number of validators to simulate const NUM_VALS = 4 From 52acd19fd193ecbdb92c33df60ea64397adcf4ed Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 13:18:34 -0500 Subject: [PATCH 111/127] get started --- x/ccv/provider/keeper/relay.go | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/x/ccv/provider/keeper/relay.go b/x/ccv/provider/keeper/relay.go index 19938e0284..f7ad50a9cc 100644 --- a/x/ccv/provider/keeper/relay.go +++ b/x/ccv/provider/keeper/relay.go @@ -165,7 +165,12 @@ func (k Keeper) SendValidatorUpdates(ctx sdk.Context) { // Sends all pending ValidatorSetChangePackets to the specified chain func (k Keeper) SendPendingVSCPackets(ctx sdk.Context, chainID, channelID string) { pendingPackets := k.EmptyPendingVSC(ctx, chainID) - for _, data := range pendingPackets { + k.SendVSCPackets(ctx, chainID, channelID, pendingPackets) +} + +// Sends all pending ValidatorSetChangePackets to the specified chain +func (k Keeper) SendVSCPackets(ctx sdk.Context, chainID, channelID string, packets []ccv.ValidatorSetChangePacketData) { + for _, data := range packets { // send packet over IBC err := utils.SendIBCPacket( ctx, From b1deb605cf97b1b5a4d9ff19770d970e66ccd439 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 14:12:25 -0500 Subject: [PATCH 112/127] RN method TrySendValidatorUpdates --- x/ccv/provider/keeper/relay.go | 4 ++-- x/ccv/provider/module.go | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/x/ccv/provider/keeper/relay.go b/x/ccv/provider/keeper/relay.go index f7ad50a9cc..297039906b 100644 --- a/x/ccv/provider/keeper/relay.go +++ b/x/ccv/provider/keeper/relay.go @@ -116,8 +116,8 @@ func (k Keeper) OnTimeoutPacket(ctx sdk.Context, packet channeltypes.Packet) err return k.StopConsumerChain(ctx, chainID, 
k.GetLockUnbondingOnTimeout(ctx, chainID), false) } -// SendValidatorUpdates sends latest validator updates to every registered consumer chain -func (k Keeper) SendValidatorUpdates(ctx sdk.Context) { +// TrySendValidatorUpdates tries to send latest validator updates to every registered consumer chain +func (k Keeper) TrySendValidatorUpdates(ctx sdk.Context) { // get current ValidatorSetUpdateId valUpdateID := k.GetValidatorSetUpdateId(ctx) // get the validator updates from the staking module diff --git a/x/ccv/provider/module.go b/x/ccv/provider/module.go index 8b6721f3d4..ac2aeac9f6 100644 --- a/x/ccv/provider/module.go +++ b/x/ccv/provider/module.go @@ -167,7 +167,7 @@ func (am AppModule) EndBlock(ctx sdk.Context, req abci.RequestEndBlock) []abci.V am.keeper.CompleteMaturedUnbondingOps(ctx) // send validator updates to consumer chains - am.keeper.SendValidatorUpdates(ctx) + am.keeper.TrySendValidatorUpdates(ctx) return []abci.ValidatorUpdate{} } From 0a4d53f35b57f3daba89c51239c5cf46f23c9d28 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 14:28:58 -0500 Subject: [PATCH 113/127] Adds SetPendingVSCs method --- x/ccv/provider/keeper/keeper.go | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/x/ccv/provider/keeper/keeper.go b/x/ccv/provider/keeper/keeper.go index 18d40b7fea..217ea55c37 100644 --- a/x/ccv/provider/keeper/keeper.go +++ b/x/ccv/provider/keeper/keeper.go @@ -755,7 +755,11 @@ func (k Keeper) AppendPendingVSC(ctx sdk.Context, chainID string, packet ccv.Val packets, _ := k.GetPendingVSCs(ctx, chainID) // append works also on a nil list packets = append(packets, packet) + k.SetPendingVSCs(ctx, chainID, packets) +} +// SetPendingVSCs writes a list of VSCs to store associated to chainID +func (k Keeper) SetPendingVSCs(ctx sdk.Context, chainID string, packets []ccv.ValidatorSetChangePacketData) { store := ctx.KVStore(k.storeKey) var data [][]byte for _, p := range packets { @@ -775,13 +779,12 @@ func (k Keeper) AppendPendingVSC(ctx sdk.Context, chainID string, packet ccv.Val // EmptyPendingVSC empties and returns the list of pending ValidatorSetChange packets for chain ID (if it exists) func (k Keeper) EmptyPendingVSC(ctx sdk.Context, chainID string) (packets []ccv.ValidatorSetChangePacketData) { - packets, found := k.GetPendingVSCs(ctx, chainID) - if !found { - // there is no list of pending ValidatorSetChange packets - return nil + existing, found := k.GetPendingVSCs(ctx, chainID) + if found { + packets = existing + store := ctx.KVStore(k.storeKey) + store.Delete(types.PendingVSCsKey(chainID)) } - store := ctx.KVStore(k.storeKey) - store.Delete(types.PendingVSCsKey(chainID)) return packets } From 054bba45189855782fbf38a96d6d6da3e1ddf2d6 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 14:29:07 -0500 Subject: [PATCH 114/127] Refactors TrySendValidatorUpdates --- x/ccv/provider/keeper/relay.go | 47 ++++++++-------------------------- 1 file changed, 11 insertions(+), 36 deletions(-) diff --git a/x/ccv/provider/keeper/relay.go b/x/ccv/provider/keeper/relay.go index 297039906b..bf07540c03 100644 --- a/x/ccv/provider/keeper/relay.go +++ b/x/ccv/provider/keeper/relay.go @@ -123,11 +123,8 @@ func (k Keeper) TrySendValidatorUpdates(ctx sdk.Context) { // get the validator updates from the staking module valUpdates := k.stakingKeeper.GetValidatorUpdates(ctx) k.IterateConsumerChains(ctx, func(ctx sdk.Context, chainID, clientID string) (stop bool) { - // check whether there is an established CCV channel to this consumer chain - if channelID, 
found := k.GetChainToChannel(ctx, chainID); found { - // Send pending VSC packets to consumer chain - k.SendPendingVSCPackets(ctx, chainID, channelID) - } + + packets := k.EmptyPendingVSC(ctx, chainID) // check whether there are changes in the validator set; // note that this also entails unbonding operations @@ -135,10 +132,12 @@ func (k Keeper) TrySendValidatorUpdates(ctx sdk.Context) { unbondingOps, _ := k.GetUnbondingOpsFromIndex(ctx, chainID, valUpdateID) if len(valUpdates) != 0 || len(unbondingOps) != 0 { // construct validator set change packet data - packetData := ccv.NewValidatorSetChangePacketData(valUpdates, valUpdateID, k.EmptySlashAcks(ctx, chainID)) + packets = append(packets, ccv.NewValidatorSetChangePacketData(valUpdates, valUpdateID, k.EmptySlashAcks(ctx, chainID))) + } - // check whether there is an established CCV channel to this consumer chain - if channelID, found := k.GetChainToChannel(ctx, chainID); found { + // check whether there is an established CCV channel to this consumer chain + if channelID, found := k.GetChainToChannel(ctx, chainID); found { + for _, data := range packets { // send this validator set change packet data to the consumer chain err := utils.SendIBCPacket( ctx, @@ -146,15 +145,15 @@ func (k Keeper) TrySendValidatorUpdates(ctx sdk.Context) { k.channelKeeper, channelID, // source channel id ccv.ProviderPortID, // source port id - packetData.GetBytes(), + data.GetBytes(), ) if err != nil { panic(fmt.Errorf("packet could not be sent over IBC: %w", err)) } - } else { - // store the packet data to be sent once the CCV channel is established - k.AppendPendingVSC(ctx, chainID, packetData) } + } else { + // store the packet data to be sent once the CCV channel is established + k.SetPendingVSCs(ctx, chainID, packets) } return false // do not stop the iteration }) @@ -162,30 +161,6 @@ func (k Keeper) TrySendValidatorUpdates(ctx sdk.Context) { k.IncrementValidatorSetUpdateId(ctx) } -// Sends all pending ValidatorSetChangePackets to the specified chain -func (k Keeper) SendPendingVSCPackets(ctx sdk.Context, chainID, channelID string) { - pendingPackets := k.EmptyPendingVSC(ctx, chainID) - k.SendVSCPackets(ctx, chainID, channelID, pendingPackets) -} - -// Sends all pending ValidatorSetChangePackets to the specified chain -func (k Keeper) SendVSCPackets(ctx sdk.Context, chainID, channelID string, packets []ccv.ValidatorSetChangePacketData) { - for _, data := range packets { - // send packet over IBC - err := utils.SendIBCPacket( - ctx, - k.scopedKeeper, - k.channelKeeper, - channelID, // source channel id - ccv.ProviderPortID, // source port id - data.GetBytes(), - ) - if err != nil { - panic(fmt.Errorf("packet could not be sent over IBC: %w", err)) - } - } -} - // OnRecvSlashPacket slashes and jails the given validator in the packet data func (k Keeper) OnRecvSlashPacket(ctx sdk.Context, packet channeltypes.Packet, data ccv.SlashPacketData) exported.Acknowledgement { // check that the channel is established From 15e41537e58a3780562963f3e70f64a9c1ebee79 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 16:27:07 -0500 Subject: [PATCH 115/127] remove inUse var --- x/ccv/provider/keymap/keymap.go | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/x/ccv/provider/keymap/keymap.go b/x/ccv/provider/keymap/keymap.go index 0cadeeec29..2b52e70d7f 100644 --- a/x/ccv/provider/keymap/keymap.go +++ b/x/ccv/provider/keymap/keymap.go @@ -42,14 +42,10 @@ func MakeKeyMap() KeyMap { // TODO: func (e *KeyMap) SetProviderKeyToConsumerKey(pk PK, ck CK) 
error { - inUse := false if _, ok := e.ckToPk[ck]; ok { - inUse = true + return errors.New(`cannot reuse key which is in use or was recently in use`) } if _, ok := e.ckToMemo[ck]; ok { - inUse = true - } - if inUse { return errors.New(`cannot reuse key which is in use or was recently in use`) } if oldCk, ok := e.pkToCk[pk]; ok { From e02d5e2753f6965cac87ed521ab47ea2fe7d840e Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 16:50:35 -0500 Subject: [PATCH 116/127] Adds temp changes to relay --- x/ccv/provider/keeper/relay.go | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/x/ccv/provider/keeper/relay.go b/x/ccv/provider/keeper/relay.go index bf07540c03..170fd6b6ac 100644 --- a/x/ccv/provider/keeper/relay.go +++ b/x/ccv/provider/keeper/relay.go @@ -131,8 +131,15 @@ func (k Keeper) TrySendValidatorUpdates(ctx sdk.Context) { // w/o changes in the voting power of the validators in the validator set unbondingOps, _ := k.GetUnbondingOpsFromIndex(ctx, chainID, valUpdateID) if len(valUpdates) != 0 || len(unbondingOps) != 0 { - // construct validator set change packet data - packets = append(packets, ccv.NewValidatorSetChangePacketData(valUpdates, valUpdateID, k.EmptySlashAcks(ctx, chainID))) + + // Map the updates through any key transformations + // updatesToSend := k.keymap.ComputeUpdates(valUpdateID, valUpdates) + updatesToSend := valUpdates + + packets = append( + packets, + ccv.NewValidatorSetChangePacketData(updatesToSend, valUpdateID, k.EmptySlashAcks(ctx, chainID)) + ) } // check whether there is an established CCV channel to this consumer chain From 04ffb23b9595433e93fb2b296f5ce8b18bc9662d Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 16:50:40 -0500 Subject: [PATCH 117/127] Adds bytes.py --- x/ccv/provider/keymap/bytes.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 x/ccv/provider/keymap/bytes.py diff --git a/x/ccv/provider/keymap/bytes.py b/x/ccv/provider/keymap/bytes.py new file mode 100644 index 0000000000..cf5a4703f2 --- /dev/null +++ b/x/ccv/provider/keymap/bytes.py @@ -0,0 +1,20 @@ +num_chains = 20 +num_validators = 175 +key_bytes = 64 + + +def compute_usage(): + + # must store 4 keys per validator + fixed = num_chains * num_validators * 4 * key_bytes + + # in a very pessimistic case, we must store 10 + # old keys, for each validator + pessimism_factor = 10 * key_bytes + return fixed * pessimism_factor + + +bytes = compute_usage() +kibiytes = bytes / 1024 +mibibytes = kibiytes / 1024 +print(mibibytes) From c4e0204350c9b93168c07e072f8502d2042ae809 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 16:50:48 -0500 Subject: [PATCH 118/127] Adds store abstraction but does not use it --- x/ccv/provider/keymap/keymap.go | 16 +++++++--- x/ccv/provider/keymap/keymap_test.go | 45 +++++++++++++++++++++++++--- 2 files changed, 53 insertions(+), 8 deletions(-) diff --git a/x/ccv/provider/keymap/keymap.go b/x/ccv/provider/keymap/keymap.go index 2b52e70d7f..3c36a24213 100644 --- a/x/ccv/provider/keymap/keymap.go +++ b/x/ccv/provider/keymap/keymap.go @@ -27,16 +27,24 @@ type memo struct { // 4. 
TODO: document this file type KeyMap struct { + store Store pkToCk map[PK]CK ckToPk map[CK]PK ckToMemo map[CK]memo } -func MakeKeyMap() KeyMap { +type Store interface { + getPkToCk() map[PK]CK + getCkToPk() map[CK]PK + getCkToMemo() map[CK]memo + setPkToCk(map[PK]CK) + setCkToPk(map[CK]PK) + setCkToMemo(map[CK]memo) +} + +func MakeKeyMap(store Store) KeyMap { return KeyMap{ - pkToCk: map[PK]CK{}, - ckToPk: map[CK]PK{}, - ckToMemo: map[CK]memo{}, + store: store, } } diff --git a/x/ccv/provider/keymap/keymap_test.go b/x/ccv/provider/keymap/keymap_test.go index aa655111f1..b380e38ca9 100644 --- a/x/ccv/provider/keymap/keymap_test.go +++ b/x/ccv/provider/keymap/keymap_test.go @@ -20,6 +20,38 @@ const NUM_VALS = 4 // (This is constrained to ensure overlap edge cases are tested) const NUM_CKS = 50 +type store struct { + pkToCk map[PK]CK + ckToPk map[CK]PK + ckToMemo map[CK]memo +} + +func makeStore() store { + return store{ + pkToCk: map[PK]CK{}, + ckToPk: map[CK]PK{}, + ckToMemo: map[CK]memo{}, + } +} +func (s *store) getPkToCk() map[PK]CK { + return s.pkToCk +} +func (s *store) getCkToPk() map[CK]PK { + return s.ckToPk +} +func (s *store) getCkToMemo() map[CK]memo { + return s.ckToMemo +} +func (s *store) setPkToCk(e map[PK]CK) { + s.pkToCk = e +} +func (s *store) setCkToPk(e map[CK]PK) { + s.ckToPk = e +} +func (s *store) setCkToMemo(e map[CK]memo) { + s.ckToMemo = e +} + type keyMapEntry struct { pk PK ck CK @@ -54,7 +86,9 @@ type driver struct { func makeDriver(t *testing.T, trace []traceStep) driver { d := driver{} d.t = t - kd := MakeKeyMap() + s := makeStore() + require.NotNil(t, s.ckToMemo) + kd := MakeKeyMap(&s) d.km = &kd d.trace = trace d.lastTimeProvider = 0 @@ -461,7 +495,8 @@ func TestPropertiesRandomlyHeuristically(t *testing.T) { // Setting should enable a reverse query func TestXSetReverseQuery(t *testing.T) { - kd := MakeKeyMap() + s := makeStore() + kd := MakeKeyMap(&s) kd.SetProviderKeyToConsumerKey(42, 43) actual, err := kd.GetProviderKey(43) // Queryable require.Nil(t, err) @@ -470,14 +505,16 @@ func TestXSetReverseQuery(t *testing.T) { // Not setting should not enable a reverse query func TestNoSetReverseQuery(t *testing.T) { - kd := MakeKeyMap() + s := makeStore() + kd := MakeKeyMap(&s) _, err := kd.GetProviderKey(43) // Not queryable require.NotNil(t, err) } // Setting and replacing should no allow earlier reverse query func TestXSetUnsetReverseQuery(t *testing.T) { - kd := MakeKeyMap() + s := makeStore() + kd := MakeKeyMap(&s) kd.SetProviderKeyToConsumerKey(42, 43) kd.SetProviderKeyToConsumerKey(42, 44) // Set to different value _, err := kd.GetProviderKey(43) // Ealier value not queryable From 7ffba3b763be55b7e15130e012e0cf7784b0792a Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 17:00:19 -0500 Subject: [PATCH 119/127] Use store abstraction and pass tests --- x/ccv/provider/keymap/keymap.go | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/x/ccv/provider/keymap/keymap.go b/x/ccv/provider/keymap/keymap.go index 3c36a24213..42b6fe3a62 100644 --- a/x/ccv/provider/keymap/keymap.go +++ b/x/ccv/provider/keymap/keymap.go @@ -48,8 +48,27 @@ func MakeKeyMap(store Store) KeyMap { } } +// GetAll reads all data from store +// The granularity of store access can be changed if needed for +// performance reasons. 
+func (e *KeyMap) GetAll() { + e.pkToCk = e.store.getPkToCk() + e.ckToPk = e.store.getCkToPk() + e.ckToMemo = e.store.getCkToMemo() +} + +// SetAll write all data to store +// The granularity of store access can be changed if needed for +// performance reasons. +func (e *KeyMap) SetAll() { + e.store.setPkToCk(e.pkToCk) + e.store.setCkToPk(e.ckToPk) + e.store.setCkToMemo(e.ckToMemo) +} + // TODO: func (e *KeyMap) SetProviderKeyToConsumerKey(pk PK, ck CK) error { + e.GetAll() if _, ok := e.ckToPk[ck]; ok { return errors.New(`cannot reuse key which is in use or was recently in use`) } @@ -61,11 +80,13 @@ func (e *KeyMap) SetProviderKeyToConsumerKey(pk PK, ck CK) error { } e.pkToCk[pk] = ck e.ckToPk[ck] = pk + e.SetAll() // TODO: Try with defer return nil } // TODO: func (e *KeyMap) GetProviderKey(ck CK) (PK, error) { + e.GetAll() if u, ok := e.ckToMemo[ck]; ok { return u.pk, nil } else if pk, ok := e.ckToPk[ck]; ok { @@ -77,6 +98,7 @@ func (e *KeyMap) GetProviderKey(ck CK) (PK, error) { // TODO: func (e *KeyMap) PruneUnusedKeys(latestVscid VSCID) { + e.GetAll() toDel := []CK{} for _, u := range e.ckToMemo { // If the last update was a deletion (0 power) and the update @@ -88,11 +110,14 @@ func (e *KeyMap) PruneUnusedKeys(latestVscid VSCID) { for _, ck := range toDel { delete(e.ckToMemo, ck) } + e.SetAll() } // TODO: func (e *KeyMap) ComputeUpdates(vscid VSCID, providerUpdates []update) (consumerUpdates []update) { + e.GetAll() + updates := map[PK]int{} for _, u := range providerUpdates { @@ -107,6 +132,7 @@ func (e *KeyMap) ComputeUpdates(vscid VSCID, providerUpdates []update) (consumer consumerUpdates = append(consumerUpdates, update{key: ck, power: power}) } + e.SetAll() return consumerUpdates } @@ -180,6 +206,8 @@ func (e *KeyMap) inner(vscid VSCID, providerUpdates map[PK]int) map[CK]int { // Returns true iff internal invariants hold func (e *KeyMap) internalInvariants() bool { + e.GetAll() + // No two provider keys can map to the same consumer key // (pkToCk is sane) seen := map[CK]bool{} From f27ab579dba06b3cb836ed267c56b9b81658cc4c Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 17:07:43 -0500 Subject: [PATCH 120/127] Move to keeper subdir --- x/ccv/provider/{ => keeper}/keymap/.gitignore | 0 x/ccv/provider/{ => keeper}/keymap/keymap.go | 0 x/ccv/provider/{keymap/bytes.py => keeper/keymap/keymap_bytes.py} | 0 x/ccv/provider/{ => keeper}/keymap/keymap_test.go | 0 4 files changed, 0 insertions(+), 0 deletions(-) rename x/ccv/provider/{ => keeper}/keymap/.gitignore (100%) rename x/ccv/provider/{ => keeper}/keymap/keymap.go (100%) rename x/ccv/provider/{keymap/bytes.py => keeper/keymap/keymap_bytes.py} (100%) rename x/ccv/provider/{ => keeper}/keymap/keymap_test.go (100%) diff --git a/x/ccv/provider/keymap/.gitignore b/x/ccv/provider/keeper/keymap/.gitignore similarity index 100% rename from x/ccv/provider/keymap/.gitignore rename to x/ccv/provider/keeper/keymap/.gitignore diff --git a/x/ccv/provider/keymap/keymap.go b/x/ccv/provider/keeper/keymap/keymap.go similarity index 100% rename from x/ccv/provider/keymap/keymap.go rename to x/ccv/provider/keeper/keymap/keymap.go diff --git a/x/ccv/provider/keymap/bytes.py b/x/ccv/provider/keeper/keymap/keymap_bytes.py similarity index 100% rename from x/ccv/provider/keymap/bytes.py rename to x/ccv/provider/keeper/keymap/keymap_bytes.py diff --git a/x/ccv/provider/keymap/keymap_test.go b/x/ccv/provider/keeper/keymap/keymap_test.go similarity index 100% rename from x/ccv/provider/keymap/keymap_test.go rename to 
x/ccv/provider/keeper/keymap/keymap_test.go From 41d48028e07cf9042bda3c0a1f35b7795ab66a7d Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 17:11:06 -0500 Subject: [PATCH 121/127] Make keeper attempt --- x/ccv/provider/keeper/keymap/store.go | 36 +++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) create mode 100644 x/ccv/provider/keeper/keymap/store.go diff --git a/x/ccv/provider/keeper/keymap/store.go b/x/ccv/provider/keeper/keymap/store.go new file mode 100644 index 0000000000..1a2fc84ae6 --- /dev/null +++ b/x/ccv/provider/keeper/keymap/store.go @@ -0,0 +1,36 @@ +package keymap + +import ( + keeper "github.com/cosmos/interchain-security/x/ccv/provider/keeper" +) + +func (k keeper.Keeper) getPkToCk() map[PK]CK { + _ = k + // TODO: implement + panic("not implemented") +} +func (k keeper.Keeper) getCkToPk() map[CK]PK { + _ = k + // TODO: implement + panic("not implemented") +} +func (k keeper.Keeper) getCkToMemo() map[CK]memo { + _ = k + // TODO: implement + panic("not implemented") +} +func (k keeper.Keeper) setPkToCk(e map[PK]CK) { + _ = k + // TODO: implement + panic("not implemented") +} +func (k keeper.Keeper) setCkToPk(e map[CK]PK) { + _ = k + // TODO: implement + panic("not implemented") +} +func (k keeper.Keeper) setCkToMemo(e map[CK]memo) { + _ = k + // TODO: implement + panic("not implemented") +} From bf91d80586d22284a62a3ee8c6324f4a427a1eed Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 17:13:26 -0500 Subject: [PATCH 122/127] Integrate keymap into provider keeper dir --- x/ccv/provider/keeper/{keymap => }/keymap.go | 33 ++++++++++++++++- x/ccv/provider/keeper/keymap/.gitignore | 6 ---- x/ccv/provider/keeper/keymap/store.go | 36 ------------------- .../keeper/{keymap => }/keymap_bytes.py | 0 .../keeper/{keymap => }/keymap_test.go | 2 +- x/ccv/provider/keeper/relay.go | 2 +- 6 files changed, 34 insertions(+), 45 deletions(-) rename x/ccv/provider/keeper/{keymap => }/keymap.go (90%) delete mode 100644 x/ccv/provider/keeper/keymap/.gitignore delete mode 100644 x/ccv/provider/keeper/keymap/store.go rename x/ccv/provider/keeper/{keymap => }/keymap_bytes.py (100%) rename x/ccv/provider/keeper/{keymap => }/keymap_test.go (99%) diff --git a/x/ccv/provider/keeper/keymap/keymap.go b/x/ccv/provider/keeper/keymap.go similarity index 90% rename from x/ccv/provider/keeper/keymap/keymap.go rename to x/ccv/provider/keeper/keymap.go index 42b6fe3a62..89cabd3699 100644 --- a/x/ccv/provider/keeper/keymap/keymap.go +++ b/x/ccv/provider/keeper/keymap.go @@ -1,4 +1,4 @@ -package keymap +package keeper import ( "errors" @@ -48,6 +48,37 @@ func MakeKeyMap(store Store) KeyMap { } } +func (k Keeper) getPkToCk() map[PK]CK { + _ = k + // TODO: implement + panic("not implemented") +} +func (k Keeper) getCkToPk() map[CK]PK { + _ = k + // TODO: implement + panic("not implemented") +} +func (k Keeper) getCkToMemo() map[CK]memo { + _ = k + // TODO: implement + panic("not implemented") +} +func (k Keeper) setPkToCk(e map[PK]CK) { + _ = k + // TODO: implement + panic("not implemented") +} +func (k Keeper) setCkToPk(e map[CK]PK) { + _ = k + // TODO: implement + panic("not implemented") +} +func (k Keeper) setCkToMemo(e map[CK]memo) { + _ = k + // TODO: implement + panic("not implemented") +} + // GetAll reads all data from store // The granularity of store access can be changed if needed for // performance reasons. 
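The Keeper store methods above are left as panicking stubs. Purely as a sketch of one possible direction (not part of the patch), each map could be serialized under a fixed key in the provider KVStore; the ctx parameter, the "keymap/pkToCk" key, and the method names below are assumptions, since the current stubs take no sdk.Context.

package keeper

import (
	"encoding/json"

	sdk "github.com/cosmos/cosmos-sdk/types"
)

// Sketch only: back one of the Store maps with the provider KVStore by
// serializing it as JSON under a fixed key. Assumes a ctx parameter is
// threaded in (the stubs above have none yet); key and method names are
// illustrative, not from the patch.
func (k Keeper) getPkToCkFromStore(ctx sdk.Context) map[PK]CK {
	store := ctx.KVStore(k.storeKey)
	m := map[PK]CK{}
	if bz := store.Get([]byte("keymap/pkToCk")); bz != nil {
		if err := json.Unmarshal(bz, &m); err != nil {
			panic(err)
		}
	}
	return m
}

func (k Keeper) setPkToCkInStore(ctx sdk.Context, m map[PK]CK) {
	store := ctx.KVStore(k.storeKey)
	// encoding/json accepts integer-keyed maps and sorts the keys, so the
	// stored bytes are deterministic for a given map.
	bz, err := json.Marshal(m)
	if err != nil {
		panic(err)
	}
	store.Set([]byte("keymap/pkToCk"), bz)
}

Whether the three maps are stored separately like this or as a single blob is exactly the access-granularity question that the GetAll/SetAll comments above leave open.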
diff --git a/x/ccv/provider/keeper/keymap/.gitignore b/x/ccv/provider/keeper/keymap/.gitignore deleted file mode 100644 index 2afa065528..0000000000 --- a/x/ccv/provider/keeper/keymap/.gitignore +++ /dev/null @@ -1,6 +0,0 @@ -*.json -*.tla -*.cfg -!main.tla -!main.cfg -*apalache* \ No newline at end of file diff --git a/x/ccv/provider/keeper/keymap/store.go b/x/ccv/provider/keeper/keymap/store.go deleted file mode 100644 index 1a2fc84ae6..0000000000 --- a/x/ccv/provider/keeper/keymap/store.go +++ /dev/null @@ -1,36 +0,0 @@ -package keymap - -import ( - keeper "github.com/cosmos/interchain-security/x/ccv/provider/keeper" -) - -func (k keeper.Keeper) getPkToCk() map[PK]CK { - _ = k - // TODO: implement - panic("not implemented") -} -func (k keeper.Keeper) getCkToPk() map[CK]PK { - _ = k - // TODO: implement - panic("not implemented") -} -func (k keeper.Keeper) getCkToMemo() map[CK]memo { - _ = k - // TODO: implement - panic("not implemented") -} -func (k keeper.Keeper) setPkToCk(e map[PK]CK) { - _ = k - // TODO: implement - panic("not implemented") -} -func (k keeper.Keeper) setCkToPk(e map[CK]PK) { - _ = k - // TODO: implement - panic("not implemented") -} -func (k keeper.Keeper) setCkToMemo(e map[CK]memo) { - _ = k - // TODO: implement - panic("not implemented") -} diff --git a/x/ccv/provider/keeper/keymap/keymap_bytes.py b/x/ccv/provider/keeper/keymap_bytes.py similarity index 100% rename from x/ccv/provider/keeper/keymap/keymap_bytes.py rename to x/ccv/provider/keeper/keymap_bytes.py diff --git a/x/ccv/provider/keeper/keymap/keymap_test.go b/x/ccv/provider/keeper/keymap_test.go similarity index 99% rename from x/ccv/provider/keeper/keymap/keymap_test.go rename to x/ccv/provider/keeper/keymap_test.go index b380e38ca9..2c978c63ae 100644 --- a/x/ccv/provider/keeper/keymap/keymap_test.go +++ b/x/ccv/provider/keeper/keymap_test.go @@ -1,4 +1,4 @@ -package keymap +package keeper import ( "math/rand" diff --git a/x/ccv/provider/keeper/relay.go b/x/ccv/provider/keeper/relay.go index 170fd6b6ac..1f0c9eb607 100644 --- a/x/ccv/provider/keeper/relay.go +++ b/x/ccv/provider/keeper/relay.go @@ -138,7 +138,7 @@ func (k Keeper) TrySendValidatorUpdates(ctx sdk.Context) { packets = append( packets, - ccv.NewValidatorSetChangePacketData(updatesToSend, valUpdateID, k.EmptySlashAcks(ctx, chainID)) + ccv.NewValidatorSetChangePacketData(updatesToSend, valUpdateID, k.EmptySlashAcks(ctx, chainID)), ) } From 89ebcea932ddb118072dc87a76c7a140f3b9c4b5 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 17:15:02 -0500 Subject: [PATCH 123/127] split keymap, keymap_core --- x/ccv/provider/keeper/keymap.go | 263 +-------------------------- x/ccv/provider/keeper/keymap_core.go | 259 ++++++++++++++++++++++++++ 2 files changed, 264 insertions(+), 258 deletions(-) create mode 100644 x/ccv/provider/keeper/keymap_core.go diff --git a/x/ccv/provider/keeper/keymap.go b/x/ccv/provider/keeper/keymap.go index 89cabd3699..b97f9ab1a3 100644 --- a/x/ccv/provider/keeper/keymap.go +++ b/x/ccv/provider/keeper/keymap.go @@ -1,290 +1,37 @@ package keeper -import ( - "errors" -) - -type PK = int -type CK = int -type VSCID = int - -type update struct { - key int - power int -} - -type memo struct { - ck CK - pk PK - vscid int - power int -} - -// TODO: -// 1. Integrate into kv store. -// 2. integrate into Provider::EndBlock, -// 3. integrate with create/destroy validator -// 4. 
TODO: document this file - -type KeyMap struct { - store Store - pkToCk map[PK]CK - ckToPk map[CK]PK - ckToMemo map[CK]memo -} - -type Store interface { - getPkToCk() map[PK]CK - getCkToPk() map[CK]PK - getCkToMemo() map[CK]memo - setPkToCk(map[PK]CK) - setCkToPk(map[CK]PK) - setCkToMemo(map[CK]memo) -} - -func MakeKeyMap(store Store) KeyMap { - return KeyMap{ - store: store, - } -} - func (k Keeper) getPkToCk() map[PK]CK { _ = k // TODO: implement panic("not implemented") } + func (k Keeper) getCkToPk() map[CK]PK { _ = k // TODO: implement panic("not implemented") } + func (k Keeper) getCkToMemo() map[CK]memo { _ = k // TODO: implement panic("not implemented") } + func (k Keeper) setPkToCk(e map[PK]CK) { _ = k // TODO: implement panic("not implemented") } + func (k Keeper) setCkToPk(e map[CK]PK) { _ = k // TODO: implement panic("not implemented") } + func (k Keeper) setCkToMemo(e map[CK]memo) { _ = k // TODO: implement panic("not implemented") } - -// GetAll reads all data from store -// The granularity of store access can be changed if needed for -// performance reasons. -func (e *KeyMap) GetAll() { - e.pkToCk = e.store.getPkToCk() - e.ckToPk = e.store.getCkToPk() - e.ckToMemo = e.store.getCkToMemo() -} - -// SetAll write all data to store -// The granularity of store access can be changed if needed for -// performance reasons. -func (e *KeyMap) SetAll() { - e.store.setPkToCk(e.pkToCk) - e.store.setCkToPk(e.ckToPk) - e.store.setCkToMemo(e.ckToMemo) -} - -// TODO: -func (e *KeyMap) SetProviderKeyToConsumerKey(pk PK, ck CK) error { - e.GetAll() - if _, ok := e.ckToPk[ck]; ok { - return errors.New(`cannot reuse key which is in use or was recently in use`) - } - if _, ok := e.ckToMemo[ck]; ok { - return errors.New(`cannot reuse key which is in use or was recently in use`) - } - if oldCk, ok := e.pkToCk[pk]; ok { - delete(e.ckToPk, oldCk) - } - e.pkToCk[pk] = ck - e.ckToPk[ck] = pk - e.SetAll() // TODO: Try with defer - return nil -} - -// TODO: -func (e *KeyMap) GetProviderKey(ck CK) (PK, error) { - e.GetAll() - if u, ok := e.ckToMemo[ck]; ok { - return u.pk, nil - } else if pk, ok := e.ckToPk[ck]; ok { - return pk, nil - } else { - return -1, errors.New("provider key not found for consumer key") - } -} - -// TODO: -func (e *KeyMap) PruneUnusedKeys(latestVscid VSCID) { - e.GetAll() - toDel := []CK{} - for _, u := range e.ckToMemo { - // If the last update was a deletion (0 power) and the update - // matured then pruning is possible. 
- if u.power == 0 && u.vscid <= latestVscid { - toDel = append(toDel, u.ck) - } - } - for _, ck := range toDel { - delete(e.ckToMemo, ck) - } - e.SetAll() -} - -// TODO: -func (e *KeyMap) ComputeUpdates(vscid VSCID, providerUpdates []update) (consumerUpdates []update) { - - e.GetAll() - - updates := map[PK]int{} - - for _, u := range providerUpdates { - updates[u.key] = u.power - } - - updates = e.inner(vscid, updates) - - consumerUpdates = []update{} - - for ck, power := range updates { - consumerUpdates = append(consumerUpdates, update{key: ck, power: power}) - } - - e.SetAll() - return consumerUpdates -} - -// do inner work as part of ComputeUpdates -func (e *KeyMap) inner(vscid VSCID, providerUpdates map[PK]int) map[CK]int { - - pks := []PK{} - - // Grab provider keys where the assigned consumer key has changed - for oldCk, u := range e.ckToMemo { - if newCk, ok := e.pkToCk[u.pk]; ok { - if oldCk != newCk && 0 < u.power { - pks = append(pks, u.pk) - } - } - } - // Grab provider keys where the validator power has changed - for pk := range providerUpdates { - pks = append(pks, pk) - } - - ret := map[CK]int{} - - // Create a read only copy, so that we can query while writing - // updates to the old version. - ckToMemo_READ_ONLY := map[CK]memo{} - for ck, memo := range e.ckToMemo { - ckToMemo_READ_ONLY[ck] = memo - } - - for _, pk := range pks { - for _, u := range ckToMemo_READ_ONLY { - if u.pk == pk && 0 < u.power { - // For each provider key for which there was already a positive update - // create a deletion update for the associated consumer key. - e.ckToMemo[u.ck] = memo{ck: u.ck, pk: pk, vscid: vscid, power: 0} - ret[u.ck] = 0 - } - } - } - - for _, pk := range pks { - // For each provider key where there was either - // 1) already a positive power update - // 2) the validator power has changed (and is still positive) - // create a change update for the associated consumer key. - - power := 0 - for _, u := range ckToMemo_READ_ONLY { - if u.pk == pk && 0 < u.power { - // There was previously a positive power update: copy it. - power = u.power - } - } - // There is a new validator power: use it. - if newPower, ok := providerUpdates[pk]; ok { - power = newPower - } - // Only ship update with positive powers. Zero power updates (deletions) - // are handled in earlier block. - if 0 < power { - ck := e.pkToCk[pk] - e.ckToMemo[ck] = memo{ck: ck, pk: pk, vscid: vscid, power: power} - ret[ck] = power - } - } - - return ret -} - -// Returns true iff internal invariants hold -func (e *KeyMap) internalInvariants() bool { - - e.GetAll() - - // No two provider keys can map to the same consumer key - // (pkToCk is sane) - seen := map[CK]bool{} - for _, ck := range e.pkToCk { - if seen[ck] { - return false - } - seen[ck] = true - } - - // all values of pkToCk is a key of ckToPk - // (reverse lookup is always possible) - for _, ck := range e.pkToCk { - if _, ok := e.ckToPk[ck]; !ok { - return false - } - } - - // All consumer keys mapping to provider keys are actually - // mapped to by the provider key. - // (ckToPk is sane) - for ck := range e.ckToPk { - good := false - for _, candidateCk := range e.pkToCk { - if candidateCk == ck { - good = true - break - } - } - if !good { - return false - } - } - - // If a consumer key is mapped to a provider key (currently) - // any memo containing the same consumer key has the same - // mapping. 
- // (Ensures lookups are correct) - for ck, pk := range e.ckToPk { - if u, ok := e.ckToMemo[ck]; ok { - if pk != u.pk { - return false - } - } - } - - return true - -} diff --git a/x/ccv/provider/keeper/keymap_core.go b/x/ccv/provider/keeper/keymap_core.go new file mode 100644 index 0000000000..3addfad80f --- /dev/null +++ b/x/ccv/provider/keeper/keymap_core.go @@ -0,0 +1,259 @@ +package keeper + +import ( + "errors" +) + +type PK = int +type CK = int +type VSCID = int + +type update struct { + key int + power int +} + +type memo struct { + ck CK + pk PK + vscid int + power int +} + +// TODO: +// 1. Integrate into kv store. +// 2. integrate into Provider::EndBlock, +// 3. integrate with create/destroy validator +// 4. TODO: document this file + +type KeyMap struct { + store Store + pkToCk map[PK]CK + ckToPk map[CK]PK + ckToMemo map[CK]memo +} + +type Store interface { + getPkToCk() map[PK]CK + getCkToPk() map[CK]PK + getCkToMemo() map[CK]memo + setPkToCk(map[PK]CK) + setCkToPk(map[CK]PK) + setCkToMemo(map[CK]memo) +} + +func MakeKeyMap(store Store) KeyMap { + return KeyMap{ + store: store, + } +} + +// GetAll reads all data from store +// The granularity of store access can be changed if needed for +// performance reasons. +func (e *KeyMap) GetAll() { + e.pkToCk = e.store.getPkToCk() + e.ckToPk = e.store.getCkToPk() + e.ckToMemo = e.store.getCkToMemo() +} + +// SetAll write all data to store +// The granularity of store access can be changed if needed for +// performance reasons. +func (e *KeyMap) SetAll() { + e.store.setPkToCk(e.pkToCk) + e.store.setCkToPk(e.ckToPk) + e.store.setCkToMemo(e.ckToMemo) +} + +// TODO: +func (e *KeyMap) SetProviderKeyToConsumerKey(pk PK, ck CK) error { + e.GetAll() + if _, ok := e.ckToPk[ck]; ok { + return errors.New(`cannot reuse key which is in use or was recently in use`) + } + if _, ok := e.ckToMemo[ck]; ok { + return errors.New(`cannot reuse key which is in use or was recently in use`) + } + if oldCk, ok := e.pkToCk[pk]; ok { + delete(e.ckToPk, oldCk) + } + e.pkToCk[pk] = ck + e.ckToPk[ck] = pk + e.SetAll() // TODO: Try with defer + return nil +} + +// TODO: +func (e *KeyMap) GetProviderKey(ck CK) (PK, error) { + e.GetAll() + if u, ok := e.ckToMemo[ck]; ok { + return u.pk, nil + } else if pk, ok := e.ckToPk[ck]; ok { + return pk, nil + } else { + return -1, errors.New("provider key not found for consumer key") + } +} + +// TODO: +func (e *KeyMap) PruneUnusedKeys(latestVscid VSCID) { + e.GetAll() + toDel := []CK{} + for _, u := range e.ckToMemo { + // If the last update was a deletion (0 power) and the update + // matured then pruning is possible. 
+ if u.power == 0 && u.vscid <= latestVscid { + toDel = append(toDel, u.ck) + } + } + for _, ck := range toDel { + delete(e.ckToMemo, ck) + } + e.SetAll() +} + +// TODO: +func (e *KeyMap) ComputeUpdates(vscid VSCID, providerUpdates []update) (consumerUpdates []update) { + + e.GetAll() + + updates := map[PK]int{} + + for _, u := range providerUpdates { + updates[u.key] = u.power + } + + updates = e.inner(vscid, updates) + + consumerUpdates = []update{} + + for ck, power := range updates { + consumerUpdates = append(consumerUpdates, update{key: ck, power: power}) + } + + e.SetAll() + return consumerUpdates +} + +// do inner work as part of ComputeUpdates +func (e *KeyMap) inner(vscid VSCID, providerUpdates map[PK]int) map[CK]int { + + pks := []PK{} + + // Grab provider keys where the assigned consumer key has changed + for oldCk, u := range e.ckToMemo { + if newCk, ok := e.pkToCk[u.pk]; ok { + if oldCk != newCk && 0 < u.power { + pks = append(pks, u.pk) + } + } + } + // Grab provider keys where the validator power has changed + for pk := range providerUpdates { + pks = append(pks, pk) + } + + ret := map[CK]int{} + + // Create a read only copy, so that we can query while writing + // updates to the old version. + ckToMemo_READ_ONLY := map[CK]memo{} + for ck, memo := range e.ckToMemo { + ckToMemo_READ_ONLY[ck] = memo + } + + for _, pk := range pks { + for _, u := range ckToMemo_READ_ONLY { + if u.pk == pk && 0 < u.power { + // For each provider key for which there was already a positive update + // create a deletion update for the associated consumer key. + e.ckToMemo[u.ck] = memo{ck: u.ck, pk: pk, vscid: vscid, power: 0} + ret[u.ck] = 0 + } + } + } + + for _, pk := range pks { + // For each provider key where there was either + // 1) already a positive power update + // 2) the validator power has changed (and is still positive) + // create a change update for the associated consumer key. + + power := 0 + for _, u := range ckToMemo_READ_ONLY { + if u.pk == pk && 0 < u.power { + // There was previously a positive power update: copy it. + power = u.power + } + } + // There is a new validator power: use it. + if newPower, ok := providerUpdates[pk]; ok { + power = newPower + } + // Only ship update with positive powers. Zero power updates (deletions) + // are handled in earlier block. + if 0 < power { + ck := e.pkToCk[pk] + e.ckToMemo[ck] = memo{ck: ck, pk: pk, vscid: vscid, power: power} + ret[ck] = power + } + } + + return ret +} + +// Returns true iff internal invariants hold +func (e *KeyMap) internalInvariants() bool { + + e.GetAll() + + // No two provider keys can map to the same consumer key + // (pkToCk is sane) + seen := map[CK]bool{} + for _, ck := range e.pkToCk { + if seen[ck] { + return false + } + seen[ck] = true + } + + // all values of pkToCk is a key of ckToPk + // (reverse lookup is always possible) + for _, ck := range e.pkToCk { + if _, ok := e.ckToPk[ck]; !ok { + return false + } + } + + // All consumer keys mapping to provider keys are actually + // mapped to by the provider key. + // (ckToPk is sane) + for ck := range e.ckToPk { + good := false + for _, candidateCk := range e.pkToCk { + if candidateCk == ck { + good = true + break + } + } + if !good { + return false + } + } + + // If a consumer key is mapped to a provider key (currently) + // any memo containing the same consumer key has the same + // mapping. 
+ // (Ensures lookups are correct) + for ck, pk := range e.ckToPk { + if u, ok := e.ckToMemo[ck]; ok { + if pk != u.pk { + return false + } + } + } + + return true + +} From 4ba668adcdc14c31d85b82a661d7c67ab533e816 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 17:20:17 -0500 Subject: [PATCH 124/127] rn _test -> core_test --- x/ccv/provider/keeper/keymap_core_test.go | 524 ++++++++++++++++++++++ 1 file changed, 524 insertions(+) create mode 100644 x/ccv/provider/keeper/keymap_core_test.go diff --git a/x/ccv/provider/keeper/keymap_core_test.go b/x/ccv/provider/keeper/keymap_core_test.go new file mode 100644 index 0000000000..2c978c63ae --- /dev/null +++ b/x/ccv/provider/keeper/keymap_core_test.go @@ -0,0 +1,524 @@ +package keeper + +import ( + "math/rand" + "testing" + + "github.com/stretchr/testify/require" +) + +// Num traces to run for heuristic testing +const NUM_TRACES = 1000 + +// Len of trace for a single heuristic testing run +const TRACE_LEN = 1001 + +// Number of validators to simulate +const NUM_VALS = 4 + +// Number of consumer keys in the universe +// (This is constrained to ensure overlap edge cases are tested) +const NUM_CKS = 50 + +type store struct { + pkToCk map[PK]CK + ckToPk map[CK]PK + ckToMemo map[CK]memo +} + +func makeStore() store { + return store{ + pkToCk: map[PK]CK{}, + ckToPk: map[CK]PK{}, + ckToMemo: map[CK]memo{}, + } +} +func (s *store) getPkToCk() map[PK]CK { + return s.pkToCk +} +func (s *store) getCkToPk() map[CK]PK { + return s.ckToPk +} +func (s *store) getCkToMemo() map[CK]memo { + return s.ckToMemo +} +func (s *store) setPkToCk(e map[PK]CK) { + s.pkToCk = e +} +func (s *store) setCkToPk(e map[CK]PK) { + s.ckToPk = e +} +func (s *store) setCkToMemo(e map[CK]memo) { + s.ckToMemo = e +} + +type keyMapEntry struct { + pk PK + ck CK +} + +type traceStep struct { + keyMapEntries []keyMapEntry + providerUpdates []update + timeProvider int + timeConsumer int + timeMaturity int +} + +type driver struct { + t *testing.T + km *KeyMap + trace []traceStep + lastTimeProvider int + lastTimeConsumer int + lastTimeMaturity int + // indexed by time (starting at 0) + mappings []map[PK]CK + // indexed by time (starting at 0) + consumerUpdates [][]update + // indexed by time (starting at 0) + providerValsets []valset + // The validator set from the perspective of + // the consumer chain. + consumerValsets valset +} + +func makeDriver(t *testing.T, trace []traceStep) driver { + d := driver{} + d.t = t + s := makeStore() + require.NotNil(t, s.ckToMemo) + kd := MakeKeyMap(&s) + d.km = &kd + d.trace = trace + d.lastTimeProvider = 0 + d.lastTimeConsumer = 0 + d.lastTimeMaturity = 0 + d.mappings = []map[PK]CK{} + d.consumerUpdates = [][]update{} + d.providerValsets = []valset{} + d.consumerValsets = valset{} + return d +} + +// Utility struct to make simulating a validator set easier. +type valset struct { + keyToPower map[int]int +} + +func makeValset() valset { + return valset{keyToPower: map[int]int{}} +} + +// Apply a batch of (key, power) updates to the known validator set. +func (vs *valset) applyUpdates(updates []update) { + for _, u := range updates { + delete(vs.keyToPower, u.key) + if 0 < u.power { + vs.keyToPower[u.key] = u.power + } + } +} + +// Apply a list of (pk, ck) mapping requests to the KeyDel class instance +func (d *driver) applyKeyMapEntries(entries []keyMapEntry) { + for _, e := range entries { + // TRY to map provider key pk to consumer key ck. 
+ // (May fail due to API constraints, this is correct) + _ = d.km.SetProviderKeyToConsumerKey(e.pk, e.ck) + } + // Duplicate the mapping for referencing later in tests. + copy := map[PK]CK{} + for lk, fk := range d.km.pkToCk { + copy[lk] = fk + } + d.mappings = append(d.mappings, copy) +} + +// Apply a list of provider validator power updates +func (d *driver) applyProviderUpdates(providerUPdates []update) { + // Duplicate the previous valSet so that it can be referenced + // later in tests. + valSet := makeValset() + for pk, power := range d.providerValsets[d.lastTimeProvider].keyToPower { + valSet.keyToPower[pk] = power + } + valSet.applyUpdates(providerUPdates) + d.providerValsets = append(d.providerValsets, valSet) +} + +// Run a trace +// This includes bootstrapping the data structure with the first (init) +// step of the trace, and running a sequence of steps afterwards. +// Internal and external invariants (properties) of the data structure +// are tested after each step. +func (d *driver) run() { + + // Initialise + { + init := d.trace[0] + // Set the initial map + d.applyKeyMapEntries(init.keyMapEntries) + // Set the initial provider set + d.providerValsets = append(d.providerValsets, makeValset()) + d.providerValsets[init.timeProvider].applyUpdates(init.providerUpdates) + // Set the initial consumer set + d.consumerUpdates = append(d.consumerUpdates, d.km.ComputeUpdates(init.timeProvider, init.providerUpdates)) + // The first consumer set equal to the provider set at time 0 + d.consumerValsets = makeValset() + d.consumerValsets.applyUpdates(d.consumerUpdates[init.timeConsumer]) + d.km.PruneUnusedKeys(init.timeMaturity) + } + + // Sanity check the initial state + require.Len(d.t, d.mappings, 1) + require.Len(d.t, d.consumerUpdates, 1) + require.Len(d.t, d.providerValsets, 1) + + // Check properties for each step after the initial one + for _, s := range d.trace[1:] { + if d.lastTimeProvider < s.timeProvider { + // Provider time increase: + // Apply some new key mapping requests to KeyDel, and create new validator + // power updates. + d.applyKeyMapEntries(s.keyMapEntries) + d.applyProviderUpdates(s.providerUpdates) + // Store the updates, to reference later in tests. + d.consumerUpdates = append(d.consumerUpdates, d.km.ComputeUpdates(s.timeProvider, s.providerUpdates)) + d.lastTimeProvider = s.timeProvider + } + if d.lastTimeConsumer < s.timeConsumer { + // Consumer time increase: + // For each unit of time that has passed since the last increase, apply + // any updates which have been 'emitted' by a provider time increase step. + for j := d.lastTimeConsumer + 1; j <= s.timeConsumer; j++ { + d.consumerValsets.applyUpdates(d.consumerUpdates[j]) + } + d.lastTimeConsumer = s.timeConsumer + } + if d.lastTimeMaturity < s.timeMaturity { + // Maturity time increase: + // For each unit of time that has passed since the last increase, + // a maturity is 'available'. We test batch maturity. + d.km.PruneUnusedKeys(s.timeMaturity) + d.lastTimeMaturity = s.timeMaturity + } + + // Do checks + require.True(d.t, d.km.internalInvariants()) + d.externalInvariants() + } +} + +// Check invariants which are 'external' to the data structure being used. +// That is: these invariants make sense in the context of the wider system, +// and aren't specifically about the KeyDel data structure internal state. +// +// There are three invariants +// +// 1. Validator Set Replication +// 'All consumer validator sets are some earlier provider validator set' +// +// 2. 
Queries +// 'It is always possible to query the provider key for a given consumer +// key, when the consumer can still make slash requests' +// +// 3. Pruning +// 'When the pruning method is used correctly, the internal state of the +// data structure does not grow unboundedly' +// +// Please see body for details. +func (d *driver) externalInvariants() { + + /* + For a consumer who has received updates up to vscid i, its + provider validator set must be equal to the set on the provider + when i was sent, mapped through the mapping at that time. + */ + validatorSetReplication := func() { + + // Get the consumer set. + cSet := d.consumerValsets.keyToPower + // Get the provider set - at the corresponding time. + pSet := d.providerValsets[d.lastTimeConsumer].keyToPower + + // Compute a reverse lookup allowing comparison + // of the two sets. + cSetLikePSet := map[PK]int{} + { + mapping := d.mappings[d.lastTimeConsumer] + inverseMapping := map[CK]PK{} + for pk, ck := range mapping { + inverseMapping[ck] = pk + } + for ck, power := range cSet { + cSetLikePSet[inverseMapping[ck]] = power + } + } + + // Check that the two validator sets match exactly. + for pk, expectedPower := range pSet { + actualPower := cSetLikePSet[pk] + require.Equal(d.t, expectedPower, actualPower) + } + for pk, actualPower := range cSetLikePSet { + expectedPower := pSet[pk] + require.Equal(d.t, expectedPower, actualPower) + } + } + + /* + For any key that the consumer is aware of, because it has + received that key at some time in the past, and has not yet + returned the maturity vscid for its removal: + the key is useable as a query parameter to lookup the key + of the validator which should be slashed for misbehavior. + */ + queries := func() { + // For each key known to the consumer + for ck := range d.consumerValsets.keyToPower { + + // The query must return a result + pkQueried, err := d.km.GetProviderKey(ck) + require.Nil(d.t, err) + + // The provider key must be the one that was actually referenced + // in the latest trueMapping used to compute updates sent to the + // consumer. + cks_TRUE := map[CK]bool{} + trueMapping := d.mappings[d.lastTimeConsumer] + for pk_TRUE, ck_TRUE := range trueMapping { + + // Sanity check: no two provider keys should map to the same consumer key + require.Falsef(d.t, cks_TRUE[ck_TRUE], "two provider keys map to the same consumer key") + + // Record that this consumer key was indeed mapped to by some provider key + // at time lastTimeConsumer + cks_TRUE[ck_TRUE] = true + + // If the consumer key is the one used as a query param + if ck == ck_TRUE { + // Then the provider key returned by the query must be exactly + // the same one as was actually mapped to. + require.Equal(d.t, pk_TRUE, pkQueried) + } + } + // Check that the comparison was actually made, and that the test + // actually did something. + require.Truef(d.t, cks_TRUE[ck], "no mapping found for consumer key") + } + } + + /* + All keys that the consumer definitely cannot use as a parameter in + a slash request must eventually be pruned from state. + A consumer can still reference a key if the last update it received + for the key had a positive power associated to it, OR the last update + had a 0 power associated (deletion) but the maturity period for that + update has not yet elapsed (and the maturity was not yet received + on the provider chain). + */ + pruning := func() { + + // Do we expect to be able to query the provider key for a given consumer + // key? 
+ expectQueryable := map[CK]bool{} + + for i := 0; i <= d.lastTimeMaturity; i++ { + for _, u := range d.consumerUpdates[i] { + // If the latest update for a given consumer key was dispatched + // AND also matured since the last maturity, then + // 1) if that update was a positive power update then no subsequent + // zero power update can have matured. Thus the key should be + // queryable. + // 2) if that update was a zero positive power update then the + // key should not be queryable unless it was used in a subsquent + // update (see next block). + expectQueryable[u.key] = 0 < u.power + } + } + for i := d.lastTimeMaturity + 1; i <= d.lastTimeProvider; i++ { + for _, u := range d.consumerUpdates[i] { + // If a positive OR zero power update was RECENTLY received + // for the consumer, then the key must be queryable. + expectQueryable[u.key] = true + } + } + // If a consumer key is CURRENTLY mapped to by a provider key, it + // must be queryable. + for _, ck := range d.km.pkToCk { + expectQueryable[ck] = true + } + + // Simply check every consumer key for the correct queryable-ness. + for ck := 0; ck < NUM_CKS; ck++ { + _, err := d.km.GetProviderKey(ck) + actualQueryable := err == nil + if expect, found := expectQueryable[ck]; found && expect { + require.True(d.t, actualQueryable) + } else { + require.False(d.t, actualQueryable) + } + } + } + + validatorSetReplication() + queries() + pruning() + +} + +// Return a randomly generated list of steps +// which can be used to execute actions for testing. +func getTrace(t *testing.T) []traceStep { + + keyMappings := func() []keyMapEntry { + ret := []keyMapEntry{} + + const NUM_ITS = 2 // Chosen arbitrarily/heuristically + // Do this NUM_ITS times, to be able to generate conflicting mappings. + // This is allowed by the KeyDel API, so it must be tested. + for i := 0; i < NUM_ITS; i++ { + // include none (to) all validators + pks := rand.Perm(NUM_VALS)[0:rand.Intn(NUM_VALS+1)] + for _, pk := range pks { + ck := rand.Intn(NUM_CKS) + ret = append(ret, keyMapEntry{pk, ck}) + } + } + return ret + } + + providerUpdates := func() []update { + ret := []update{} + + // include none (to) all validators + pks := rand.Perm(NUM_VALS)[0:rand.Intn(NUM_VALS+1)] + for _, pk := range pks { + // Only three values are interesting + // 0: deletion + // 1: positive + // 2: positive (change) + power := rand.Intn(3) + ret = append(ret, update{key: pk, power: power}) + } + return ret + } + + // Get an initial key mapping. + // The real system may use some manual set defaults. + initialMappings := []keyMapEntry{} + for i := 0; i < NUM_VALS; i++ { + initialMappings = append(initialMappings, keyMapEntry{i, i}) + } + + ret := []traceStep{ + { + // Hard code initial mapping + keyMapEntries: initialMappings, + providerUpdates: providerUpdates(), + timeProvider: 0, + timeConsumer: 0, + timeMaturity: 0, + }, + } + + for i := 0; i < TRACE_LEN; i++ { + choice := rand.Intn(3) + last := ret[len(ret)-1] + if choice == 0 { + // Increment provider time, and generate + // new key mappings and validator updates. + ret = append(ret, traceStep{ + keyMapEntries: keyMappings(), + providerUpdates: providerUpdates(), + timeProvider: last.timeProvider + 1, + timeConsumer: last.timeConsumer, + timeMaturity: last.timeMaturity, + }) + } + if choice == 1 { + // If possible, increase consumer time. + // This models receiving VSC packets on the consumer. 
+ curr := last.timeConsumer + limInclusive := last.timeProvider + if curr < limInclusive { + // add in [1, limInclusive - curr] + // rand in [0, limInclusive - curr - 1] + // bound is [0, limInclusive - curr) + newTC := rand.Intn(limInclusive-curr) + curr + 1 + require.True(t, curr < newTC && newTC <= limInclusive) + ret = append(ret, traceStep{ + keyMapEntries: nil, + providerUpdates: nil, + timeProvider: last.timeProvider, + timeConsumer: newTC, + timeMaturity: last.timeMaturity, + }) + } + } + if choice == 2 { + // If possible, increase maturity time. + // This models sending maturities on the consumer (and also + // receiving them on the provider). + curr := last.timeMaturity + limInclusive := last.timeConsumer + if curr < limInclusive { + newTM := rand.Intn(limInclusive-curr) + curr + 1 + require.True(t, curr < newTM && newTM <= limInclusive) + ret = append(ret, traceStep{ + keyMapEntries: nil, + providerUpdates: nil, + timeProvider: last.timeProvider, + timeConsumer: last.timeConsumer, + timeMaturity: newTM, + }) + } + } + } + return ret +} + +// Execute randomly generated traces (lists of actions) +// against new instances of the class, checking properties +// after each action is done. +func TestPropertiesRandomlyHeuristically(t *testing.T) { + for i := 0; i < NUM_TRACES; i++ { + trace := []traceStep{} + for len(trace) < 2 { + trace = getTrace(t) + } + d := makeDriver(t, trace) + d.run() + } +} + +// Setting should enable a reverse query +func TestXSetReverseQuery(t *testing.T) { + s := makeStore() + kd := MakeKeyMap(&s) + kd.SetProviderKeyToConsumerKey(42, 43) + actual, err := kd.GetProviderKey(43) // Queryable + require.Nil(t, err) + require.Equal(t, 42, actual) +} + +// Not setting should not enable a reverse query +func TestNoSetReverseQuery(t *testing.T) { + s := makeStore() + kd := MakeKeyMap(&s) + _, err := kd.GetProviderKey(43) // Not queryable + require.NotNil(t, err) +} + +// Setting and replacing should no allow earlier reverse query +func TestXSetUnsetReverseQuery(t *testing.T) { + s := makeStore() + kd := MakeKeyMap(&s) + kd.SetProviderKeyToConsumerKey(42, 43) + kd.SetProviderKeyToConsumerKey(42, 44) // Set to different value + _, err := kd.GetProviderKey(43) // Ealier value not queryable + require.NotNil(t, err) +} + +// TODO: add more of these.. 
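For orientation alongside the randomized trace driver above, here is a minimal linear sketch of the same KeyMap API: assign a consumer key, translate a provider power update, query the reverse mapping, and prune. It assumes it compiles in the same keeper package as the files above (so KeyMap, store, and update are in scope); the test name TestKeyMapLifecycleSketch and the concrete key/power values are illustrative, not part of the patch.

package keeper

import (
	"testing"

	"github.com/stretchr/testify/require"
)

// A minimal, linear walk through the KeyMap lifecycle exercised heuristically
// by the driver above. Assumes the KeyMap, store and update definitions from
// earlier in this package.
func TestKeyMapLifecycleSketch(t *testing.T) {
	s := makeStore()
	km := MakeKeyMap(&s)

	// Provider key 42 is assigned consumer key 43.
	require.Nil(t, km.SetProviderKeyToConsumerKey(42, 43))

	// A power update for provider key 42 is shipped under consumer key 43.
	updates := km.ComputeUpdates(1, []update{{key: 42, power: 10}})
	require.Equal(t, []update{{key: 43, power: 10}}, updates)

	// Misbehaviour evidence carrying consumer key 43 maps back to 42.
	pk, err := km.GetProviderKey(43)
	require.Nil(t, err)
	require.Equal(t, 42, pk)

	// Pruning at vscid 1 keeps the key, because its last update had positive
	// power; only matured zero-power updates are pruned.
	km.PruneUnusedKeys(1)
	_, err = km.GetProviderKey(43)
	require.Nil(t, err)
}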
From 8c380e9adf45ae29154258712cdb5ae192f6e399 Mon Sep 17 00:00:00 2001 From: Daniel Date: Wed, 5 Oct 2022 17:20:45 -0500 Subject: [PATCH 125/127] bump --- x/ccv/provider/keeper/keymap_test.go | 524 --------------------------- 1 file changed, 524 deletions(-) delete mode 100644 x/ccv/provider/keeper/keymap_test.go diff --git a/x/ccv/provider/keeper/keymap_test.go b/x/ccv/provider/keeper/keymap_test.go deleted file mode 100644 index 2c978c63ae..0000000000 --- a/x/ccv/provider/keeper/keymap_test.go +++ /dev/null @@ -1,524 +0,0 @@ -package keeper - -import ( - "math/rand" - "testing" - - "github.com/stretchr/testify/require" -) - -// Num traces to run for heuristic testing -const NUM_TRACES = 1000 - -// Len of trace for a single heuristic testing run -const TRACE_LEN = 1001 - -// Number of validators to simulate -const NUM_VALS = 4 - -// Number of consumer keys in the universe -// (This is constrained to ensure overlap edge cases are tested) -const NUM_CKS = 50 - -type store struct { - pkToCk map[PK]CK - ckToPk map[CK]PK - ckToMemo map[CK]memo -} - -func makeStore() store { - return store{ - pkToCk: map[PK]CK{}, - ckToPk: map[CK]PK{}, - ckToMemo: map[CK]memo{}, - } -} -func (s *store) getPkToCk() map[PK]CK { - return s.pkToCk -} -func (s *store) getCkToPk() map[CK]PK { - return s.ckToPk -} -func (s *store) getCkToMemo() map[CK]memo { - return s.ckToMemo -} -func (s *store) setPkToCk(e map[PK]CK) { - s.pkToCk = e -} -func (s *store) setCkToPk(e map[CK]PK) { - s.ckToPk = e -} -func (s *store) setCkToMemo(e map[CK]memo) { - s.ckToMemo = e -} - -type keyMapEntry struct { - pk PK - ck CK -} - -type traceStep struct { - keyMapEntries []keyMapEntry - providerUpdates []update - timeProvider int - timeConsumer int - timeMaturity int -} - -type driver struct { - t *testing.T - km *KeyMap - trace []traceStep - lastTimeProvider int - lastTimeConsumer int - lastTimeMaturity int - // indexed by time (starting at 0) - mappings []map[PK]CK - // indexed by time (starting at 0) - consumerUpdates [][]update - // indexed by time (starting at 0) - providerValsets []valset - // The validator set from the perspective of - // the consumer chain. - consumerValsets valset -} - -func makeDriver(t *testing.T, trace []traceStep) driver { - d := driver{} - d.t = t - s := makeStore() - require.NotNil(t, s.ckToMemo) - kd := MakeKeyMap(&s) - d.km = &kd - d.trace = trace - d.lastTimeProvider = 0 - d.lastTimeConsumer = 0 - d.lastTimeMaturity = 0 - d.mappings = []map[PK]CK{} - d.consumerUpdates = [][]update{} - d.providerValsets = []valset{} - d.consumerValsets = valset{} - return d -} - -// Utility struct to make simulating a validator set easier. -type valset struct { - keyToPower map[int]int -} - -func makeValset() valset { - return valset{keyToPower: map[int]int{}} -} - -// Apply a batch of (key, power) updates to the known validator set. -func (vs *valset) applyUpdates(updates []update) { - for _, u := range updates { - delete(vs.keyToPower, u.key) - if 0 < u.power { - vs.keyToPower[u.key] = u.power - } - } -} - -// Apply a list of (pk, ck) mapping requests to the KeyDel class instance -func (d *driver) applyKeyMapEntries(entries []keyMapEntry) { - for _, e := range entries { - // TRY to map provider key pk to consumer key ck. - // (May fail due to API constraints, this is correct) - _ = d.km.SetProviderKeyToConsumerKey(e.pk, e.ck) - } - // Duplicate the mapping for referencing later in tests. 
- copy := map[PK]CK{} - for lk, fk := range d.km.pkToCk { - copy[lk] = fk - } - d.mappings = append(d.mappings, copy) -} - -// Apply a list of provider validator power updates -func (d *driver) applyProviderUpdates(providerUPdates []update) { - // Duplicate the previous valSet so that it can be referenced - // later in tests. - valSet := makeValset() - for pk, power := range d.providerValsets[d.lastTimeProvider].keyToPower { - valSet.keyToPower[pk] = power - } - valSet.applyUpdates(providerUPdates) - d.providerValsets = append(d.providerValsets, valSet) -} - -// Run a trace -// This includes bootstrapping the data structure with the first (init) -// step of the trace, and running a sequence of steps afterwards. -// Internal and external invariants (properties) of the data structure -// are tested after each step. -func (d *driver) run() { - - // Initialise - { - init := d.trace[0] - // Set the initial map - d.applyKeyMapEntries(init.keyMapEntries) - // Set the initial provider set - d.providerValsets = append(d.providerValsets, makeValset()) - d.providerValsets[init.timeProvider].applyUpdates(init.providerUpdates) - // Set the initial consumer set - d.consumerUpdates = append(d.consumerUpdates, d.km.ComputeUpdates(init.timeProvider, init.providerUpdates)) - // The first consumer set equal to the provider set at time 0 - d.consumerValsets = makeValset() - d.consumerValsets.applyUpdates(d.consumerUpdates[init.timeConsumer]) - d.km.PruneUnusedKeys(init.timeMaturity) - } - - // Sanity check the initial state - require.Len(d.t, d.mappings, 1) - require.Len(d.t, d.consumerUpdates, 1) - require.Len(d.t, d.providerValsets, 1) - - // Check properties for each step after the initial one - for _, s := range d.trace[1:] { - if d.lastTimeProvider < s.timeProvider { - // Provider time increase: - // Apply some new key mapping requests to KeyDel, and create new validator - // power updates. - d.applyKeyMapEntries(s.keyMapEntries) - d.applyProviderUpdates(s.providerUpdates) - // Store the updates, to reference later in tests. - d.consumerUpdates = append(d.consumerUpdates, d.km.ComputeUpdates(s.timeProvider, s.providerUpdates)) - d.lastTimeProvider = s.timeProvider - } - if d.lastTimeConsumer < s.timeConsumer { - // Consumer time increase: - // For each unit of time that has passed since the last increase, apply - // any updates which have been 'emitted' by a provider time increase step. - for j := d.lastTimeConsumer + 1; j <= s.timeConsumer; j++ { - d.consumerValsets.applyUpdates(d.consumerUpdates[j]) - } - d.lastTimeConsumer = s.timeConsumer - } - if d.lastTimeMaturity < s.timeMaturity { - // Maturity time increase: - // For each unit of time that has passed since the last increase, - // a maturity is 'available'. We test batch maturity. - d.km.PruneUnusedKeys(s.timeMaturity) - d.lastTimeMaturity = s.timeMaturity - } - - // Do checks - require.True(d.t, d.km.internalInvariants()) - d.externalInvariants() - } -} - -// Check invariants which are 'external' to the data structure being used. -// That is: these invariants make sense in the context of the wider system, -// and aren't specifically about the KeyDel data structure internal state. -// -// There are three invariants -// -// 1. Validator Set Replication -// 'All consumer validator sets are some earlier provider validator set' -// -// 2. Queries -// 'It is always possible to query the provider key for a given consumer -// key, when the consumer can still make slash requests' -// -// 3. 
Pruning -// 'When the pruning method is used correctly, the internal state of the -// data structure does not grow unboundedly' -// -// Please see body for details. -func (d *driver) externalInvariants() { - - /* - For a consumer who has received updates up to vscid i, its - provider validator set must be equal to the set on the provider - when i was sent, mapped through the mapping at that time. - */ - validatorSetReplication := func() { - - // Get the consumer set. - cSet := d.consumerValsets.keyToPower - // Get the provider set - at the corresponding time. - pSet := d.providerValsets[d.lastTimeConsumer].keyToPower - - // Compute a reverse lookup allowing comparison - // of the two sets. - cSetLikePSet := map[PK]int{} - { - mapping := d.mappings[d.lastTimeConsumer] - inverseMapping := map[CK]PK{} - for pk, ck := range mapping { - inverseMapping[ck] = pk - } - for ck, power := range cSet { - cSetLikePSet[inverseMapping[ck]] = power - } - } - - // Check that the two validator sets match exactly. - for pk, expectedPower := range pSet { - actualPower := cSetLikePSet[pk] - require.Equal(d.t, expectedPower, actualPower) - } - for pk, actualPower := range cSetLikePSet { - expectedPower := pSet[pk] - require.Equal(d.t, expectedPower, actualPower) - } - } - - /* - For any key that the consumer is aware of, because it has - received that key at some time in the past, and has not yet - returned the maturity vscid for its removal: - the key is useable as a query parameter to lookup the key - of the validator which should be slashed for misbehavior. - */ - queries := func() { - // For each key known to the consumer - for ck := range d.consumerValsets.keyToPower { - - // The query must return a result - pkQueried, err := d.km.GetProviderKey(ck) - require.Nil(d.t, err) - - // The provider key must be the one that was actually referenced - // in the latest trueMapping used to compute updates sent to the - // consumer. - cks_TRUE := map[CK]bool{} - trueMapping := d.mappings[d.lastTimeConsumer] - for pk_TRUE, ck_TRUE := range trueMapping { - - // Sanity check: no two provider keys should map to the same consumer key - require.Falsef(d.t, cks_TRUE[ck_TRUE], "two provider keys map to the same consumer key") - - // Record that this consumer key was indeed mapped to by some provider key - // at time lastTimeConsumer - cks_TRUE[ck_TRUE] = true - - // If the consumer key is the one used as a query param - if ck == ck_TRUE { - // Then the provider key returned by the query must be exactly - // the same one as was actually mapped to. - require.Equal(d.t, pk_TRUE, pkQueried) - } - } - // Check that the comparison was actually made, and that the test - // actually did something. - require.Truef(d.t, cks_TRUE[ck], "no mapping found for consumer key") - } - } - - /* - All keys that the consumer definitely cannot use as a parameter in - a slash request must eventually be pruned from state. - A consumer can still reference a key if the last update it received - for the key had a positive power associated to it, OR the last update - had a 0 power associated (deletion) but the maturity period for that - update has not yet elapsed (and the maturity was not yet received - on the provider chain). - */ - pruning := func() { - - // Do we expect to be able to query the provider key for a given consumer - // key? 
-		expectQueryable := map[CK]bool{}
-
-		for i := 0; i <= d.lastTimeMaturity; i++ {
-			for _, u := range d.consumerUpdates[i] {
-				// If the latest update for a given consumer key was dispatched
-				// AND also matured since the last maturity, then
-				// 1) if that update was a positive power update then no subsequent
-				//    zero power update can have matured. Thus the key should be
-				//    queryable.
-				// 2) if that update was a zero power update then the
-				//    key should not be queryable unless it was used in a subsequent
-				//    update (see next block).
-				expectQueryable[u.key] = 0 < u.power
-			}
-		}
-		for i := d.lastTimeMaturity + 1; i <= d.lastTimeProvider; i++ {
-			for _, u := range d.consumerUpdates[i] {
-				// If a positive OR zero power update was RECENTLY received
-				// for the consumer, then the key must be queryable.
-				expectQueryable[u.key] = true
-			}
-		}
-		// If a consumer key is CURRENTLY mapped to by a provider key, it
-		// must be queryable.
-		for _, ck := range d.km.pkToCk {
-			expectQueryable[ck] = true
-		}
-
-		// Simply check every consumer key for the correct queryable-ness.
-		for ck := 0; ck < NUM_CKS; ck++ {
-			_, err := d.km.GetProviderKey(ck)
-			actualQueryable := err == nil
-			if expect, found := expectQueryable[ck]; found && expect {
-				require.True(d.t, actualQueryable)
-			} else {
-				require.False(d.t, actualQueryable)
-			}
-		}
-	}
-
-	validatorSetReplication()
-	queries()
-	pruning()
-
-}
-
-// Return a randomly generated list of steps
-// which can be used to execute actions for testing.
-func getTrace(t *testing.T) []traceStep {
-
-	keyMappings := func() []keyMapEntry {
-		ret := []keyMapEntry{}
-
-		const NUM_ITS = 2 // Chosen arbitrarily/heuristically
-		// Do this NUM_ITS times, to be able to generate conflicting mappings.
-		// This is allowed by the KeyDel API, so it must be tested.
-		for i := 0; i < NUM_ITS; i++ {
-			// include anywhere from none to all of the validators
-			pks := rand.Perm(NUM_VALS)[0:rand.Intn(NUM_VALS+1)]
-			for _, pk := range pks {
-				ck := rand.Intn(NUM_CKS)
-				ret = append(ret, keyMapEntry{pk, ck})
-			}
-		}
-		return ret
-	}
-
-	providerUpdates := func() []update {
-		ret := []update{}
-
-		// include anywhere from none to all of the validators
-		pks := rand.Perm(NUM_VALS)[0:rand.Intn(NUM_VALS+1)]
-		for _, pk := range pks {
-			// Only three values are interesting
-			// 0: deletion
-			// 1: positive
-			// 2: positive (change)
-			power := rand.Intn(3)
-			ret = append(ret, update{key: pk, power: power})
-		}
-		return ret
-	}
-
-	// Get an initial key mapping.
-	// The real system may use some manually set defaults.
-	initialMappings := []keyMapEntry{}
-	for i := 0; i < NUM_VALS; i++ {
-		initialMappings = append(initialMappings, keyMapEntry{i, i})
-	}
-
-	ret := []traceStep{
-		{
-			// Hard code initial mapping
-			keyMapEntries:   initialMappings,
-			providerUpdates: providerUpdates(),
-			timeProvider:    0,
-			timeConsumer:    0,
-			timeMaturity:    0,
-		},
-	}
-
-	for i := 0; i < TRACE_LEN; i++ {
-		choice := rand.Intn(3)
-		last := ret[len(ret)-1]
-		if choice == 0 {
-			// Increment provider time, and generate
-			// new key mappings and validator updates.
-			ret = append(ret, traceStep{
-				keyMapEntries:   keyMappings(),
-				providerUpdates: providerUpdates(),
-				timeProvider:    last.timeProvider + 1,
-				timeConsumer:    last.timeConsumer,
-				timeMaturity:    last.timeMaturity,
-			})
-		}
-		if choice == 1 {
-			// If possible, increase consumer time.
-			// This models receiving VSC packets on the consumer.
-			curr := last.timeConsumer
-			limInclusive := last.timeProvider
-			if curr < limInclusive {
-				// add in [1, limInclusive - curr]
-				// rand in [0, limInclusive - curr - 1]
-				// bound is [0, limInclusive - curr)
-				newTC := rand.Intn(limInclusive-curr) + curr + 1
-				require.True(t, curr < newTC && newTC <= limInclusive)
-				ret = append(ret, traceStep{
-					keyMapEntries:   nil,
-					providerUpdates: nil,
-					timeProvider:    last.timeProvider,
-					timeConsumer:    newTC,
-					timeMaturity:    last.timeMaturity,
-				})
-			}
-		}
-		if choice == 2 {
-			// If possible, increase maturity time.
-			// This models sending maturities on the consumer (and also
-			// receiving them on the provider).
-			curr := last.timeMaturity
-			limInclusive := last.timeConsumer
-			if curr < limInclusive {
-				newTM := rand.Intn(limInclusive-curr) + curr + 1
-				require.True(t, curr < newTM && newTM <= limInclusive)
-				ret = append(ret, traceStep{
-					keyMapEntries:   nil,
-					providerUpdates: nil,
-					timeProvider:    last.timeProvider,
-					timeConsumer:    last.timeConsumer,
-					timeMaturity:    newTM,
-				})
-			}
-		}
-	}
-	return ret
-}
-
-// Execute randomly generated traces (lists of actions)
-// against new instances of the class, checking properties
-// after each action is done.
-func TestPropertiesRandomlyHeuristically(t *testing.T) {
-	for i := 0; i < NUM_TRACES; i++ {
-		trace := []traceStep{}
-		for len(trace) < 2 {
-			trace = getTrace(t)
-		}
-		d := makeDriver(t, trace)
-		d.run()
-	}
-}
-
-// Setting should enable a reverse query
-func TestXSetReverseQuery(t *testing.T) {
-	s := makeStore()
-	kd := MakeKeyMap(&s)
-	kd.SetProviderKeyToConsumerKey(42, 43)
-	actual, err := kd.GetProviderKey(43) // Queryable
-	require.Nil(t, err)
-	require.Equal(t, 42, actual)
-}
-
-// Not setting should not enable a reverse query
-func TestNoSetReverseQuery(t *testing.T) {
-	s := makeStore()
-	kd := MakeKeyMap(&s)
-	_, err := kd.GetProviderKey(43) // Not queryable
-	require.NotNil(t, err)
-}
-
-// Setting and replacing should not allow an earlier reverse query
-func TestXSetUnsetReverseQuery(t *testing.T) {
-	s := makeStore()
-	kd := MakeKeyMap(&s)
-	kd.SetProviderKeyToConsumerKey(42, 43)
-	kd.SetProviderKeyToConsumerKey(42, 44) // Set to different value
-	_, err := kd.GetProviderKey(43)        // Earlier value not queryable
-	require.NotNil(t, err)
-}
-
-// TODO: add more of these..
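The TODO above asks for more unit tests of this kind. Purely as an illustration (not part of any patch in this series), one more test in the same style might look like the sketch below; it assumes the same imports and helpers used by the tests above (`makeStore`, `MakeKeyMap`, `SetProviderKeyToConsumerKey`, `GetProviderKey`, `require`).

```go
// Illustrative sketch only: two distinct mappings should both be reverse-queryable.
func TestXSetTwoReverseQueries(t *testing.T) {
	s := makeStore()
	kd := MakeKeyMap(&s)
	kd.SetProviderKeyToConsumerKey(42, 43)
	kd.SetProviderKeyToConsumerKey(44, 45)
	// Each consumer key should map back to the provider key that set it.
	pk42, err := kd.GetProviderKey(43)
	require.Nil(t, err)
	require.Equal(t, 42, pk42)
	pk44, err := kd.GetProviderKey(45)
	require.Nil(t, err)
	require.Equal(t, 44, pk44)
}
```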
From e2e3d885da7c10194b6eb3ff8801a88b30a288e0 Mon Sep 17 00:00:00 2001
From: Daniel
Date: Wed, 5 Oct 2022 18:20:17 -0500
Subject: [PATCH 126/127] Scratch up a basic doc

---
 docs/keymap/README.md                 | 116 +++++++++++
 docs/keymap/diagrams/dummy.excalidraw | 284 ++++++++++++++++++++++++++
 docs/keymap/diagrams/dummy.png        | Bin 0 -> 103552 bytes
 x/ccv/provider/keeper/keymap_core.go  |   2 +
 4 files changed, 402 insertions(+)
 create mode 100644 docs/keymap/README.md
 create mode 100644 docs/keymap/diagrams/dummy.excalidraw
 create mode 100644 docs/keymap/diagrams/dummy.png

diff --git a/docs/keymap/README.md b/docs/keymap/README.md
new file mode 100644
index 0000000000..dc8d896b67
--- /dev/null
+++ b/docs/keymap/README.md
@@ -0,0 +1,116 @@
+# KeyMap
+
+KeyMap is the name of the feature that allows validator operators to use a different consensus key for each consumer chain validator node that they operate.
+
+Validators can improve their security by using different consensus keys for each chain. That way, different teams in an organization can operate a subset (possibly of size 1) of the total number of consumer chains associated with a provider chain. If one key leaks, the other keys will not be at risk. It is possible to change the keys at any time by submitting a transaction.
+
+## Overview
+
+The KeyMap feature is available via a provider chain API (transactions and queries). The provider chain validator operator submits a mapping transaction to the provider chain with a consumer chain ID and the desired consensus key as parameters. The IBC protocol used by Interchain Security takes care of forwarding the mapping to the specified consumer chain. When the consumer chain receives the key, it will immediately start using it with Tendermint.
+
+It is possible to start validating a consumer chain with the same key as used for the provider. It is also possible to specify another key to use when joining the validator set. Moreover, it is possible to change the key in use at any time, even multiple times, subject to some minor restrictions.
+
+## API (High level)
+
+**Writes**
+
+```go
+// Associates a new consumer key as the consensus key on the consumer chain
+// for the validator on the provider chain associated with the provider key.
+SetConsumerKey(providerKey, consumerChainID, consumerKey) {
+}
+```
+
+**Reads**
+
+```go
+// Returns the last consumerKey associated with the provider key and
+// the consumer chain by a call to SetConsumerKey.
+GetConsumerKey(providerKey, consumerChainID) {
+}
+```
+
+```go
+// Returns the last providerKey associated with consumerKey and the consumer
+// chain by a call to SetConsumerKey.
+GetProviderKey(consumerKey, consumerChainID) {
+}
+```
+
+## API (Details)
+
+**Writes**
+
+```go
+// Attempts to associate a new consumer key consumerKey on the consumer chain
+// specified by consumerChainID to the validator on the provider chain
+// specified by providerKey.
+// If the attempt succeeds, the consumer chain will start using consumerKey as
+// its consensus key from the earliest block at which it receives the update
+// via IBC.
+// The attempt can fail if any of the arguments are invalid, or if either chain
+// or the IBC connection is faulty.
+// The attempt can additionally fail if the key consumerKey was already used
+// for a mapping with the KeyMap feature too recently in the past. This is
+// to prevent attacks. In particular, once a key is used in a KeyMap association,
+// that key is no longer usable for another association until the first
+// association is cancelled, and an acknowledgement of the cancellation is
+// received from the consumer chain and processed on the provider chain.
+SetConsumerKey(providerKey, consumerChainID, consumerKey) {
+	// TODO: signatures, types
+}
+```
+
+**Reads**
+
+```go
+// Returns the last consumerKey associated with the provider key and
+// the consumer chain by a call to SetConsumerKey.
+// TODO: more detail needed?
+GetConsumerKey(providerKey, consumerChainID) {
+}
+```
+
+```go
+// Returns the last providerKey associated with consumerKey and the consumer
+// chain by a call to SetConsumerKey.
+// TODO: more detail needed?
+GetProviderKey(consumerKey, consumerChainID) {
+}
+```
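The concrete signatures and types are still marked TODO above, so the following is only a rough, non-authoritative sketch of how a caller might combine these three operations. The `Key` type, the `KeyMapStore` interface, the `RoundTrip` helper, and the example chain ID are assumptions made for illustration; they are not the actual provider module API.

```go
package keymapexample

import "fmt"

// Key stands in for whatever concrete consensus key type the module ends up using.
type Key = string

// KeyMapStore is a hypothetical handle exposing the three operations described above.
type KeyMapStore interface {
	SetConsumerKey(providerKey Key, consumerChainID string, consumerKey Key) error
	GetConsumerKey(providerKey Key, consumerChainID string) Key
	GetProviderKey(consumerKey Key, consumerChainID string) Key
}

// RoundTrip shows the intended relationship between the operations:
// after a successful SetConsumerKey, the forward and reverse lookups agree.
func RoundTrip(km KeyMapStore, providerKey, consumerKey Key) error {
	const consumerChainID = "consumer-0" // hypothetical consumer chain ID

	if err := km.SetConsumerKey(providerKey, consumerChainID, consumerKey); err != nil {
		return err // e.g. consumerKey was used for a mapping too recently (see above)
	}

	ck := km.GetConsumerKey(providerKey, consumerChainID) // expect consumerKey
	pk := km.GetProviderKey(ck, consumerChainID)          // expect providerKey

	if ck != consumerKey || pk != providerKey {
		return fmt.Errorf("lookups do not round-trip: got (%v, %v)", ck, pk)
	}
	return nil
}
```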
+### External Properties - Interchain Security
+
+KeyMap has some properties that are relevant to the external user:
+
+1. Validator Set Replication\
+When the Interchain Security property [Validator Set Replication](https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/system_model_and_properties.md#system-properties) holds for an implementation without KeyMap, then the property also holds when KeyMap is used.
+2. Slashable Consumer Misbehavior\
+When the Interchain Security property [Slashable Consumer Misbehavior](https://github.com/cosmos/ibc/blob/main/spec/app/ics-028-cross-chain-validation/system_model_and_properties.md#system-properties) holds for an implementation without KeyMap, then the property also holds when KeyMap is used.
+
+In fact, all Interchain Security properties still hold when KeyMap is used; the above are just the most relevant.
+
+### External Properties - timeliness
+
+When a call to `SetConsumerKey` succeeds for a given `(providerKey, consumerChainID)` tuple at block height `hp0`, and is not followed by a subsequent call for the same tuple before or during a height `hp1` (`hp0 <= hp1`) at which a validator set update packet is committed at the provider chain, then at the next earliest height `hc2` on the consumer chain at which that packet is received, the `consumerKey` is passed as the consensus key to Tendermint. Thus Tendermint will expect a signature from `consumerKey` from height `hc2 + 1`.
+
+TODO: check, test, correct, guarantee and formalize this.
+
+### Internal properties
+
+The KeyMap implementation satisfies a number of internal properties, which are used to guarantee the external properties. These are only relevant to system internals. They are, briefly:
+
+1. Validator Set Replication\
+'All consumer validator sets are some earlier provider validator set'
+2. Queries\
+'It is always possible to query the provider key for a given consumer key, when the consumer can still make slash requests'
+3. Pruning\
+'When the pruning method is used correctly, the internal state of the data structure does not grow unboundedly'
+
+Details can be found in x/ccv/provider/keeper/keymap_core_test.go (a minimal sketch of property 1 is given below). TODO: link?
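As a rough illustration of what checking internal property 1 involves, the sketch below mirrors the inverse-mapping comparison used by the prototype driver test earlier in this series. The `PK`, `CK`, and power-map types, and the `validatorSetsMatch` helper, are assumptions for illustration only; in the prototype tests the keys are plain ints.

```go
package keymapexample

// PK and CK stand in for provider and consumer consensus keys.
type PK = int
type CK = int

// validatorSetsMatch sketches internal property 1 for a single consumer:
// the consumer validator set must equal the provider validator set that was
// in force when the corresponding update was sent, mapped through the key
// mapping that was in force at that time.
func validatorSetsMatch(
	consumerSet map[CK]int, // consumer key -> power
	providerSet map[PK]int, // provider key -> power, at the matching time
	mapping map[PK]CK, // provider key -> consumer key, at the matching time
) bool {
	// Invert the mapping so the consumer set can be compared per provider key.
	inverse := map[CK]PK{}
	for pk, ck := range mapping {
		inverse[ck] = pk
	}
	seen := map[PK]int{}
	for ck, power := range consumerSet {
		pk, ok := inverse[ck]
		if !ok {
			return false // the consumer holds a key the mapping never produced
		}
		seen[pk] = power
	}
	// The two sets must match exactly, in both directions
	// (missing keys count as zero power).
	for pk, power := range providerSet {
		if seen[pk] != power {
			return false
		}
	}
	for pk, power := range seen {
		if providerSet[pk] != power {
			return false
		}
	}
	return true
}
```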
+![Dummy](./diagrams/dummy.png)
+
+footer placeholder
\ No newline at end of file
diff --git a/docs/keymap/diagrams/dummy.excalidraw b/docs/keymap/diagrams/dummy.excalidraw
new file mode 100644
index 0000000000..31a8e76861
--- /dev/null
+++ b/docs/keymap/diagrams/dummy.excalidraw
@@ -0,0 +1,284 @@
[dummy.excalidraw: Excalidraw JSON source for the placeholder diagram (one rectangle and one freehand scribble); coordinate data omitted.]
diff --git a/docs/keymap/diagrams/dummy.png b/docs/keymap/diagrams/dummy.png
new file mode 100644
index 0000000000000000000000000000000000000000..a5efed1abd727a22556a3bcb6db15e2d638821cc
GIT binary patch
literal 103552
[dummy.png: binary image data omitted.]

t*I;y8kQ3-9gT}^O)NeK_@xwwXp4t0HU`*zH9UKyz{V&c3@1 z&4(5?PnBjASvobHyQA9lsTxL3!>hZt%wkRxH?F>Tv8QG8mze&; zzm6uaY)8mlLrb3{DxvHuK3>*o(Yy5VFoqzOCYW`1!m$)ZZM<93onHn*&BCyw=8 zcqZe7tWTmZS#rNGEg0?8-0c$r0^#N%dNzoMj8z9mh?U|L?DU{r?_3 z`r98?a9rbw9``DjgCk$qMxo5s%nRk$~vLE zsyk74+)t1B_Pm}ysLzC7q^$*Y;(fNcPE8`=E3{}zwE#-PtO~oK2jHrB@YuJ z8wwE;de?+UL!!4JW=A~ryhJ?A8d{5j>T$7r?WyIFDAj*#E->_6a7y{dZT;n;_)jZ1 zBrA{4uK=%`{es8p-WXIY4nMzstme^3-BHqYQ4iDd(nw!ARaz(vlIf*McFm2QII4Vx z4j&c6hldaO`Lq4{{E%_@j5ZBrGTTve@7ZYNI53$1fi~(YzXyG zWmC4?90nV`0~jNnLrd>uLdLu-!0V~(?Z&>c4hbd#(V{)%x+DS2{T&QboH}XXn)xPE zmEJ$%8h`n!Kr#Cz4gCDm!y~$~W#>qnJ}EQwa}9O%PI@GXha$|*g@X;aEOg*w*MLGj3mlT2;5(ExW zoaCGo=WebArfZzScU5PJZhm~aXM?X@bU#WPj6pSu+2>`NIsrl^0M|GrTY`Fe93Yo+ z>4M(BzuDZ6FCia-J!n3$WaJU_310bXf5jlg={#^t?KU;p;@X@g-LMoQ)h0*a0b@)O zj-MAdz4ya*O*Igo!3wATO@Wkk&3OD^XhhhhY#cU^g>O!?4jT|CeK&X}p|fJGEr{j3)VE3$-xF0uVG zgj&nVFTcFAzw(loYGy0liL?#OQ=9BCeK?ByOinX7P5_iuXBPzDjPWF!d;cQAQV$-X z&DTWf18Odw4H#T95#!@6Xw}u{?IV~S<9NsRmXBx?zWjB$y0+Jll+TNaO*<>=XcuSk zLcBa-d#M!cXRXi@^ToeLq93i|FPECX1?3jq#P5E9QF5VdH|*5;h2?VKjmE&53GhZ& zx=X)lhpV1VcR6?-+Xl&X37s&Dx%OpA)A6<0r?rQ4-ls%4S27&U1 zsJkOYEsEn9Zfq(1j1efrJa6w$wnYlH|MeSS^fJ|@%~uZ2f)hRWj4=?Z7o`XnU3>i)|d*RuOnjW(jj%4Cr*z$6I#|1HacqV9CjMNwE z-r2kR{p_q8xHIQ$(#ckEUDv#wcVWHo-yAxLy4n-;cdv!#@$Njlt-a-alcPm)L!xF^ z)!82+@b3owp*xP)OCA#7c3GA{l>2Y{kmtqPunM9z{vD<@tajh9*@x@GqWI8IK-^uU zBj-(;_#7`9%h!iT)5KzL;$uI+l?ECX`m-ivj?@L*E}{3ALjM)Le|tlU9PTsfb_j5d zpvH_@^KUHa{g|1!WGd9sm7zETyLXm{DHJJ~bH;RuYRf=LPytFL`_Nh3Cbqk-P_%r@ zg<9~$T1?I^sNqYA01J)g7B8;5Z8>jyg>7n23?_A?YYjxTsmKb1^czE(ka7>{gI@y{ zoSKLvUYR>npm%I_X(+g^TRO@XkyPnPIVbzocdqCHeWxSVk=ZrtJ;BMmlLXR ztj_W`MyI~Oh^n!@gOqgx1qX%WHhGoDehcTbQG`%L;=ih}KZnCzo$cwsm**QAP!4%p zmSmqSR~L1+0>O`I00CmZ4^U&MucH*;%Nu6^O=E7PVM>RxDug$1@#Yu-YlT_kZ`y%f z?gO_l*=j$OR;^G`s-7Pi%MqS=JYn_f)$kWuiG!WO9|42 z+`aj^s{%4f6sIMk)t)bL8vZuhsS<+-bf@YGmS ziZWxMG6`mHvQYOwwJ6;4CY0<{?z)T$y0stD?zZGc%S4|A{(T_%nQCxF#kXyna5csD zGo^w)-n?-mk%rhc*Ld%#MUaQ*6f?~Ud|RI4oZLXM)jCgu31*wz-u6r`*_&EEX=H*r z8&Y7-wAsbPWxysTiJ6UuB$4f%sWHb2qw`-c*&Jw zmr)CyGZGQW;qZWwl9Mqd^h85@YXkrOe@0}s>bxZh7PCdWFPshy(^*6FoInK!kRv?- zg$6GpTswCO&_w@Lev+c)0Wd);An)pS3JPDEYqg%PncaKFH(3KmPnVr=s(tm#{tLFQ ztHViY8_g@Vo4|%D>i>x?#pX?&L@5EIhBHh^V9@sgQZ(6TLlz}QV%e(=pC&uD?|pmW zP-Ewi8GfTt7;#MIU$l3JAFgm^5(V8cW;+q>I!)r{VbRM_!<)?BbZF(LVWE)@hjG;A zLxk+t;DN*^5jv*EPUx0`kg_nfM+B?;C}8QZ7q5C|ox>t?;r4M z%C4oe$(yEDggaQnv~dfNAeTk|OB_3Lc# z<&R5|>*AiNw)p~BjD*|JY^^T^q^y6~%JQ-9l0i&%qAKA7X`?@aYhq_QUx2xmA~Dl^ zaovJ^U4;Iv2XIhLK(~CL%b0#Lq#x-p!8PnphKgbLamsAEJ;g1G@1a+(za1|D>y@q6rjCf3^+CBuxAOPJb9`<(91YmED4+MM7BbCLBhSvT^uO+Gr) zoq)aA>CP66id}CJuOWMnihL*~8YKhlcUF>6p%sQ{$%9d5Og8)F!>Jy}F?aoSL03a} z*BmOtYOsSXWY+Ge0RG$U3Fcyvf!({y4k;M?at~K;o^F_->PKn@L!~7*miC@ks1vdy zz4~V~0eZ1_9NF+#5nQ@?s1Pa!IwvjfU!)_wZR*6B)*#GbWwLgPwa?cn-zTXkn<=qI ze$1xHcIQaX1`Cj7%@N zv8$;QgFCrbFZ#GN*%<;QV{{TlGV!MO_vP2+-6;|}pr!>2M?{}E$s%rTS0}CzItDK^ zwY&lSDj%W#e8WnRi{s6Gr6TQmPlHZP+kvLsd%*ERC7l2jyx>}PD>XaY1cPKo{lOij z%ef6n047UqG_~PJjTeD1AgTm6W?p^mBK8yG@LTF4KdV1hUGCH_(uGbQpLJbV_aNo0 zesXSt%Gm`O=OQs_@guYxZ`!Y0ym&yy32P_zHDiGJ@44}hf8(<5@1q05X;`~3+}!0; z-S7MXbs<++4w|m%4OJ>{<21K9!8+a0vht=KK}a06k6TRE3^tBp#>KR}0OziSBt`RP zFI^6Frzv>JG6z&f-QcUQg9)XCrm+)JupsN&e1=QMJPuVFmD}J70z8nI35*Ks_uCf#MAfVu^EN+x{c&#G2%Co)kFuKD z8(r6Bc7IaEfq}}Odj$7_hv(?lfR7*Z}A*Z=b1@xyduSIvJU zPG)J|vf@3zSDA*UjsWLuz0=vT1nCKKH2;6Sc*+Cr8{_^3j{41( z@1B(9pSua8iRC}iS$XSNEdTK*03N9Oh&p$55J^K^Ig^J*v%XX0hjGCIVuCx#zCoiM z=g`!iVtIodHBU5565qy!7WIt;e3!X#drM!H>V1EouL#u60g??^*7>E`{Q1?wD%c@e zP0jOQuU0*P?Ft;c%vQ(=Y*0R(L9U|!6d4mU#5hXr5)>IYBAkGFa|0Fj_nc)o4sK`6 zRcw6oNH}&C0J{dDm3Gty1zI<;|#$V?fEh25oa(vDheAd*~ 
z-4n~I+Uo|H;97=I+G2v3v$_rT^!~^b!a0BwT=O@ta+Re4t0r>$&(fDekm-JaZa#nK z?r+hof#BL_kY?oZV6qa{2F<;bcQci+(Ng&4qWC7LfV01;Put+Nw z3XfaI(3p{g2jMueeCQ>Vl8!ceyfSN9YHs5bj;S=W(Xrv%@hCRO8vk6)#N2@CA0Ueo z(sKko8-xz8o+r1v<<}+X4iozN&)E3K^B$>xev&V|8!YiAFJ^|;V?|`aGiI4S+4qcs zOZPt5tM%H{v*DSM99jK~jfCkTqH8fG`z#^B~6^l71|$L zFl2(k6ur|QJR-SN&+GQ#WYnkTVDx1o@;#xFvmi7USe7j+nmx-@f1@i$1+47xp_v^c z&0B`PO7jXYI#lEp=In5PN8|H878uA)z83d*1CnrN*eEH~qIHv}9MiaD?@_szzOhM4 z`dOJGU2UDWXOlDwdoPovdXA-vt`YDfM{+%tYCu*kusv`oFdkf9^p00)4u|U+$aTfc zSPw1g{f^tX!GYtvg9?KUSR-ue4^Au&eU7X*hr_Ofd?WMFXH;vA;bxE3W#kL*5D63B z-C5}snW5HKfH$MB15WQKAxU~9mB1;_(y=N%rdp0+!}t&wrRC&+q8UL^Kivi=A3Dqnw+fbDl+bx z{RlsPkHRkN24cMc{(4kPx_tdwqrtkIo1m{BprCL{f^I-DCVD`42UWG2nkY3NY~ppb zukeyLnCOlN9{h%{@_P^l9R);_3FKsC?{%~_T|{;I<*~aj20dieb zIVVns%N)(nMktK7jfMhvdl(_JGW!ORFP0lO(MIU@zO~xx26HFtyy25-wmF>8N(rtLJ2QQC(9x?Nb#}1v;=$r?4F=)? zL=QYb?zT?n{Wk^ZFxgY8brF4|Jb}rML>XT6bh(1z~GFj(*}m?oc@yjS2M)8+T){S!OP`>mT*X9R{#Q-m=o%%$KmxN z)0$bd3%C)sIhA63i{RhaFpcbZT^&TfTWEq!n=drg*Ni`+A_`D2E275OXJjpI42?%`}>46^M`Zk_#d4?bS~o4b`*>nqbo zZL|LbCrKN?yxj2h2@Z*1!j{==?u!E)LTO0Jyhn4?wAP^_eM_=Ex-d=fcJ|($f?f4; z9;>A$jE=MQ>xU;f__#&(_z)lz_y9Q9)R1K`%FiDmhE<~pXss*S#o!I2*u0PStJ;`B zOJfHqvQ{Uv?p)H{#HgVjMVKS`B=^>yhK`QY#_mVz9`}i0aO%pyzH*dht*;k`XXs%c zQ`!+A;2JP4_pq$vm)F%O!j%NgKlW34{Nv|j`9|fHfuM{?Z=vz|CR!`Rh_PF=`fkL%u#Gh8C3omqj~`Fc#0EQ!~g(n-}$Y5;zcy?SBQsfP^g z7JiUbP~@}U#`1$LFG8{2Cx;$79TJmpghH^Q%hJr{#z%>pgex(ILeJ_#(A-!y; z#s%%~o{=ZmJiz3LHtrB zb!*XCEQx3ZhDv4I8{+bilm7Zp)mWiKQ(X6omvIKh%17gt2==%g=z7U1aT zoEl}yLv;brUO)V2Rv0?nrpLqr49bDyzTMG*3Ao`=tC?X{5tFmQdjX>W1GuSlDSH=q zcw$izifiM53^zJCK~;TE4&#%a>*W1G^7#Yp6=*`?id_~Fxv^Yhg4e$M9+eF#vOC*b zJyN5j^mT@F&7CY5Fp5WV zE|vdZZ|sYW3nq}3cK71tmTNVDr@Cz!c~IQ$&3zW8)89{K=Xz~dt)W*(hMw=;Xf6+h ztC<7ZO>va_l?Zr<_R>5EnP&!E_^>|gs^89)!}%6T%0-6rpHk^e51H{J2>CUpymVf^ z0hswXFmha)T;g@7mIXLIP@=G8BZH&?N~TInLvhSHPT7ZsPaKOfd|P>&+#AD{y}_N- z?grMouD4S=gd+ouR~pT=Xq=dZ!7mT9P_Ot}5o!XPBx=`M?=KblznIdGhZZdE(E_mF z<&9V5k*7-5ZEifGKYk8^&l{;=o8W={@&6ahY zm3J=*NQCZ&Aw6-LDNgDL;9n1y0fUyIY-9ZS!>cyimOUPPGMWBsd4Ju|pZCN}7?4k- zgU8J6oQB|vp}LVMxv;c8@8g~sA>qkIX&6Eg>Bc79&%6x6vUOWDR{#8nUtFe(=Rg}| zpnEZnOR(fyoj5}#gH%w5P-P8o7=L1~z#^kXZ06mWZM&C|ruDiN+ z&W+ceY>>)3&twIub!?juF_zlogO25rb8>Jx-&u;sGc+em^_YsO9x?1^CV@gEl~(AU zhBA^(JU}bPXv~&>oZSE#kfuL*9m?x%LEVLvZ|Cv8Lkm_*zmFp|ZqLJ!O|m{5DmPr>+qB~>%Vnu@8X!mI zS`fJcqeA;YZ!0+vnNf?8puuZB`-|%?Nyo{{mTJ>fK2rA4`EAjT=rdo*0rEL5Wo_mf zNrS5@aX}ReBk?ZWX+n<$*)9?jn>k>MM`b-KA53Bj033-+^^jPw!M_yler1^iTPJlK zoous^y7~lcaVFs+!>5usXv&gd*n~pI16=Ci#n9GZ#18;3 zdTb49WR_OAM?+j_78-7ZQOIp=2ohXWjbO5BFzpMe|7K_b%Ljr&TxqekD(O;SGi1ZZ zNa>AqYYy&3;X)bjxR!yf08TKqBr~)N4=RqM@UP5%DrH{u)RDYW_oxI2t_=5)mvk&@ zzlBz5hMy1Ft4Rr9C3}bIBBBQf6kZbL;H@h#hQJ+HY~BYz^v1D}K zkB8(ubVnyHtvQTRY0v3Ex$+_;6t+!08d_7_%kvl$x3CjPNvxP`vyp;1{IwB)de1@L zlzEB{axn<=N=i9F-`^j^?{ESura_kscS8S`jT_h`Z}ve3>|&vfB58M=HtugMzHLh@ zC^Byb2WZl*QQ*1vfVCHivnEw$I$&! z!^&w<K5#_v<2Eo+&<8!wM`r}<`DAeryK9>TUlw$D1yb_-qHBFHvDc1J)_{F;L**i6 zy2a5lm7%&2OH3kUOI7XLK)=zbeO7NNDyNHvHI-t!3mdVT==Gaw*AqgU3?wxG}J=s5zZrBxwwg(yM!kD>i`VAcB;xfU!lb zdD5Jrx(L&49m4G-6@D_PR_C>#4 zqLak(G3V_B>2cP$jBYJ_qr+jccOpt@!ewJD{GnpY`Uob^i-+R;Bl6&B9w*1gIU(Fq zO}css**Nxagm~d(IdhyZ%&nHr*xy%S;OyXSU_C{loE<8l^H~Z=p=T>WV;Z5VmOZHe z7EgCL)5&+j&4S3CXN(Hz196%C3vh2ydb8Om<%O=-c~^XH^L&%w%RV6=lJLupsE8L$ z!>4w_o<2IGGNSiPA(Qjr;ymh7S=@bk8_LhvUEQ1++>d*+c zCa@LWp6S<`1Do$pFg(63F)bu~sbrgBA8*vHMz&ULw$_(&=yF{Lgp+iw!WpaklrxP+0E$4B_9KkhXH93dmgoL_ss|(P_&YyZyPQ|! 
zmByVJ{=3%^<-}wkGj%p=-eMPw_1PaLTsQtG=2VDUl6WE=(-#^YTLfQG+Z~f9g8O5Y zEk zbODw*kR9Sy*vaJqxDe3_%6b82(IOl|;10BevLDE51;hL~e~EHr<`rn>c+!d(u$`t# zF!Yh#(8k@zH8gtcy(9%QXp}ckVqaRr)3$YCaL;ihz0yqHoGnLNcfsV$CZ8jabi}ie z;YeMea2Dm%_=Y;47J(IP>w=)ht>1uurZIbSeZYW9Ajv)KZ*-l_a7dIRR0v*5gWN&! zQ*FT0Y=xkCp4fJ?Fe$Mxt3+9$nHhNZ0ZD(;UzMZiOVQj5`;OS80m}> zojrSnboZ_(s!O+RFD`SdK%SA^ih(uy)JnzV+o*HIbpa6`Nx=jyv90li@RZGg`$rx{ zceRA29;y8Sm8H3@OLMolj1AV|Ad3fc-S8h8Jx? zcB^t`tws!H?+U)y;Yom{fI@n+r& zE*bTTn+HEv-+;Rvj$)GVuILzeS_#}o(8+#Y5-?^Z2Gs=bJdB>a<+xzmi@|qsl_<>(Bouf0V9qXujN zt*8TJ8#?0HE`v!K+yd5?t00Unz_{jV?Ecb8|Y^^SQ%y28?&?bLlE21D!V3Cx{> zpf!%V446C(A((p5fZl|zZZh!ViKBr6xtL-dv?L7n@)4MH+K6j?gp{hm-&BK|QedMC zF~}iv>zM+CP0DNQFe6X#5Jibf2U!=`WmPS2e}+BWt9U`ql8rIsG!6fr_q6O*UG5m5EVomblD*}O=lk-CQH`!_9$ z*_1marU{;Z27`|eujD>SVm3zQsDC}#D1L1>SslT-o)RnyM_c9l0E07^2eOlIVNIHG z@)q4v3q_jF!tLT>fulr`@diP!DhhLE^3`B$G-1Jawm$nD(halIRnCs*H9~K}VH^bk z1~0Z?Tj5;lTfM(>`HX*j)&BZxT!PY41$8%W4!}u~ulBOpqjS5M7vLK*M8l0eY8P5$MmDOtlnLa zFHGJs3>Bh}NB~7NrM4ADy~QVI?>p5^KpKa?9X*I=hx{s>H6~vXAs6!&a5GwlIZ`HzGqRvZJ@J@9ve09x&gO<;ng$nhL;dhswhP6y|hsmab^-%g5KcN4LxtPSzRf zcll0V2Cl{!O z{~eBEzZ-@>&lZ13^`J!L8%%7Bbo`X)atb|}_gMJRDjizNjW=g;!K$IKcsi-QXyQSp zgIOP7+lSmXl+s*~xZCK+3{%8|G@Bn<2Zr!w9K*#9`X zly^ic%*LG&Tyr?ojEWljG2E9^NCx`rO6c0iMAfh;_*f)MW!l?L zz<0>H1A;Jl99%Vyjm$qId9>L_kD60(^KES5WSTI-gNwu&8Yi~t7`hXK{CsRxu-DYr z`1rhxeYC>CQT&+x@reoa(5B&jL|=zN3zKY>r=jJ|HZ25RcTxyOE5MJqOCl~XIf!uf zBwM6gAj$VUUzt-3kqHXd4~+Jstz$YSW%x0)P_{v9vA2olXrH%zbLRd(z*qJaK$0sN zxK<-x8Wj*D4qB`~AD5@PNwcIrbkEPuwXfRtlZleOzeTy^`rkH>$6=J()4t z!m~-bdXKDx+ty^Irq0&jWd{!)T)bz`9`*fwfanv;H{kYyR}c^cKjoFSIpE^rvPjLx zeCQBeuesKH_e$nHq$VrR(fNH$LsJu3BV_6bPV6pR316P-6op!l@M-8pR&&$4y;n4B zeZuSzg+7qa!>RNToUOEvmFZ2ZbRuR=GsT@OBT!dY65Cmo>7A38$C1~U3x7REJI-|N zXRr%umr)EKM&+CHvmS}L{vD3e&d~k)_XjVFFi7G?ZHCFiu5l1974utUJ1_fG)!D`S z@*&wd0H#tf?_s=oK^n&XUgYiz=q#eEsfQyiTpjWWX~b}F;+gX|-MDfhP|Vi+{n8^1 zIANTcVcKIDyd5p5?Dj$jj5DykA}nrNn(SyViQAGxPEL~8ejqC=>)JnkWMGp$ca8$xP)X3)r*&X5%u{sX8%kwxDKZ3+eQnB|Ut^h^jL&dTJ|j zzCZ(Y5YqF@+)!Yuu8`fp$SU+xoE*9M_}tB|o(QytQvVf(E@B->f1)KmRB~^=5sglZ}I)57njSdNh>Cz5e=EH=Vn? 
zqTt`vry_edJUxGlDg&?a5`K0V)SW8c0Q=%pPU+cc0RN4U=msz2lXK$9^Xmg|z2?Ai zj?jsnu9xhvupw@B4Lpw=9PbCf@XOp|;+FAF`tCI5tG~o-x7F{%8m)FU8^5)yzWU|n zrFUUT;uj@q69Sx@^;41eWlqp=>VEC};%IG1cJ`&{`CI4AZ<7==w{+v_##hVA=~lG( zUa6Kh?jPT^dxw^mR^ra8mBwBgZnNQX8X879Iy$aakwK&JTyb&Ep0~)^=3?Jk?+OB5 zX2?j-$Y;%tF;2mUp0>s(``Cp^L>v}Xe7yF?)z{jp3#v_OSJ^m*-tcx;jwxT4qh1^H zj!Ugbt@+F4CN*&OAw;BP#cmgP$9n3U>AU?a@JHDLDVDrKSg4=2?XoYh^4_ zcJ^{RQ}^ZH0<$-I8M2;gRqsFQl%JcRk=Xi`@$(wCd-v`Gxw*N|Q}aT#4pHN7|$q?s||vRHwMT>u&u|U@-8y{Mek$j{TPS%)PmD)z-YHw>~HvhVHL7(W^SW?l0Ng zs>_eg%3QJYuaQwdA6c^6I@|5B+Qg26dBM#U|E(L9mwv6$AxX43LfJ0({MAO+(HYHo za{~R}#^}z83Fni}HO$J;U2j<`EVF6ivzpE)2V@qQ)ybZhpV}Wu>)9QjL8YbP?IxWO z6uW*Ruo>#PjgU#LR!KI^4f=qlS4FX_iDCmiKm05HSFl6t+SoD$&Pfq@wdOH5&W+ip za(#2Yv1dt$XU^P5pM&zl&d(UK#{Yk(CeH*upptojn)l7x;Or3)KcH^EC~w7EYiVwF?ds~fHvNx3H#c^mXZAo9)n?h& zm}zr0j91o<&(jLc_T3a9YJV-vzbIkzE%W^JJDb`PW1n(4pUHiH;x89%m#Rm;hiVHV zm#d7!^FMt$fM~T;?7|OJmrGo#$?9dBZG(g#ojyI|SVsNyo2s1}ERU#QmlGUH(o-V} zaq`AZf9CGy<_h#7Fl|uM+ZiRgj>%^}T-qEKJ!9%cr`|$zvF>EsL3XiADyDIj-PpCI zO+SoAcNZeepR%8#0N>FErpqyLfkT)pDal0{@d<}z=5h&W`hUMuU4yzj{NKJaYgzag P{->m Date: Wed, 5 Oct 2022 18:26:49 -0500 Subject: [PATCH 127/127] Manually revert relay.go --- x/ccv/provider/keeper/relay.go | 55 +++++++++++++++++++++------------- 1 file changed, 34 insertions(+), 21 deletions(-) diff --git a/x/ccv/provider/keeper/relay.go b/x/ccv/provider/keeper/relay.go index 1f0c9eb607..19938e0284 100644 --- a/x/ccv/provider/keeper/relay.go +++ b/x/ccv/provider/keeper/relay.go @@ -116,35 +116,29 @@ func (k Keeper) OnTimeoutPacket(ctx sdk.Context, packet channeltypes.Packet) err return k.StopConsumerChain(ctx, chainID, k.GetLockUnbondingOnTimeout(ctx, chainID), false) } -// TrySendValidatorUpdates tries to send latest validator updates to every registered consumer chain -func (k Keeper) TrySendValidatorUpdates(ctx sdk.Context) { +// SendValidatorUpdates sends latest validator updates to every registered consumer chain +func (k Keeper) SendValidatorUpdates(ctx sdk.Context) { // get current ValidatorSetUpdateId valUpdateID := k.GetValidatorSetUpdateId(ctx) // get the validator updates from the staking module valUpdates := k.stakingKeeper.GetValidatorUpdates(ctx) k.IterateConsumerChains(ctx, func(ctx sdk.Context, chainID, clientID string) (stop bool) { - - packets := k.EmptyPendingVSC(ctx, chainID) + // check whether there is an established CCV channel to this consumer chain + if channelID, found := k.GetChainToChannel(ctx, chainID); found { + // Send pending VSC packets to consumer chain + k.SendPendingVSCPackets(ctx, chainID, channelID) + } // check whether there are changes in the validator set; // note that this also entails unbonding operations // w/o changes in the voting power of the validators in the validator set unbondingOps, _ := k.GetUnbondingOpsFromIndex(ctx, chainID, valUpdateID) if len(valUpdates) != 0 || len(unbondingOps) != 0 { + // construct validator set change packet data + packetData := ccv.NewValidatorSetChangePacketData(valUpdates, valUpdateID, k.EmptySlashAcks(ctx, chainID)) - // Map the updates through any key transformations - // updatesToSend := k.keymap.ComputeUpdates(valUpdateID, valUpdates) - updatesToSend := valUpdates - - packets = append( - packets, - ccv.NewValidatorSetChangePacketData(updatesToSend, valUpdateID, k.EmptySlashAcks(ctx, chainID)), - ) - } - - // check whether there is an established CCV channel to this consumer chain - if channelID, found := k.GetChainToChannel(ctx, chainID); found { - for _, data := range packets { + // check whether there is an established 
CCV channel to this consumer chain + if channelID, found := k.GetChainToChannel(ctx, chainID); found { // send this validator set change packet data to the consumer chain err := utils.SendIBCPacket( ctx, @@ -152,15 +146,15 @@ func (k Keeper) TrySendValidatorUpdates(ctx sdk.Context) { k.channelKeeper, channelID, // source channel id ccv.ProviderPortID, // source port id - data.GetBytes(), + packetData.GetBytes(), ) if err != nil { panic(fmt.Errorf("packet could not be sent over IBC: %w", err)) } + } else { + // store the packet data to be sent once the CCV channel is established + k.AppendPendingVSC(ctx, chainID, packetData) } - } else { - // store the packet data to be sent once the CCV channel is established - k.SetPendingVSCs(ctx, chainID, packets) } return false // do not stop the iteration }) @@ -168,6 +162,25 @@ func (k Keeper) TrySendValidatorUpdates(ctx sdk.Context) { k.IncrementValidatorSetUpdateId(ctx) } +// Sends all pending ValidatorSetChangePackets to the specified chain +func (k Keeper) SendPendingVSCPackets(ctx sdk.Context, chainID, channelID string) { + pendingPackets := k.EmptyPendingVSC(ctx, chainID) + for _, data := range pendingPackets { + // send packet over IBC + err := utils.SendIBCPacket( + ctx, + k.scopedKeeper, + k.channelKeeper, + channelID, // source channel id + ccv.ProviderPortID, // source port id + data.GetBytes(), + ) + if err != nil { + panic(fmt.Errorf("packet could not be sent over IBC: %w", err)) + } + } +} + // OnRecvSlashPacket slashes and jails the given validator in the packet data func (k Keeper) OnRecvSlashPacket(ctx sdk.Context, packet channeltypes.Packet, data ccv.SlashPacketData) exported.Acknowledgement { // check that the channel is established