Implement Expanded to Compact Difficulty Conversion #1196

Merged: 15 commits, Oct 30, 2020
7 changes: 7 additions & 0 deletions zebra-chain/proptest-regressions/work/tests/prop.txt
@@ -0,0 +1,7 @@
# Seeds for failure cases proptest has generated in the past. It is
# automatically read and these particular cases re-run before any
# novel cases are generated.
#
# It is recommended to check this file in to source control so that
# everyone who runs the test benefits from these saved cases.
cc 8e9b7658e31f20a01083e3b065f8ca0cdc98fedaf3058405e9e9fb59fd90b570 # shrinks to expanded_seed = block::Hash("0000000000000000000000000000000000000000000000000000000000000000")
9 changes: 5 additions & 4 deletions zebra-chain/src/block/arbitrary.rs
@@ -73,26 +73,27 @@ impl Arbitrary for Header
any::<[u8; 32]>(),
// time is interpreted as u32 in the spec, but rust timestamps are i64
(0i64..(u32::MAX as i64)),
any::<CompactDifficulty>(),
any::<[u8; 32]>(),
any::<equihash::Solution>(),
)
.prop_map(
|(
version,
previous_block_hash,
- merkle_root_hash,
+ merkle_root,
root_bytes,
timestamp,
difficulty_threshold,
nonce,
solution,
)| Header {
version,
previous_block_hash,
- merkle_root: merkle_root_hash,
+ merkle_root,
root_bytes,
time: Utc.timestamp(timestamp, 0),
- // TODO: replace with `ExpandedDifficulty.to_compact` when that method is implemented
- difficulty_threshold: CompactDifficulty(545259519),
+ difficulty_threshold,
nonce,
solution,
},
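Note on the hunk above: the `Header` strategy now draws `difficulty_threshold` from the new `CompactDifficulty` strategy instead of the hard-coded `CompactDifficulty(545259519)`, so every generated header carries a threshold the consensus code accepts. A minimal sketch of a property test that relies on this guarantee (the test is hypothetical, not part of this PR, and assumes `to_expanded` and the `difficulty_threshold` field are visible to the test crate):

    use proptest::prelude::*;
    use zebra_chain::block::Header;

    proptest! {
        #[test]
        fn generated_thresholds_are_valid(header in any::<Header>()) {
            // Every arbitrary Header now gets its threshold from
            // ExpandedDifficulty::to_compact, so expansion cannot fail.
            prop_assert!(header.difficulty_threshold.to_expanded().is_some());
        }
    }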
123 changes: 98 additions & 25 deletions zebra-chain/src/work/difficulty.rs
@@ -13,14 +13,16 @@

use crate::{block, parameters::Network};

- use std::cmp::{Ordering, PartialEq, PartialOrd};
- use std::{fmt, ops::Add, ops::AddAssign};
+ use std::{
+ cmp::{Ordering, PartialEq, PartialOrd},
+ convert::TryFrom,
+ fmt,
+ };

use primitive_types::U256;

- #[cfg(any(test, feature = "proptest-impl"))]
- use proptest_derive::Arbitrary;

+ mod arbitrary;
#[cfg(test)]
mod tests;

@@ -53,8 +55,7 @@ mod tests;
/// multiple equivalent `CompactDifficulty` values, due to redundancy in the
/// floating-point format.
#[derive(Clone, Copy, Eq, PartialEq, Serialize, Deserialize)]
- #[cfg_attr(any(test, feature = "proptest-impl"), derive(Arbitrary))]
- pub struct CompactDifficulty(pub u32);
+ pub struct CompactDifficulty(pub(crate) u32);

impl fmt::Debug for CompactDifficulty {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
@@ -65,6 +66,9 @@ impl fmt::Debug for CompactDifficulty {
}
}

/// An invalid CompactDifficulty value, for testing.
pub const INVALID_COMPACT_DIFFICULTY: CompactDifficulty = CompactDifficulty(u32::MAX);

/// A 256-bit unsigned "expanded difficulty" value.
///
/// Used as a target threshold for the difficulty of a `block::Hash`.
@@ -74,13 +78,19 @@ impl fmt::Debug for CompactDifficulty {
/// The precise bit pattern of an `ExpandedDifficulty` value is
/// consensus-critical, because it is compared with the `block::Hash`.
///
- Note that each `CompactDifficulty` value represents a range of
- `ExpandedDifficulty` values, because the precision of the
- floating-point format requires rounding on conversion.
+ Note that each `CompactDifficulty` value can be converted from a
+ range of `ExpandedDifficulty` values, because the precision of
+ the floating-point format requires rounding on conversion.
///
/// Therefore, consensus-critical code must perform the specified
/// conversions to `CompactDifficulty`, even if the original
/// `ExpandedDifficulty` values are known.
///
/// Callers should avoid constructing `ExpandedDifficulty` zero
/// values, because they are rejected by the consensus rules,
/// and cause some conversion functions to panic.
//
// TODO: Use NonZeroU256, when available
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd)]
pub struct ExpandedDifficulty(U256);
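For context on the doc comments above: the compact format is a base-256 floating-point encoding packed into a `u32`, with a one-byte exponent in the high byte and a 23-bit unsigned mantissa in the low bits (bit 23 is a sign bit that valid difficulties never set). A minimal decoding sketch, assuming an exponent of at least 3 and using `U256` as this module does; `ToTarget()` in the Zcash specification is the authoritative definition:

    use primitive_types::U256;

    /// Decode a compact value into its expanded threshold:
    /// threshold = mantissa * 256^(exponent - 3).
    fn decode_compact_sketch(compact: u32) -> U256 {
        let exponent = compact >> 24; // high byte: base-256 exponent
        let mantissa = U256::from(compact & 0x007f_ffff); // low 23 bits
        // Each byte of shift multiplies by 256; assumes exponent >= 3.
        mantissa << (8 * (exponent as usize - 3))
    }

    fn main() {
        // 0x04123456: exponent 4, mantissa 0x123456, so the threshold
        // is 0x123456 * 256^(4 - 3) = 0x12345600.
        assert_eq!(decode_compact_sketch(0x04123456), U256::from(0x1234_5600u32));
    }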

@@ -236,7 +246,7 @@ impl CompactDifficulty {
// `((2^256 - expanded - 1) / (expanded + 1)) + 1`, or
let result = (!expanded.0 / (expanded.0 + 1)) + 1;
if result <= u128::MAX.into() {
- Work(result.as_u128()).into()
+ Some(Work(result.as_u128()))
} else {
None
}
@@ -255,7 +265,7 @@ impl ExpandedDifficulty {
///
/// Hashes are not used to calculate the difficulties of future blocks, so
/// users of this module should avoid converting hashes into difficulties.
- fn from_hash(hash: &block::Hash) -> ExpandedDifficulty {
+ pub(super) fn from_hash(hash: &block::Hash) -> ExpandedDifficulty {
U256::from_little_endian(&hash.0).into()
}

@@ -272,6 +282,73 @@

limit.into()
}

/// Calculate the CompactDifficulty for an expanded difficulty.
///
/// See `ToCompact()` in the Zcash Specification, and `GetCompact()`
/// in zcashd.
///
/// Panics:
///
/// If `self` is zero.
///
/// `ExpandedDifficulty` values are generated in two ways:
/// * conversion from `CompactDifficulty` values, which rejects zeroes, and
/// * difficulty adjustment calculations, which impose a non-zero minimum
/// `target_difficulty_limit`.
///
Neither of these methods yields zero values.
pub fn to_compact(&self) -> CompactDifficulty {
// The zcashd implementation supports negative and zero compact values.
// These values are rejected by the protocol rules. Zebra is designed so
// that invalid states are not representable. Therefore, this function
// does not produce negative compact values, and panics on zero compact
// values. (The negative compact value code in zcashd is unused.)
assert!(self.0 > 0.into(), "Zero difficulty values are invalid");

// The constants for this floating-point representation.
// Alias the constants here, so the code is easier to read.
const UNSIGNED_MANTISSA_MASK: u32 = CompactDifficulty::UNSIGNED_MANTISSA_MASK;
const OFFSET: i32 = CompactDifficulty::OFFSET;

// Calculate the final size, accounting for the sign bit.
// This is the size *after* applying the sign bit adjustment in `ToCompact()`.
let size = self.0.bits() / 8 + 1;

// Make sure the mantissa is non-negative, by shifting down values that
// would otherwise overflow into the sign bit
let mantissa = if self.0 <= UNSIGNED_MANTISSA_MASK.into() {
// Value is small, shift up if needed
self.0 << (8 * (3 - size))
} else {
// Value is large, shift down
self.0 >> (8 * (size - 3))
};

// This assertion also makes sure that size fits in its 8 bit compact field
assert!(
size < (31 + OFFSET) as _,
format!(
"256^size (256^{}) must fit in a u256, after the sign bit adjustment and offset",
size
)
);
let size = u32::try_from(size).expect("a 0-6 bit value fits in a u32");

assert!(
mantissa <= UNSIGNED_MANTISSA_MASK.into(),
format!("mantissa {:x?} must fit in its compact field", mantissa)
);
let mantissa = u32::try_from(mantissa).expect("a 0-23 bit value fits in a u32");

if mantissa > 0 {
CompactDifficulty(mantissa + (size << 24))
} else {
// This check catches invalid mantissas. Overflows and underflows
// should also be unreachable, but they aren't caught here.
unreachable!("converted CompactDifficulty values must be valid")
}
}
}
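A usage sketch of the conversion added above, assuming crate-internal access and the `target_difficulty_limit` constructor whose tail appears earlier in this diff. The first conversion may round, but a second round trip is stable, because `to_compact` output is already at compact precision:

    use crate::parameters::Network;

    // The proof-of-work limit is a known-valid, non-zero threshold.
    let limit = ExpandedDifficulty::target_difficulty_limit(Network::Mainnet);

    // to_compact rounds to 23 bits of mantissa precision...
    let compact = limit.to_compact();

    // ...so compact -> expanded -> compact is the identity.
    let reexpanded = compact
        .to_expanded()
        .expect("compact values produced by to_compact are valid");
    assert_eq!(reexpanded.to_compact(), compact);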

impl From<U256> for ExpandedDifficulty {
@@ -328,28 +405,24 @@ impl PartialOrd<ExpandedDifficulty> for block::Hash {
}
}

- impl Add for Work {
- type Output = Self;
-
- fn add(self, rhs: Work) -> Self {
- let result = self
- .0
- .checked_add(rhs.0)
- .expect("Work values do not overflow");
- Work(result)
- }
- }
-
- impl AddAssign for Work {
- fn add_assign(&mut self, rhs: Work) {
- *self = *self + rhs;
- }
- }
+ impl std::ops::Add for Work {
+ type Output = PartialCumulativeWork;
+
+ fn add(self, rhs: Work) -> PartialCumulativeWork {
+ PartialCumulativeWork::from(self) + rhs
+ }
+ }

#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
/// Partial work used to track relative work in non-finalized chains
pub struct PartialCumulativeWork(u128);

impl From<Work> for PartialCumulativeWork {
fn from(work: Work) -> Self {
PartialCumulativeWork(work.0)
}
}

impl std::ops::Add<Work> for PartialCumulativeWork {
type Output = PartialCumulativeWork;

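To make the new `Add` behaviour concrete: summing two per-block `Work` values now yields a `PartialCumulativeWork` instead of another `Work`, so cumulative chain work is a distinct type that cannot be confused with a single block's work. A minimal crate-internal sketch (the tuple constructors are only visible inside `zebra-chain`, and the literal values are illustrative):

    let block_a = Work(2);
    let block_b = Work(3);

    // `Work + Work` promotes to the cumulative type.
    let cumulative: PartialCumulativeWork = block_a + block_b;
    assert_eq!(cumulative, PartialCumulativeWork::from(Work(5)));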
83 changes: 83 additions & 0 deletions zebra-chain/src/work/difficulty/arbitrary.rs
@@ -0,0 +1,83 @@
use super::*;

use crate::block;

use proptest::{arbitrary::Arbitrary, collection::vec, prelude::*};

impl Arbitrary for CompactDifficulty {
type Parameters = ();

fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {
(vec(any::<u8>(), 32))
.prop_filter_map("zero CompactDifficulty values are invalid", |v| {
let mut bytes = [0; 32];
bytes.copy_from_slice(v.as_slice());
if bytes == [0; 32] {
return None;
}
// In the Zcash protocol, a CompactDifficulty is generated using the difficulty
// adjustment functions. Instead of using those functions, we make a random
// ExpandedDifficulty, then convert it to a CompactDifficulty.
ExpandedDifficulty::from_hash(&block::Hash(bytes))
.to_compact()
.into()
})
.boxed()
}

type Strategy = BoxedStrategy<Self>;
}

impl Arbitrary for ExpandedDifficulty {
type Parameters = ();

fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy {
any::<CompactDifficulty>()
.prop_map(|d| {
// In the Zcash protocol, an ExpandedDifficulty is converted from a CompactDifficulty,
// or generated using the difficulty adjustment functions. We use the conversion in
// our proptest strategy.
d.to_expanded()
.expect("arbitrary CompactDifficulty is valid")
})
.boxed()
}

type Strategy = BoxedStrategy<Self>;
}

impl Arbitrary for Work {
type Parameters = ();

fn arbitrary_with(_args: ()) -> Self::Strategy {
// In the Zcash protocol, a Work is converted from an ExpandedDifficulty.
// But some randomised difficulties are impractically large, and will
// never appear in any real-world block. So we just use a random Work value.
(any::<u128>())
.prop_filter_map("zero Work values are invalid", |w| {
if w == 0 {
None
} else {
Work(w).into()
}
})
.boxed()
}

type Strategy = BoxedStrategy<Self>;
}

impl Arbitrary for PartialCumulativeWork {
type Parameters = ();

fn arbitrary_with(_args: ()) -> Self::Strategy {
// In Zebra's implementation, a PartialCumulativeWork is the sum of 0..100 Work values.
// But our Work values are randomised, rather than being derived from real-world
// difficulties. So we don't need to sum multiple Work values here.
(any::<Work>())
.prop_map(PartialCumulativeWork::from)
.boxed()
}

type Strategy = BoxedStrategy<Self>;
}
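A sketch of how these strategies can combine in a property test (hypothetical, not part of this diff): since `to_compact` truncates the mantissa, re-expanding a converted value rounds the threshold down, never up.

    use proptest::prelude::*;

    proptest! {
        #[test]
        fn expanded_to_compact_round_trip(expanded in any::<ExpandedDifficulty>()) {
            let compact = expanded.to_compact();
            let reexpanded = compact
                .to_expanded()
                .expect("compact values produced by to_compact are valid");

            // Truncating the mantissa can only lower the threshold.
            prop_assert!(reexpanded <= expanded);
        }
    }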