Drop ahash dependency in favor of core's SipHasher
tkaitchuck/aHash#196 bumped the MSRV of
`ahash` in a patch release, which makes it rather difficult for us
to have it as a dependency.

Further, it seems that `ahash` hasn't been particularly robust in
the past, notably
tkaitchuck/aHash#163 and
tkaitchuck/aHash#166.

Luckily, `core` provides `SipHasher` even on no-std (sadly it's
SipHash-2-4, unlike the SipHash-1-3 used by the `DefaultHasher` in
`std`). Thus, we drop the `ahash` dependency entirely here and
simply wrap `SipHasher` for our `no-std` HashMaps.
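
Roughly, the wrapper looks like the sketch below. This is an
illustrative sketch rather than the exact code added here: the
`RandomState` name and the fixed SipHash keys are placeholders (real
keys should be randomized when entropy is available, e.g. via
`possiblyrandom`), while `new_hash_map()` and
`hash_map_with_capacity()` mirror the helpers used throughout this
diff. Note that the module exposes maps only, so former `HashSet`s
become maps with `()` values (hence the `insert(key, ()).is_none()`
changes below).

#![allow(deprecated)] // core::hash::SipHasher is deprecated, but it is all no-std has

use core::hash::{BuildHasher, SipHasher};

/// Builds keyed `SipHasher`s. Placeholder name; the fixed keys keep this
/// sketch self-contained, whereas real keys should come from whatever
/// entropy is available in order to resist HashDoS.
#[derive(Clone, Copy, Default)]
pub struct RandomState {
	k0: u64,
	k1: u64,
}

impl BuildHasher for RandomState {
	type Hasher = SipHasher;
	fn build_hasher(&self) -> SipHasher {
		SipHasher::new_with_keys(self.k0, self.k1)
	}
}

pub type HashMap<K, V> = hashbrown::HashMap<K, V, RandomState>;

pub fn new_hash_map<K, V>() -> HashMap<K, V> {
	HashMap::with_hasher(RandomState::default())
}

pub fn hash_map_with_capacity<K, V>(cap: usize) -> HashMap<K, V> {
	HashMap::with_capacity_and_hasher(cap, RandomState::default())
}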
TheBlueMatt committed Feb 13, 2024
1 parent e4321bb commit 9fdfba5
Showing 12 changed files with 193 additions and 159 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/build.yml
@@ -110,7 +110,7 @@ jobs:
run: |
cd lightning
RUSTFLAGS="--cfg=require_route_graph_test" cargo test
-RUSTFLAGS="--cfg=require_route_graph_test" cargo test --features hashbrown,ahash
+RUSTFLAGS="--cfg=require_route_graph_test" cargo test --features hashbrown
cd ..
- name: Run benchmarks on Rust ${{ matrix.toolchain }}
run: |
2 changes: 1 addition & 1 deletion bench/Cargo.toml
@@ -9,7 +9,7 @@ name = "bench"
harness = false

[features]
-hashbrown = ["lightning/hashbrown", "lightning/ahash"]
+hashbrown = ["lightning/hashbrown"]

[dependencies]
lightning = { path = "../lightning", features = ["_test_utils", "criterion"] }
2 changes: 1 addition & 1 deletion ci/check-cfg-flags.py
@@ -13,7 +13,7 @@ def check_feature(feature):
pass
elif feature == "no-std":
pass
-    elif feature == "ahash":
+    elif feature == "possiblyrandom":
pass
elif feature == "getrandom":
pass
1 change: 0 additions & 1 deletion fuzz/Cargo.toml
@@ -22,7 +22,6 @@ lightning = { path = "../lightning", features = ["regex", "hashbrown", "_test_ut
lightning-rapid-gossip-sync = { path = "../lightning-rapid-gossip-sync" }
bitcoin = { version = "0.30.2", features = ["secp-lowmemory"] }
hex = { package = "hex-conservative", version = "0.1.1", default-features = false }
-hashbrown = "0.13"

afl = { version = "0.12", optional = true }
honggfuzz = { version = "0.5", optional = true, default-features = false }
23 changes: 10 additions & 13 deletions fuzz/src/chanmon_consistency.rs
@@ -51,6 +51,7 @@ use lightning::offers::invoice_request::UnsignedInvoiceRequest;
use lightning::onion_message::messenger::{Destination, MessageRouter, OnionMessagePath};
use lightning::util::test_channel_signer::{TestChannelSigner, EnforcementState};
use lightning::util::errors::APIError;
+use lightning::util::hash_tables::*;
use lightning::util::logger::Logger;
use lightning::util::config::UserConfig;
use lightning::util::ser::{Readable, ReadableArgs, Writeable, Writer};
@@ -66,7 +67,6 @@ use bitcoin::secp256k1::schnorr;

use std::mem;
use std::cmp::{self, Ordering};
-use hashbrown::{HashSet, hash_map, HashMap};
use std::sync::{Arc,Mutex};
use std::sync::atomic;
use std::io::Cursor;
@@ -157,7 +157,7 @@ impl TestChainMonitor {
logger,
keys,
persister,
-latest_monitors: Mutex::new(HashMap::new()),
+latest_monitors: Mutex::new(new_hash_map()),
}
}
}
@@ -173,16 +173,13 @@ impl chain::Watch<TestChannelSigner> for TestChainMonitor {

fn update_channel(&self, funding_txo: OutPoint, update: &channelmonitor::ChannelMonitorUpdate) -> chain::ChannelMonitorUpdateStatus {
let mut map_lock = self.latest_monitors.lock().unwrap();
-let mut map_entry = match map_lock.entry(funding_txo) {
-hash_map::Entry::Occupied(entry) => entry,
-hash_map::Entry::Vacant(_) => panic!("Didn't have monitor on update call"),
-};
+let map_entry = map_lock.get_mut(&funding_txo).expect("Didn't have monitor on update call");
let deserialized_monitor = <(BlockHash, channelmonitor::ChannelMonitor<TestChannelSigner>)>::
-read(&mut Cursor::new(&map_entry.get().1), (&*self.keys, &*self.keys)).unwrap().1;
+read(&mut Cursor::new(&map_entry.1), (&*self.keys, &*self.keys)).unwrap().1;
deserialized_monitor.update_monitor(update, &&TestBroadcaster{}, &&FuzzEstimator { ret_val: atomic::AtomicU32::new(253) }, &self.logger).unwrap();
let mut ser = VecWriter(Vec::new());
deserialized_monitor.write(&mut ser).unwrap();
-map_entry.insert((update.update_id, ser.0));
+*map_entry = (update.update_id, ser.0);
self.chain_monitor.update_channel(funding_txo, update)
}

@@ -467,7 +464,7 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
($node_id: expr, $fee_estimator: expr) => { {
let logger: Arc<dyn Logger> = Arc::new(test_logger::TestLogger::new($node_id.to_string(), out.clone()));
let node_secret = SecretKey::from_slice(&[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, $node_id]).unwrap();
-let keys_manager = Arc::new(KeyProvider { node_secret, rand_bytes_id: atomic::AtomicU32::new(0), enforcement_states: Mutex::new(HashMap::new()) });
+let keys_manager = Arc::new(KeyProvider { node_secret, rand_bytes_id: atomic::AtomicU32::new(0), enforcement_states: Mutex::new(new_hash_map()) });
let monitor = Arc::new(TestChainMonitor::new(broadcast.clone(), logger.clone(), $fee_estimator.clone(),
Arc::new(TestPersister {
update_ret: Mutex::new(ChannelMonitorUpdateStatus::Completed)
@@ -508,13 +505,13 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
config.manually_accept_inbound_channels = true;
}

-let mut monitors = HashMap::new();
+let mut monitors = new_hash_map();
let mut old_monitors = $old_monitors.latest_monitors.lock().unwrap();
for (outpoint, (update_id, monitor_ser)) in old_monitors.drain() {
monitors.insert(outpoint, <(BlockHash, ChannelMonitor<TestChannelSigner>)>::read(&mut Cursor::new(&monitor_ser), (&*$keys_manager, &*$keys_manager)).expect("Failed to read monitor").1);
chain_monitor.latest_monitors.lock().unwrap().insert(outpoint, (update_id, monitor_ser));
}
-let mut monitor_refs = HashMap::new();
+let mut monitor_refs = new_hash_map();
for (outpoint, monitor) in monitors.iter_mut() {
monitor_refs.insert(*outpoint, monitor);
}
@@ -981,7 +978,7 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
// In case we get 256 payments we may have a hash collision, resulting in the
// second claim/fail call not finding the duplicate-hash HTLC, so we have to
// deduplicate the calls here.
-let mut claim_set = HashSet::new();
+let mut claim_set = new_hash_map();
let mut events = nodes[$node].get_and_clear_pending_events();
// Sort events so that PendingHTLCsForwardable get processed last. This avoids a
// case where we first process a PendingHTLCsForwardable, then claim/fail on a
@@ -1003,7 +1000,7 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
for event in events.drain(..) {
match event {
events::Event::PaymentClaimable { payment_hash, .. } => {
-if claim_set.insert(payment_hash.0) {
+if claim_set.insert(payment_hash.0, ()).is_none() {
if $fail {
nodes[$node].fail_htlc_backwards(&payment_hash);
} else {
19 changes: 8 additions & 11 deletions fuzz/src/full_stack.rs
@@ -49,6 +49,7 @@ use lightning::routing::gossip::{P2PGossipSync, NetworkGraph};
use lightning::routing::utxo::UtxoLookup;
use lightning::routing::router::{InFlightHtlcs, PaymentParameters, Route, RouteParameters, Router};
use lightning::util::config::{ChannelConfig, UserConfig};
+use lightning::util::hash_tables::*;
use lightning::util::errors::APIError;
use lightning::util::test_channel_signer::{TestChannelSigner, EnforcementState};
use lightning::util::logger::Logger;
@@ -63,7 +64,6 @@ use bitcoin::secp256k1::ecdsa::{RecoverableSignature, Signature};
use bitcoin::secp256k1::schnorr;

use std::cell::RefCell;
-use hashbrown::{HashMap, hash_map};
use std::convert::TryInto;
use std::cmp;
use std::sync::{Arc, Mutex};
@@ -229,7 +229,7 @@ impl<'a> MoneyLossDetector<'a> {

peers,
funding_txn: Vec::new(),
-txids_confirmed: HashMap::new(),
+txids_confirmed: new_hash_map(),
header_hashes: vec![(genesis_block(Network::Bitcoin).block_hash(), 0)],
height: 0,
max_height: 0,
@@ -241,13 +241,10 @@ impl<'a> MoneyLossDetector<'a> {
let mut txdata = Vec::with_capacity(all_txn.len());
for (idx, tx) in all_txn.iter().enumerate() {
let txid = tx.txid();
-match self.txids_confirmed.entry(txid) {
-hash_map::Entry::Vacant(e) => {
-e.insert(self.height);
-txdata.push((idx + 1, tx));
-},
-_ => {},
-}
+self.txids_confirmed.entry(txid).or_insert_with(|| {
+txdata.push((idx + 1, tx));
+self.height
+});
}

self.blocks_connected += 1;
@@ -489,7 +486,7 @@ pub fn do_test(mut data: &[u8], logger: &Arc<dyn Logger>) {
node_secret: our_network_key.clone(),
inbound_payment_key: KeyMaterial(inbound_payment_key.try_into().unwrap()),
counter: AtomicU64::new(0),
-signer_state: RefCell::new(HashMap::new())
+signer_state: RefCell::new(new_hash_map())
});
let network = Network::Bitcoin;
let best_block_timestamp = genesis_block(network).header.time;
@@ -518,7 +515,7 @@ pub fn do_test(mut data: &[u8], logger: &Arc<dyn Logger>) {
let mut intercepted_htlcs: Vec<InterceptId> = Vec::new();
let mut payments_sent: u16 = 0;
let mut pending_funding_generation: Vec<(ChannelId, PublicKey, u64, ScriptBuf)> = Vec::new();
-let mut pending_funding_signatures = HashMap::new();
+let mut pending_funding_signatures = new_hash_map();

loop {
match get_slice!(1)[0] {
10 changes: 5 additions & 5 deletions fuzz/src/indexedmap.rs
@@ -9,7 +9,7 @@

use lightning::util::indexed_map::{IndexedMap, self};
use std::collections::{BTreeMap, btree_map};
-use hashbrown::HashSet;
+use lightning::util::hash_tables::*;

use crate::utils::test_logger;

@@ -80,23 +80,23 @@ fn check_eq(btree: &BTreeMap<u8, u8>, mut indexed: IndexedMap<u8, u8>) {
}
}

-let mut key_set = HashSet::with_capacity(256);
+let mut key_set = hash_map_with_capacity(1024);
for k in indexed.unordered_keys() {
-assert!(key_set.insert(*k));
+assert!(key_set.insert(*k, ()).is_none());
assert!(btree.contains_key(k));
}
assert_eq!(key_set.len(), btree.len());

key_set.clear();
for (k, v) in indexed.unordered_iter() {
-assert!(key_set.insert(*k));
+assert!(key_set.insert(*k, ()).is_none());
assert_eq!(btree.get(k).unwrap(), v);
}
assert_eq!(key_set.len(), btree.len());

key_set.clear();
for (k, v) in indexed_clone.unordered_iter_mut() {
-assert!(key_set.insert(*k));
+assert!(key_set.insert(*k, ()).is_none());
assert_eq!(btree.get(k).unwrap(), v);
}
assert_eq!(key_set.len(), btree.len());
24 changes: 13 additions & 11 deletions fuzz/src/router.rs
@@ -23,6 +23,7 @@ use lightning::routing::utxo::{UtxoFuture, UtxoLookup, UtxoLookupError, UtxoResu
use lightning::routing::router::{find_route, PaymentParameters, RouteHint, RouteHintHop, RouteParameters};
use lightning::routing::scoring::{ProbabilisticScorer, ProbabilisticScoringFeeParameters, ProbabilisticScoringDecayParameters};
use lightning::util::config::UserConfig;
+use lightning::util::hash_tables::*;
use lightning::util::ser::Readable;

use bitcoin::hashes::Hash;
@@ -32,7 +33,6 @@ use bitcoin::network::constants::Network;
use crate::utils::test_logger;

use std::convert::TryInto;
-use hashbrown::HashSet;
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};

@@ -198,7 +198,7 @@ pub fn do_test<Out: test_logger::Output>(data: &[u8], out: Out) {
net_graph: &net_graph,
};

-let mut node_pks = HashSet::new();
+let mut node_pks = new_hash_map();
let mut scid = 42;

macro_rules! first_hops {
@@ -208,7 +208,8 @@ pub fn do_test<Out: test_logger::Output>(data: &[u8], out: Out) {
count => {
for _ in 0..count {
scid += 1;
-let rnid = node_pks.iter().skip(u16::from_be_bytes(get_slice!(2).try_into().unwrap()) as usize % node_pks.len()).next().unwrap();
+let (rnid, _) =
+node_pks.iter().skip(u16::from_be_bytes(get_slice!(2).try_into().unwrap()) as usize % node_pks.len()).next().unwrap();
let capacity = u64::from_be_bytes(get_slice!(8).try_into().unwrap());
$first_hops_vec.push(ChannelDetails {
channel_id: ChannelId::new_zero(),
@@ -257,7 +258,8 @@ pub fn do_test<Out: test_logger::Output>(data: &[u8], out: Out) {
let count = get_slice!(1)[0];
for _ in 0..count {
scid += 1;
-let rnid = node_pks.iter().skip(slice_to_be16(get_slice!(2))as usize % node_pks.len()).next().unwrap();
+let (rnid, _) =
+node_pks.iter().skip(slice_to_be16(get_slice!(2)) as usize % node_pks.len()).next().unwrap();
$last_hops.push(RouteHint(vec![RouteHintHop {
src_node_id: *rnid,
short_channel_id: scid,
@@ -277,7 +279,7 @@ pub fn do_test<Out: test_logger::Output>(data: &[u8], out: Out) {
($first_hops: expr, $node_pks: expr, $route_params: expr) => {
let scorer = ProbabilisticScorer::new(ProbabilisticScoringDecayParameters::default(), &net_graph, &logger);
let random_seed_bytes: [u8; 32] = [get_slice!(1)[0]; 32];
-for target in $node_pks {
+for (target, ()) in $node_pks {
let final_value_msat = slice_to_be64(get_slice!(8));
let final_cltv_expiry_delta = slice_to_be32(get_slice!(4));
let route_params = $route_params(final_value_msat, final_cltv_expiry_delta, target);
@@ -297,20 +299,20 @@ pub fn do_test<Out: test_logger::Output>(data: &[u8], out: Out) {
return;
}
let msg = decode_msg_with_len16!(msgs::UnsignedNodeAnnouncement, 288);
-node_pks.insert(get_pubkey_from_node_id!(msg.node_id));
+node_pks.insert(get_pubkey_from_node_id!(msg.node_id), ());
let _ = net_graph.update_node_from_unsigned_announcement(&msg);
},
1 => {
let msg = decode_msg_with_len16!(msgs::UnsignedChannelAnnouncement, 32+8+33*4);
-node_pks.insert(get_pubkey_from_node_id!(msg.node_id_1));
-node_pks.insert(get_pubkey_from_node_id!(msg.node_id_2));
+node_pks.insert(get_pubkey_from_node_id!(msg.node_id_1), ());
+node_pks.insert(get_pubkey_from_node_id!(msg.node_id_2), ());
let _ = net_graph.update_channel_from_unsigned_announcement::
<&FuzzChainSource<'_, '_, Out>>(&msg, &None);
},
2 => {
let msg = decode_msg_with_len16!(msgs::UnsignedChannelAnnouncement, 32+8+33*4);
-node_pks.insert(get_pubkey_from_node_id!(msg.node_id_1));
-node_pks.insert(get_pubkey_from_node_id!(msg.node_id_2));
+node_pks.insert(get_pubkey_from_node_id!(msg.node_id_1), ());
+node_pks.insert(get_pubkey_from_node_id!(msg.node_id_2), ());
let _ = net_graph.update_channel_from_unsigned_announcement(&msg, &Some(&chain_source));
},
3 => {
@@ -367,7 +369,7 @@ pub fn do_test<Out: test_logger::Output>(data: &[u8], out: Out) {
}).collect();
let mut features = Bolt12InvoiceFeatures::empty();
features.set_basic_mpp_optional();
-find_routes!(first_hops, vec![dummy_pk].iter(), |final_amt, _, _| {
+find_routes!(first_hops, [(dummy_pk, ())].iter(), |final_amt, _, _| {
RouteParameters::from_payment_params_and_value(PaymentParameters::blinded(last_hops.clone())
.with_bolt12_features(features.clone()).unwrap(),
final_amt)
16 changes: 3 additions & 13 deletions lightning/Cargo.toml
@@ -31,7 +31,7 @@ unsafe_revoked_tx_signing = []
# Override signing to not include randomness when generating signatures for test vectors.
_test_vectors = []

-no-std = ["hashbrown", "ahash", "bitcoin/no-std", "core2/alloc", "libm"]
+no-std = ["hashbrown", "possiblyrandom", "bitcoin/no-std", "core2/alloc", "libm"]
std = ["bitcoin/std"]

# Generates low-r bitcoin signatures, which saves 1 byte in 50% of the cases
@@ -42,25 +42,15 @@ default = ["std", "grind_signatures"]
[dependencies]
bitcoin = { version = "0.30.2", default-features = false, features = ["secp-recovery"] }

-hashbrown = { version = "0.13", optional = true }
-ahash = { version = "0.8", optional = true, default-features = false }
+hashbrown = { version = "0.13", optional = true, default-features = false }
+possiblyrandom = { version = "0.1", optional = true, default-features = false }
hex = { package = "hex-conservative", version = "0.1.1", default-features = false }
regex = { version = "1.5.6", optional = true }
backtrace = { version = "0.3", optional = true }

core2 = { version = "0.3.0", optional = true, default-features = false }
libm = { version = "0.2", optional = true, default-features = false }

-# Because ahash no longer (kinda poorly) does it for us, (roughly) list out the targets that
-# getrandom supports and turn on ahash's `runtime-rng` feature for them.
-[target.'cfg(not(any(target_os = "unknown", target_os = "none")))'.dependencies]
-ahash = { version = "0.8", optional = true, default-features = false, features = ["runtime-rng"] }
-
-# Not sure what target_os gets set to for sgx, so to be safe always enable runtime-rng for x86_64
-# platforms (assuming LDK isn't being used on embedded x86-64 running directly on metal).
-[target.'cfg(target_arch = "x86_64")'.dependencies]
-ahash = { version = "0.8", optional = true, default-features = false, features = ["runtime-rng"] }

[dev-dependencies]
regex = "1.5.6"
