pytest: fix flake in test_node_reannounce
Again, our new behaviour of sending our own gossip even before
they ask can confuse our gossip query tests.

In this case, simply eliminate duplicates.

Signed-off-by: Rusty Russell <[email protected]>
rustyrussell committed Jun 27, 2022
1 parent a677ea4 commit 2569999
Showing 1 changed file with 7 additions and 0 deletions.
tests/test_gossip.py (7 additions, 0 deletions)
@@ -1263,6 +1263,8 @@ def test_node_reannounce(node_factory, bitcoind, chainparams):
                            # And pings.
                            filters=['0109', '0107', '0102', '0100', '0012'])

+    # May send its own announcement *twice*, since it always spams us.
+    msgs = list(set(msgs))
     assert len(msgs) == 2
     assert (bytes("SENIORBEAM", encoding="utf8").hex() in msgs[0]
             or bytes("SENIORBEAM", encoding="utf8").hex() in msgs[1])
@@ -1277,6 +1279,9 @@ def test_node_reannounce(node_factory, bitcoind, chainparams):
                             # channel_announcement and channel_updates.
                             # And pings.
                             filters=['0109', '0107', '0102', '0100', '0012'])
+
+    # May send its own announcement *twice*, since it always spams us.
+    msgs2 = list(set(msgs2))
     assert msgs == msgs2
     # Won't have queued up another one, either.
     assert not l1.daemon.is_in_log('node_announcement: delaying')
@@ -1295,13 +1300,15 @@ def test_node_reannounce(node_factory, bitcoind, chainparams):
     assert ad['channel_fee_max_base_msat'] == Millisatoshi('2000msat')
     assert ad['channel_fee_max_proportional_thousandths'] == 22

+    # May send its own announcement *twice*, since it always spams us.
     msgs2 = l1.query_gossip('gossip_timestamp_filter',
                             genesis_blockhash,
                             '0', '0xFFFFFFFF',
                             # Filter out gossip_timestamp_filter,
                             # channel_announcement and channel_updates.
                             # And pings.
                             filters=['0109', '0107', '0102', '0100', '0012'])
+    msgs2 = list(set(msgs2))
     assert msgs != msgs2
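
The fix works because query_gossip() returns each gossip message as a hex string (the assertions above look for the hex of "SENIORBEAM" inside them), so an exact duplicate of the node's own spammed announcement collapses cleanly under set() before the existing checks run. Below is a minimal, self-contained sketch of that dedup-before-assert pattern; it is not part of the patch, and the hex values and the dedup helper are made-up placeholders, assuming nothing beyond standard Python.

# Sketch of the dedup-before-assert idea applied by this fix (placeholder data).

def dedup(msgs):
    """Drop repeated gossip messages (hex strings), keeping one copy of each."""
    return list(set(msgs))

# The node may spam its own node_announcement, so the raw reply can contain
# the same message twice alongside the other node's announcement.
raw = ['0101aaaa', '0101bbbb', '0101aaaa']

msgs = dedup(raw)
assert len(msgs) == 2   # the duplicate no longer skews the count

In the test itself the same one-liner, msgs = list(set(msgs)), is applied to each query result right before the length and equality assertions, which is all the flake needed.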

