Skip to content

Commit 7cfafc9

Browse files
Add test coverage for ChannelClosed event fields
1 parent bd6323e commit 7cfafc9

13 files changed

+242
-204
lines changed

lightning-persister/src/lib.rs

+4-4
Original file line numberDiff line numberDiff line change
@@ -237,7 +237,7 @@ mod tests {
237237
// Force close because cooperative close doesn't result in any persisted
238238
// updates.
239239
nodes[0].node.force_close_broadcasting_latest_txn(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id()).unwrap();
240-
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed);
240+
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
241241
check_closed_broadcast!(nodes[0], true);
242242
check_added_monitors!(nodes[0], 1);
243243

@@ -246,7 +246,7 @@ mod tests {
246246

247247
connect_block(&nodes[1], &create_dummy_block(nodes[0].best_block_hash(), 42, vec![node_txn[0].clone(), node_txn[0].clone()]));
248248
check_closed_broadcast!(nodes[1], true);
249-
check_closed_event!(nodes[1], 1, ClosureReason::CommitmentTxConfirmed);
249+
check_closed_event!(nodes[1], 1, ClosureReason::CommitmentTxConfirmed, [nodes[0].node.get_our_node_id()], 100000);
250250
check_added_monitors!(nodes[1], 1);
251251

252252
// Make sure everything is persisted as expected after close.
@@ -270,7 +270,7 @@ mod tests {
270270
let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
271271
let chan = create_announced_chan_between_nodes(&nodes, 0, 1);
272272
nodes[1].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[0].node.get_our_node_id()).unwrap();
273-
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed);
273+
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[0].node.get_our_node_id()], 100000);
274274
let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();
275275
let update_map = nodes[1].chain_monitor.latest_monitor_update_id.lock().unwrap();
276276
let update_id = update_map.get(&added_monitors[0].0.to_channel_id()).unwrap();
@@ -309,7 +309,7 @@ mod tests {
309309
let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
310310
let chan = create_announced_chan_between_nodes(&nodes, 0, 1);
311311
nodes[1].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[0].node.get_our_node_id()).unwrap();
312-
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed);
312+
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[0].node.get_our_node_id()], 100000);
313313
let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();
314314
let update_map = nodes[1].chain_monitor.latest_monitor_update_id.lock().unwrap();
315315
let update_id = update_map.get(&added_monitors[0].0.to_channel_id()).unwrap();

lightning/src/chain/chainmonitor.rs

+4-2
Original file line numberDiff line numberDiff line change
@@ -966,7 +966,8 @@ mod tests {
966966
assert!(err.contains("ChannelMonitor storage failure")));
967967
check_added_monitors!(nodes[0], 2); // After the failure we generate a close-channel monitor update
968968
check_closed_broadcast!(nodes[0], true);
969-
check_closed_event!(nodes[0], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() });
969+
check_closed_event!(nodes[0], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() },
970+
[nodes[1].node.get_our_node_id()], 100000);
970971

971972
// However, as the ChainMonitor is still waiting for the original persistence to complete,
972973
// it won't yet release the MonitorEvents.
@@ -1013,7 +1014,8 @@ mod tests {
10131014
// ... however once we get events once, the channel will close, creating a channel-closed
10141015
// ChannelMonitorUpdate.
10151016
check_closed_broadcast!(nodes[0], true);
1016-
check_closed_event!(nodes[0], 1, ClosureReason::ProcessingError { err: "Failed to persist ChannelMonitor update during chain sync".to_string() });
1017+
check_closed_event!(nodes[0], 1, ClosureReason::ProcessingError { err: "Failed to persist ChannelMonitor update during chain sync".to_string() },
1018+
[nodes[1].node.get_our_node_id()], 100000);
10171019
check_added_monitors!(nodes[0], 1);
10181020
}
10191021
}

lightning/src/chain/channelmonitor.rs

+2-1
Original file line numberDiff line numberDiff line change
@@ -4276,7 +4276,8 @@ mod tests {
42764276
assert!(err.contains("ChannelMonitor storage failure")));
42774277
check_added_monitors!(nodes[1], 2); // After the failure we generate a close-channel monitor update
42784278
check_closed_broadcast!(nodes[1], true);
4279-
check_closed_event!(nodes[1], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() });
4279+
check_closed_event!(nodes[1], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() },
4280+
[nodes[0].node.get_our_node_id()], 100000);
42804281

42814282
// Build a new ChannelMonitorUpdate which contains both the failing commitment tx update
42824283
// and provides the claim preimages for the two pending HTLCs. The first update generates

lightning/src/ln/chanmon_update_fail_tests.rs

+14-11
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,8 @@ fn test_simple_monitor_permanent_update_fail() {
7070
// PaymentPathFailed event
7171

7272
assert_eq!(nodes[0].node.list_channels().len(), 0);
73-
check_closed_event!(nodes[0], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() });
73+
check_closed_event!(nodes[0], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() },
74+
[nodes[1].node.get_our_node_id()], 100000);
7475
}
7576

7677
#[test]
@@ -247,7 +248,7 @@ fn do_test_simple_monitor_temporary_update_fail(disconnect: bool) {
247248
// PaymentPathFailed event
248249

249250
assert_eq!(nodes[0].node.list_channels().len(), 0);
250-
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed);
251+
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
251252
}
252253

253254
#[test]
@@ -1987,8 +1988,8 @@ fn do_during_funding_monitor_fail(confirm_a_first: bool, restore_b_before_conf:
19871988

19881989
send_payment(&nodes[0], &[&nodes[1]], 8000000);
19891990
close_channel(&nodes[0], &nodes[1], &channel_id, funding_tx, true);
1990-
check_closed_event!(nodes[0], 1, ClosureReason::CooperativeClosure);
1991-
check_closed_event!(nodes[1], 1, ClosureReason::CooperativeClosure);
1991+
check_closed_event!(nodes[0], 1, ClosureReason::CooperativeClosure, [nodes[1].node.get_our_node_id()], 100000);
1992+
check_closed_event!(nodes[1], 1, ClosureReason::CooperativeClosure, [nodes[0].node.get_our_node_id()], 100000);
19921993
}
19931994

19941995
#[test]
@@ -2188,7 +2189,7 @@ fn test_fail_htlc_on_broadcast_after_claim() {
21882189
expect_payment_forwarded!(nodes[1], nodes[0], nodes[2], Some(1000), false, false);
21892190

21902191
mine_transaction(&nodes[1], &bs_txn[0]);
2191-
check_closed_event!(nodes[1], 1, ClosureReason::CommitmentTxConfirmed);
2192+
check_closed_event!(nodes[1], 1, ClosureReason::CommitmentTxConfirmed, [nodes[2].node.get_our_node_id()], 100000);
21922193
check_closed_broadcast!(nodes[1], true);
21932194
connect_blocks(&nodes[1], ANTI_REORG_DELAY - 1);
21942195
check_added_monitors!(nodes[1], 1);
@@ -2666,8 +2667,8 @@ fn test_temporary_error_during_shutdown() {
26662667
assert_eq!(txn_a, txn_b);
26672668
assert_eq!(txn_a.len(), 1);
26682669
check_spends!(txn_a[0], funding_tx);
2669-
check_closed_event!(nodes[1], 1, ClosureReason::CooperativeClosure);
2670-
check_closed_event!(nodes[0], 1, ClosureReason::CooperativeClosure);
2670+
check_closed_event!(nodes[1], 1, ClosureReason::CooperativeClosure, [nodes[0].node.get_our_node_id()], 100000);
2671+
check_closed_event!(nodes[0], 1, ClosureReason::CooperativeClosure, [nodes[1].node.get_our_node_id()], 100000);
26712672
}
26722673

26732674
#[test]
@@ -2696,7 +2697,8 @@ fn test_permanent_error_during_sending_shutdown() {
26962697
if let MessageSendEvent::HandleError { .. } = msg_events[2] {} else { panic!(); }
26972698

26982699
check_added_monitors!(nodes[0], 2);
2699-
check_closed_event!(nodes[0], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() });
2700+
check_closed_event!(nodes[0], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() },
2701+
[nodes[1].node.get_our_node_id()], 100000);
27002702
}
27012703

27022704
#[test]
@@ -2727,7 +2729,8 @@ fn test_permanent_error_during_handling_shutdown() {
27272729
if let MessageSendEvent::HandleError { .. } = msg_events[2] {} else { panic!(); }
27282730

27292731
check_added_monitors!(nodes[1], 2);
2730-
check_closed_event!(nodes[1], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() });
2732+
check_closed_event!(nodes[1], 1, ClosureReason::ProcessingError { err: "ChannelMonitor storage failure".to_string() },
2733+
[nodes[0].node.get_our_node_id()], 100000);
27312734
}
27322735

27332736
#[test]
@@ -2921,7 +2924,7 @@ fn do_test_outbound_reload_without_init_mon(use_0conf: bool) {
29212924
nodes[0].chain_source.watched_outputs.lock().unwrap().clear();
29222925

29232926
reload_node!(nodes[0], &nodes[0].node.encode(), &[], persister, new_chain_monitor, nodes_0_deserialized);
2924-
check_closed_event!(nodes[0], 1, ClosureReason::DisconnectedPeer);
2927+
check_closed_event!(nodes[0], 1, ClosureReason::DisconnectedPeer, [nodes[1].node.get_our_node_id()], 100000);
29252928
assert!(nodes[0].node.list_channels().is_empty());
29262929
}
29272930

@@ -3008,7 +3011,7 @@ fn do_test_inbound_reload_without_init_mon(use_0conf: bool, lock_commitment: boo
30083011

30093012
reload_node!(nodes[1], &nodes[1].node.encode(), &[], persister, new_chain_monitor, nodes_1_deserialized);
30103013

3011-
check_closed_event!(nodes[1], 1, ClosureReason::DisconnectedPeer);
3014+
check_closed_event!(nodes[1], 1, ClosureReason::DisconnectedPeer, [nodes[0].node.get_our_node_id()], 100000);
30123015
assert!(nodes[1].node.list_channels().is_empty());
30133016
}
30143017

lightning/src/ln/channelmanager.rs

+4-4
Original file line numberDiff line numberDiff line change
@@ -9636,7 +9636,7 @@ mod tests {
96369636
nodes[0].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[1].node.get_our_node_id()).unwrap();
96379637
check_closed_broadcast!(nodes[0], true);
96389638
check_added_monitors!(nodes[0], 1);
9639-
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed);
9639+
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
96409640

96419641
{
96429642
// Assert that nodes[1] is awaiting removal for nodes[0] once nodes[1] has been
@@ -9799,8 +9799,8 @@ mod tests {
97999799
}
98009800
let (_nodes_1_update, _none) = get_closing_signed_broadcast!(nodes[1].node, nodes[0].node.get_our_node_id());
98019801

9802-
check_closed_event!(nodes[0], 1, ClosureReason::CooperativeClosure);
9803-
check_closed_event!(nodes[1], 1, ClosureReason::CooperativeClosure);
9802+
check_closed_event!(nodes[0], 1, ClosureReason::CooperativeClosure, [nodes[1].node.get_our_node_id()], 1000000);
9803+
check_closed_event!(nodes[1], 1, ClosureReason::CooperativeClosure, [nodes[0].node.get_our_node_id()], 1000000);
98049804
}
98059805

98069806
fn check_not_connected_to_peer_error<T>(res_err: Result<T, APIError>, expected_public_key: PublicKey) {
@@ -10195,7 +10195,7 @@ mod tests {
1019510195
let open_channel_msg = get_event_msg!(nodes[0], MessageSendEvent::SendOpenChannel, nodes[1].node.get_our_node_id());
1019610196
assert!(!open_channel_msg.channel_type.unwrap().supports_anchors_zero_fee_htlc_tx());
1019710197

10198-
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed);
10198+
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[0].node.get_our_node_id()], 100000);
1019910199
}
1020010200

1020110201
#[test]

lightning/src/ln/functional_test_utils.rs

+12-7
Original file line numberDiff line numberDiff line change
@@ -1418,14 +1418,18 @@ macro_rules! check_closed_broadcast {
14181418
}
14191419

14201420
/// Check that a channel's closing channel events has been issued
1421-
pub fn check_closed_event(node: &Node, events_count: usize, expected_reason: ClosureReason, is_check_discard_funding: bool) {
1421+
pub fn check_closed_event(node: &Node, events_count: usize, expected_reason: ClosureReason, is_check_discard_funding: bool,
1422+
expected_counterparty_node_ids: &[PublicKey], expected_channel_capacity: u64) {
14221423
let events = node.node.get_and_clear_pending_events();
14231424
assert_eq!(events.len(), events_count, "{:?}", events);
14241425
let mut issues_discard_funding = false;
1425-
for event in events {
1426+
for (idx, event) in events.into_iter().enumerate() {
14261427
match event {
1427-
Event::ChannelClosed { ref reason, .. } => {
1428+
Event::ChannelClosed { ref reason, counterparty_node_id,
1429+
channel_capacity_sats, .. } => {
14281430
assert_eq!(*reason, expected_reason);
1431+
assert_eq!(counterparty_node_id.unwrap(), expected_counterparty_node_ids[idx]);
1432+
assert_eq!(channel_capacity_sats.unwrap(), expected_channel_capacity);
14291433
},
14301434
Event::DiscardFunding { .. } => {
14311435
issues_discard_funding = true;
@@ -1441,11 +1445,12 @@ pub fn check_closed_event(node: &Node, events_count: usize, expected_reason: Clo
14411445
/// Don't use this, use the identically-named function instead.
14421446
#[macro_export]
14431447
macro_rules! check_closed_event {
1444-
($node: expr, $events: expr, $reason: expr) => {
1445-
check_closed_event!($node, $events, $reason, false);
1448+
($node: expr, $events: expr, $reason: expr, $counterparty_node_ids: expr, $channel_capacity: expr) => {
1449+
check_closed_event!($node, $events, $reason, false, $counterparty_node_ids, $channel_capacity);
14461450
};
1447-
($node: expr, $events: expr, $reason: expr, $is_check_discard_funding: expr) => {
1448-
$crate::ln::functional_test_utils::check_closed_event(&$node, $events, $reason, $is_check_discard_funding);
1451+
($node: expr, $events: expr, $reason: expr, $is_check_discard_funding: expr, $counterparty_node_ids: expr, $channel_capacity: expr) => {
1452+
$crate::ln::functional_test_utils::check_closed_event(&$node, $events, $reason,
1453+
$is_check_discard_funding, &$counterparty_node_ids, $channel_capacity);
14491454
}
14501455
}
14511456

0 commit comments

Comments
 (0)